]> Pileus Git - ~andy/linux/blob - arch/arm/kernel/perf_event_v7.c
Merge branch 'dma' of http://git.linaro.org/git/people/nico/linux into devel-stable
[~andy/linux] / arch / arm / kernel / perf_event_v7.c
1 /*
2  * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
3  *
4  * ARMv7 support: Jean Pihet <jpihet@mvista.com>
5  * 2010 (c) MontaVista Software, LLC.
6  *
7  * Copied from ARMv6 code, with the low level code inspired
8  *  by the ARMv7 Oprofile code.
9  *
10  * Cortex-A8 has up to 4 configurable performance counters and
11  *  a single cycle counter.
12  * Cortex-A9 has up to 31 configurable performance counters and
13  *  a single cycle counter.
14  *
15  * All counters can be enabled/disabled and IRQ masked separately. The cycle
16  *  counter and all 4 performance counters together can be reset separately.
17  */
18
19 #ifdef CONFIG_CPU_V7
/*
 * Common ARMv7 event type encodings.  These are the raw event numbers
 * written to the event-select register to choose what an event counter
 * counts.
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR		= 0x00,	/* software increment */
	ARMV7_PERFCTR_IFETCH_MISS		= 0x01,	/* L1 I-side */
	ARMV7_PERFCTR_ITLB_MISS			= 0x02,
	ARMV7_PERFCTR_DCACHE_REFILL		= 0x03,	/* L1 */
	ARMV7_PERFCTR_DCACHE_ACCESS		= 0x04,	/* L1 */
	ARMV7_PERFCTR_DTLB_REFILL		= 0x05,
	ARMV7_PERFCTR_DREAD			= 0x06,	/* data read */
	ARMV7_PERFCTR_DWRITE			= 0x07,	/* data write */
	ARMV7_PERFCTR_INSTR_EXECUTED		= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN			= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED		= 0x0A,
	ARMV7_PERFCTR_CID_WRITE			= 0x0B,	/* context ID write */
	/* ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE			= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH		= 0x0D,	/* immediate branch */
	ARMV7_PERFCTR_PC_PROC_RETURN		= 0x0E,	/* procedure return */
	ARMV7_PERFCTR_UNALIGNED_ACCESS		= 0x0F,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED	= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES		= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED		= 0x12,
	ARMV7_PERFCTR_MEM_ACCESS		= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS		= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB		= 0x15,	/* write-back */
	ARMV7_PERFCTR_L2_DCACHE_ACCESS		= 0x16,
	ARMV7_PERFCTR_L2_DCACHE_REFILL		= 0x17,
	ARMV7_PERFCTR_L2_DCACHE_WB		= 0x18,	/* write-back */
	ARMV7_PERFCTR_BUS_ACCESS		= 0x19,
	ARMV7_PERFCTR_MEMORY_ERROR		= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC		= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE		= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES		= 0x1D,

	/*
	 * NOTE(review): 0xFF is not an architected event number; it
	 * appears to serve as an in-kernel marker for the dedicated cycle
	 * counter — confirm against the counter-selection code later in
	 * this file.
	 */
	ARMV7_PERFCTR_CPU_CYCLES		= 0xFF
};
69
/*
 * ARMv7 Cortex-A8 specific event types (implementation-defined event
 * number space, 0x40 and above — see the Cortex-A8 TRM).
 */
enum armv7_a8_perf_types {
	ARMV7_PERFCTR_WRITE_BUFFER_FULL		= 0x40,
	ARMV7_PERFCTR_L2_STORE_MERGED		= 0x41,
	ARMV7_PERFCTR_L2_STORE_BUFF		= 0x42,
	ARMV7_PERFCTR_L2_ACCESS			= 0x43,
	/*
	 * NOTE(review): "CACH" is a misspelling of "CACHE", but the name
	 * is referenced by the event tables below (and possibly elsewhere
	 * in the file), so it is deliberately left unchanged.
	 */
	ARMV7_PERFCTR_L2_CACH_MISS		= 0x44,
	ARMV7_PERFCTR_AXI_READ_CYCLES		= 0x45,
	ARMV7_PERFCTR_AXI_WRITE_CYCLES		= 0x46,
	ARMV7_PERFCTR_MEMORY_REPLAY		= 0x47,
	ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY	= 0x48,
	ARMV7_PERFCTR_L1_DATA_MISS		= 0x49,
	ARMV7_PERFCTR_L1_INST_MISS		= 0x4A,
	ARMV7_PERFCTR_L1_DATA_COLORING		= 0x4B,
	ARMV7_PERFCTR_L1_NEON_DATA		= 0x4C,
	ARMV7_PERFCTR_L1_NEON_CACH_DATA		= 0x4D,	/* same misspelling */
	ARMV7_PERFCTR_L2_NEON			= 0x4E,
	ARMV7_PERFCTR_L2_NEON_HIT		= 0x4F,
	ARMV7_PERFCTR_L1_INST			= 0x50,
	ARMV7_PERFCTR_PC_RETURN_MIS_PRED	= 0x51,
	ARMV7_PERFCTR_PC_BRANCH_FAILED		= 0x52,
	ARMV7_PERFCTR_PC_BRANCH_TAKEN		= 0x53,
	ARMV7_PERFCTR_PC_BRANCH_EXECUTED	= 0x54,
	ARMV7_PERFCTR_OP_EXECUTED		= 0x55,
	ARMV7_PERFCTR_CYCLES_INST_STALL		= 0x56,
	ARMV7_PERFCTR_CYCLES_INST		= 0x57,
	ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL	= 0x58,
	ARMV7_PERFCTR_CYCLES_NEON_INST_STALL	= 0x59,
	ARMV7_PERFCTR_NEON_CYCLES		= 0x5A,

	/* presumably the external PMUx event inputs — see Cortex-A8 TRM */
	ARMV7_PERFCTR_PMU0_EVENTS		= 0x70,
	ARMV7_PERFCTR_PMU1_EVENTS		= 0x71,
	ARMV7_PERFCTR_PMU_EVENTS		= 0x72,
};
104
/*
 * ARMv7 Cortex-A9 specific event types (implementation-defined event
 * number space, 0x40 and above — see the Cortex-A9 TRM).
 */
enum armv7_a9_perf_types {
	/* Jazelle/Java acceleration */
	ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC	= 0x40,
	ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC	= 0x41,
	ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC	= 0x42,

	/* coherency (SCU) line events */
	ARMV7_PERFCTR_COHERENT_LINE_MISS	= 0x50,
	ARMV7_PERFCTR_COHERENT_LINE_HIT		= 0x51,

	/* pipeline stall cycle counts */
	ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES	= 0x60,
	ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES	= 0x61,
	ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES	= 0x62,
	ARMV7_PERFCTR_STREX_EXECUTED_PASSED	= 0x63,
	ARMV7_PERFCTR_STREX_EXECUTED_FAILED	= 0x64,
	ARMV7_PERFCTR_DATA_EVICTION		= 0x65,
	ARMV7_PERFCTR_ISSUE_STAGE_NO_INST	= 0x66,
	ARMV7_PERFCTR_ISSUE_STAGE_EMPTY		= 0x67,
	ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE	= 0x68,

	ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS	= 0x6E,

	/* instructions broken down by execution unit */
	ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST	= 0x70,
	ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST	= 0x71,
	ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST	= 0x72,
	ARMV7_PERFCTR_FP_EXECUTED_INST		= 0x73,
	ARMV7_PERFCTR_NEON_EXECUTED_INST	= 0x74,

	/* further stall cycle counts */
	ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES	= 0x80,
	ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES	= 0x81,
	ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES	= 0x82,
	ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES	= 0x83,
	ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES	= 0x84,
	ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES	= 0x85,
	ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES	= 0x86,

	/* clock gating */
	ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES	= 0x8A,
	ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES	= 0x8B,

	/* barriers and interrupts */
	ARMV7_PERFCTR_ISB_INST			= 0x90,
	ARMV7_PERFCTR_DSB_INST			= 0x91,
	ARMV7_PERFCTR_DMB_INST			= 0x92,
	ARMV7_PERFCTR_EXT_INTERRUPTS		= 0x93,

	/* PLE = preload engine */
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED	= 0xA0,
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED	= 0xA1,
	ARMV7_PERFCTR_PLE_FIFO_FLUSH		= 0xA2,
	ARMV7_PERFCTR_PLE_RQST_COMPLETED	= 0xA3,
	ARMV7_PERFCTR_PLE_FIFO_OVERFLOW		= 0xA4,
	ARMV7_PERFCTR_PLE_RQST_PROG		= 0xA5
};
155
/*
 * ARMv7 Cortex-A5 specific event types (implementation-defined event
 * number space — see the Cortex-A5 TRM).
 */
enum armv7_a5_perf_types {
	ARMV7_PERFCTR_IRQ_TAKEN			= 0x86,
	ARMV7_PERFCTR_FIQ_TAKEN			= 0x87,

	ARMV7_PERFCTR_EXT_MEM_RQST		= 0xc0,	/* external memory request */
	ARMV7_PERFCTR_NC_EXT_MEM_RQST		= 0xc1,	/* non-cacheable request */
	ARMV7_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP	= 0xc3,
	ARMV7_PERFCTR_ENTER_READ_ALLOC		= 0xc4,
	ARMV7_PERFCTR_READ_ALLOC		= 0xc5,

	/* store buffer full */
	ARMV7_PERFCTR_STALL_SB_FULL		= 0xc9,
};
170
/*
 * ARMv7 Cortex-A15 specific event types (implementation-defined event
 * number space — see the Cortex-A15 TRM).  Unlike the older cores,
 * the A15 provides separate read and write variants of the L1/L2
 * data-cache and L1 DTLB events, which the cache map below uses.
 */
enum armv7_a15_perf_types {
	ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS	= 0x40,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS	= 0x41,
	ARMV7_PERFCTR_L1_DCACHE_READ_REFILL	= 0x42,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL	= 0x43,

	ARMV7_PERFCTR_L1_DTLB_READ_REFILL	= 0x4C,
	ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL	= 0x4D,

	ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS	= 0x50,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS	= 0x51,
	ARMV7_PERFCTR_L2_DCACHE_READ_REFILL	= 0x52,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL	= 0x53,

	/* speculative PC write — used for HW_BRANCH_INSTRUCTIONS below */
	ARMV7_PERFCTR_SPEC_PC_WRITE		= 0x76,
};
188
189 /*
190  * Cortex-A8 HW events mapping
191  *
192  * The hardware events that we support. We do support cache operations but
193  * we have harvard caches and no way to combine instruction and data
194  * accesses/misses in hardware.
195  */
196 static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
197         [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
198         [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
199         [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
200         [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
201         [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
202         [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
203         [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
204 };
205
206 static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
207                                           [PERF_COUNT_HW_CACHE_OP_MAX]
208                                           [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
209         [C(L1D)] = {
210                 /*
211                  * The performance counters don't differentiate between read
212                  * and write accesses/misses so this isn't strictly correct,
213                  * but it's the best we can do. Writes and reads get
214                  * combined.
215                  */
216                 [C(OP_READ)] = {
217                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
218                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
219                 },
220                 [C(OP_WRITE)] = {
221                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
222                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
223                 },
224                 [C(OP_PREFETCH)] = {
225                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
226                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
227                 },
228         },
229         [C(L1I)] = {
230                 [C(OP_READ)] = {
231                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_INST,
232                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_L1_INST_MISS,
233                 },
234                 [C(OP_WRITE)] = {
235                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_INST,
236                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_L1_INST_MISS,
237                 },
238                 [C(OP_PREFETCH)] = {
239                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
240                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
241                 },
242         },
243         [C(LL)] = {
244                 [C(OP_READ)] = {
245                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L2_ACCESS,
246                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_L2_CACH_MISS,
247                 },
248                 [C(OP_WRITE)] = {
249                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L2_ACCESS,
250                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_L2_CACH_MISS,
251                 },
252                 [C(OP_PREFETCH)] = {
253                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
254                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
255                 },
256         },
257         [C(DTLB)] = {
258                 [C(OP_READ)] = {
259                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
260                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
261                 },
262                 [C(OP_WRITE)] = {
263                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
264                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
265                 },
266                 [C(OP_PREFETCH)] = {
267                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
268                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
269                 },
270         },
271         [C(ITLB)] = {
272                 [C(OP_READ)] = {
273                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
274                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
275                 },
276                 [C(OP_WRITE)] = {
277                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
278                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
279                 },
280                 [C(OP_PREFETCH)] = {
281                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
282                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
283                 },
284         },
285         [C(BPU)] = {
286                 [C(OP_READ)] = {
287                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
288                         [C(RESULT_MISS)]
289                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
290                 },
291                 [C(OP_WRITE)] = {
292                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
293                         [C(RESULT_MISS)]
294                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
295                 },
296                 [C(OP_PREFETCH)] = {
297                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
298                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
299                 },
300         },
301 };
302
303 /*
304  * Cortex-A9 HW events mapping
305  */
306 static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
307         [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
308         [PERF_COUNT_HW_INSTRUCTIONS]        =
309                                         ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
310         [PERF_COUNT_HW_CACHE_REFERENCES]    = ARMV7_PERFCTR_COHERENT_LINE_HIT,
311         [PERF_COUNT_HW_CACHE_MISSES]        = ARMV7_PERFCTR_COHERENT_LINE_MISS,
312         [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
313         [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
314         [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
315 };
316
317 static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
318                                           [PERF_COUNT_HW_CACHE_OP_MAX]
319                                           [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
320         [C(L1D)] = {
321                 /*
322                  * The performance counters don't differentiate between read
323                  * and write accesses/misses so this isn't strictly correct,
324                  * but it's the best we can do. Writes and reads get
325                  * combined.
326                  */
327                 [C(OP_READ)] = {
328                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
329                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
330                 },
331                 [C(OP_WRITE)] = {
332                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
333                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
334                 },
335                 [C(OP_PREFETCH)] = {
336                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
337                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
338                 },
339         },
340         [C(L1I)] = {
341                 [C(OP_READ)] = {
342                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
343                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
344                 },
345                 [C(OP_WRITE)] = {
346                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
347                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
348                 },
349                 [C(OP_PREFETCH)] = {
350                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
351                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
352                 },
353         },
354         [C(LL)] = {
355                 [C(OP_READ)] = {
356                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
357                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
358                 },
359                 [C(OP_WRITE)] = {
360                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
361                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
362                 },
363                 [C(OP_PREFETCH)] = {
364                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
365                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
366                 },
367         },
368         [C(DTLB)] = {
369                 [C(OP_READ)] = {
370                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
371                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
372                 },
373                 [C(OP_WRITE)] = {
374                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
375                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
376                 },
377                 [C(OP_PREFETCH)] = {
378                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
379                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
380                 },
381         },
382         [C(ITLB)] = {
383                 [C(OP_READ)] = {
384                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
385                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
386                 },
387                 [C(OP_WRITE)] = {
388                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
389                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
390                 },
391                 [C(OP_PREFETCH)] = {
392                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
393                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
394                 },
395         },
396         [C(BPU)] = {
397                 [C(OP_READ)] = {
398                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
399                         [C(RESULT_MISS)]
400                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
401                 },
402                 [C(OP_WRITE)] = {
403                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
404                         [C(RESULT_MISS)]
405                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
406                 },
407                 [C(OP_PREFETCH)] = {
408                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
409                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
410                 },
411         },
412 };
413
414 /*
415  * Cortex-A5 HW events mapping
416  */
417 static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
418         [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
419         [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
420         [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
421         [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
422         [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
423         [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
424         [PERF_COUNT_HW_BUS_CYCLES]          = HW_OP_UNSUPPORTED,
425 };
426
427 static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
428                                         [PERF_COUNT_HW_CACHE_OP_MAX]
429                                         [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
430         [C(L1D)] = {
431                 [C(OP_READ)] = {
432                         [C(RESULT_ACCESS)]
433                                         = ARMV7_PERFCTR_DCACHE_ACCESS,
434                         [C(RESULT_MISS)]
435                                         = ARMV7_PERFCTR_DCACHE_REFILL,
436                 },
437                 [C(OP_WRITE)] = {
438                         [C(RESULT_ACCESS)]
439                                         = ARMV7_PERFCTR_DCACHE_ACCESS,
440                         [C(RESULT_MISS)]
441                                         = ARMV7_PERFCTR_DCACHE_REFILL,
442                 },
443                 [C(OP_PREFETCH)] = {
444                         [C(RESULT_ACCESS)]
445                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL,
446                         [C(RESULT_MISS)]
447                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
448                 },
449         },
450         [C(L1I)] = {
451                 [C(OP_READ)] = {
452                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
453                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
454                 },
455                 [C(OP_WRITE)] = {
456                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
457                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
458                 },
459                 /*
460                  * The prefetch counters don't differentiate between the I
461                  * side and the D side.
462                  */
463                 [C(OP_PREFETCH)] = {
464                         [C(RESULT_ACCESS)]
465                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL,
466                         [C(RESULT_MISS)]
467                                         = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
468                 },
469         },
470         [C(LL)] = {
471                 [C(OP_READ)] = {
472                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
473                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
474                 },
475                 [C(OP_WRITE)] = {
476                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
477                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
478                 },
479                 [C(OP_PREFETCH)] = {
480                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
481                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
482                 },
483         },
484         [C(DTLB)] = {
485                 [C(OP_READ)] = {
486                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
487                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
488                 },
489                 [C(OP_WRITE)] = {
490                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
491                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
492                 },
493                 [C(OP_PREFETCH)] = {
494                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
495                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
496                 },
497         },
498         [C(ITLB)] = {
499                 [C(OP_READ)] = {
500                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
501                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
502                 },
503                 [C(OP_WRITE)] = {
504                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
505                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
506                 },
507                 [C(OP_PREFETCH)] = {
508                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
509                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
510                 },
511         },
512         [C(BPU)] = {
513                 [C(OP_READ)] = {
514                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
515                         [C(RESULT_MISS)]
516                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
517                 },
518                 [C(OP_WRITE)] = {
519                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
520                         [C(RESULT_MISS)]
521                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
522                 },
523                 [C(OP_PREFETCH)] = {
524                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
525                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
526                 },
527         },
528 };
529
530 /*
531  * Cortex-A15 HW events mapping
532  */
533 static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
534         [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
535         [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
536         [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
537         [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
538         [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_SPEC_PC_WRITE,
539         [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
540         [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_BUS_CYCLES,
541 };
542
543 static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
544                                         [PERF_COUNT_HW_CACHE_OP_MAX]
545                                         [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
546         [C(L1D)] = {
547                 [C(OP_READ)] = {
548                         [C(RESULT_ACCESS)]
549                                         = ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
550                         [C(RESULT_MISS)]
551                                         = ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
552                 },
553                 [C(OP_WRITE)] = {
554                         [C(RESULT_ACCESS)]
555                                         = ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
556                         [C(RESULT_MISS)]
557                                         = ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
558                 },
559                 [C(OP_PREFETCH)] = {
560                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
561                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
562                 },
563         },
564         [C(L1I)] = {
565                 /*
566                  * Not all performance counters differentiate between read
567                  * and write accesses/misses so we're not always strictly
568                  * correct, but it's the best we can do. Writes and reads get
569                  * combined in these cases.
570                  */
571                 [C(OP_READ)] = {
572                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
573                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
574                 },
575                 [C(OP_WRITE)] = {
576                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
577                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
578                 },
579                 [C(OP_PREFETCH)] = {
580                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
581                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
582                 },
583         },
584         [C(LL)] = {
585                 [C(OP_READ)] = {
586                         [C(RESULT_ACCESS)]
587                                         = ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
588                         [C(RESULT_MISS)]
589                                         = ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
590                 },
591                 [C(OP_WRITE)] = {
592                         [C(RESULT_ACCESS)]
593                                         = ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
594                         [C(RESULT_MISS)]
595                                         = ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
596                 },
597                 [C(OP_PREFETCH)] = {
598                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
599                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
600                 },
601         },
602         [C(DTLB)] = {
603                 [C(OP_READ)] = {
604                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
605                         [C(RESULT_MISS)]
606                                         = ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
607                 },
608                 [C(OP_WRITE)] = {
609                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
610                         [C(RESULT_MISS)]
611                                         = ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
612                 },
613                 [C(OP_PREFETCH)] = {
614                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
615                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
616                 },
617         },
618         [C(ITLB)] = {
619                 [C(OP_READ)] = {
620                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
621                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
622                 },
623                 [C(OP_WRITE)] = {
624                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
625                         [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
626                 },
627                 [C(OP_PREFETCH)] = {
628                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
629                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
630                 },
631         },
632         [C(BPU)] = {
633                 [C(OP_READ)] = {
634                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
635                         [C(RESULT_MISS)]
636                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
637                 },
638                 [C(OP_WRITE)] = {
639                         [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
640                         [C(RESULT_MISS)]
641                                         = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
642                 },
643                 [C(OP_PREFETCH)] = {
644                         [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
645                         [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
646                 },
647         },
648 };
649
650 /*
651  * Perf Events counters
652  */
653 enum armv7_counters {
654         ARMV7_CYCLE_COUNTER             = 1,    /* Cycle counter */
655         ARMV7_COUNTER0                  = 2,    /* First event counter */
656 };
657
658 /*
659  * The cycle counter is ARMV7_CYCLE_COUNTER.
660  * The first event counter is ARMV7_COUNTER0.
661  * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
662  */
663 #define ARMV7_COUNTER_LAST      (ARMV7_COUNTER0 + armpmu->num_events - 1)
664
665 /*
666  * ARMv7 low level PMNC access
667  */
668
669 /*
670  * Per-CPU PMNC: config reg
671  */
672 #define ARMV7_PMNC_E            (1 << 0) /* Enable all counters */
673 #define ARMV7_PMNC_P            (1 << 1) /* Reset all counters */
674 #define ARMV7_PMNC_C            (1 << 2) /* Cycle counter reset */
675 #define ARMV7_PMNC_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
676 #define ARMV7_PMNC_X            (1 << 4) /* Export to ETM */
677 #define ARMV7_PMNC_DP           (1 << 5) /* Disable CCNT if non-invasive debug*/
678 #define ARMV7_PMNC_N_SHIFT      11       /* Number of counters supported */
679 #define ARMV7_PMNC_N_MASK       0x1f
680 #define ARMV7_PMNC_MASK         0x3f     /* Mask for writable bits */
681
682 /*
683  * Available counters
684  */
685 #define ARMV7_CNT0              0       /* First event counter */
686 #define ARMV7_CCNT              31      /* Cycle counter */
687
688 /* Perf Event to low level counters mapping */
689 #define ARMV7_EVENT_CNT_TO_CNTx (ARMV7_COUNTER0 - ARMV7_CNT0)
690
691 /*
692  * CNTENS: counters enable reg
693  */
694 #define ARMV7_CNTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
695 #define ARMV7_CNTENS_C          (1 << ARMV7_CCNT)
696
697 /*
698  * CNTENC: counters disable reg
699  */
700 #define ARMV7_CNTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
701 #define ARMV7_CNTENC_C          (1 << ARMV7_CCNT)
702
703 /*
704  * INTENS: counters overflow interrupt enable reg
705  */
706 #define ARMV7_INTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
707 #define ARMV7_INTENS_C          (1 << ARMV7_CCNT)
708
709 /*
710  * INTENC: counters overflow interrupt disable reg
711  */
712 #define ARMV7_INTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
713 #define ARMV7_INTENC_C          (1 << ARMV7_CCNT)
714
715 /*
716  * EVTSEL: Event selection reg
717  */
718 #define ARMV7_EVTSEL_MASK       0xff            /* Mask for writable bits */
719
720 /*
721  * SELECT: Counter selection reg
722  */
723 #define ARMV7_SELECT_MASK       0x1f            /* Mask for writable bits */
724
725 /*
726  * FLAG: counters overflow flag status reg
727  */
728 #define ARMV7_FLAG_P(idx)       (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
729 #define ARMV7_FLAG_C            (1 << ARMV7_CCNT)
730 #define ARMV7_FLAG_MASK         0xffffffff      /* Mask for writable bits */
731 #define ARMV7_OVERFLOWED_MASK   ARMV7_FLAG_MASK
732
733 static inline unsigned long armv7_pmnc_read(void)
734 {
735         u32 val;
736         asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
737         return val;
738 }
739
/*
 * Write the PMNC control register; only the architecturally writable
 * bits (ARMV7_PMNC_MASK) are forwarded to hardware.
 */
static inline void armv7_pmnc_write(unsigned long val)
{
	val &= ARMV7_PMNC_MASK;
	/*
	 * Barrier before touching the control register — presumably so
	 * earlier counter/event configuration takes effect first.
	 * NOTE(review): confirm the ordering rationale.
	 */
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
746
/*
 * Return non-zero if any overflow flag is set in @pmnc, a value
 * previously read from the overflow flag status register (see
 * armv7_pmnc_getreset_flags()).
 */
static inline int armv7_pmnc_has_overflowed(unsigned long pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}
751
752 static inline int armv7_pmnc_counter_has_overflowed(unsigned long pmnc,
753                                         enum armv7_counters counter)
754 {
755         int ret = 0;
756
757         if (counter == ARMV7_CYCLE_COUNTER)
758                 ret = pmnc & ARMV7_FLAG_C;
759         else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
760                 ret = pmnc & ARMV7_FLAG_P(counter);
761         else
762                 pr_err("CPU%u checking wrong counter %d overflow status\n",
763                         smp_processor_id(), counter);
764
765         return ret;
766 }
767
/*
 * Select event counter @idx via the counter SELECT register
 * (cp15 c9, c12, 5) so that the subsequent banked counter/evtsel
 * accesses (c9, c13, 1/2) target it.
 * Returns @idx on success, -1 for an out-of-range index.
 */
static inline int armv7_pmnc_select_counter(unsigned int idx)
{
	u32 val;

	if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
		pr_err("CPU%u selecting wrong PMNC counter"
			" %d\n", smp_processor_id(), idx);
		return -1;
	}

	/* Map the perf counter index to the hardware counter number */
	val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
	/* Make the selection visible before dependent counter accesses */
	isb();

	return idx;
}
784
/*
 * Read the current value of counter @idx: the cycle counter is read
 * directly (c9, c13, 0); an event counter is read through the banked
 * register (c9, c13, 2) after selecting it.  Returns 0 for an invalid
 * index (after logging an error) or if the selection fails.
 */
static inline u32 armv7pmu_read_counter(int idx)
{
	unsigned long value = 0;

	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mrc p15, 0, %0, c9, c13, 2"
				     : "=r" (value));
	} else
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);

	return value;
}
801
/*
 * Write @value into counter @idx: the cycle counter directly
 * (c9, c13, 0), an event counter through the banked register
 * (c9, c13, 2) after selecting it.  Invalid indices are logged and
 * otherwise ignored.
 */
static inline void armv7pmu_write_counter(int idx, u32 value)
{
	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mcr p15, 0, %0, c9, c13, 2"
				     : : "r" (value));
	} else
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
}
814
/*
 * Program the event type for counter @idx; only the low 8 bits
 * (ARMV7_EVTSEL_MASK) of @val are written.  Silently does nothing if
 * the counter selection fails (invalid @idx).
 */
static inline void armv7_pmnc_write_evtsel(unsigned int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTSEL_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}
822
823 static inline u32 armv7_pmnc_enable_counter(unsigned int idx)
824 {
825         u32 val;
826
827         if ((idx != ARMV7_CYCLE_COUNTER) &&
828             ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
829                 pr_err("CPU%u enabling wrong PMNC counter"
830                         " %d\n", smp_processor_id(), idx);
831                 return -1;
832         }
833
834         if (idx == ARMV7_CYCLE_COUNTER)
835                 val = ARMV7_CNTENS_C;
836         else
837                 val = ARMV7_CNTENS_P(idx);
838
839         asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));
840
841         return idx;
842 }
843
844 static inline u32 armv7_pmnc_disable_counter(unsigned int idx)
845 {
846         u32 val;
847
848
849         if ((idx != ARMV7_CYCLE_COUNTER) &&
850             ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
851                 pr_err("CPU%u disabling wrong PMNC counter"
852                         " %d\n", smp_processor_id(), idx);
853                 return -1;
854         }
855
856         if (idx == ARMV7_CYCLE_COUNTER)
857                 val = ARMV7_CNTENC_C;
858         else
859                 val = ARMV7_CNTENC_P(idx);
860
861         asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));
862
863         return idx;
864 }
865
866 static inline u32 armv7_pmnc_enable_intens(unsigned int idx)
867 {
868         u32 val;
869
870         if ((idx != ARMV7_CYCLE_COUNTER) &&
871             ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
872                 pr_err("CPU%u enabling wrong PMNC counter"
873                         " interrupt enable %d\n", smp_processor_id(), idx);
874                 return -1;
875         }
876
877         if (idx == ARMV7_CYCLE_COUNTER)
878                 val = ARMV7_INTENS_C;
879         else
880                 val = ARMV7_INTENS_P(idx);
881
882         asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));
883
884         return idx;
885 }
886
887 static inline u32 armv7_pmnc_disable_intens(unsigned int idx)
888 {
889         u32 val;
890
891         if ((idx != ARMV7_CYCLE_COUNTER) &&
892             ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
893                 pr_err("CPU%u disabling wrong PMNC counter"
894                         " interrupt enable %d\n", smp_processor_id(), idx);
895                 return -1;
896         }
897
898         if (idx == ARMV7_CYCLE_COUNTER)
899                 val = ARMV7_INTENC_C;
900         else
901                 val = ARMV7_INTENC_P(idx);
902
903         asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));
904
905         return idx;
906 }
907
/*
 * Read the counter overflow flag status register (c9, c12, 3) and clear
 * every flag that was set by writing the value back.  Returns the flags
 * that were pending, for the caller to dispatch.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
921
922 #ifdef DEBUG
/*
 * Debug aid: dump the PMU control and status registers, then the value
 * and event selection of every event counter.  Compiled only when DEBUG
 * is defined.
 */
static void armv7_pmnc_dump_regs(void)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	/*
	 * NOTE(review): exclusive bound '<' — check against the
	 * ARMV7_COUNTER_LAST definition (which itself appears to be one
	 * past the last allocated counter) that no counter is skipped.
	 */
	for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
	}
}
958 #endif
959
/*
 * Configure and start counter @idx for the event described by @hwc.
 * The counter is disabled first so the new event selection never counts
 * with a stale configuration; the whole sequence runs under pmu_lock
 * with interrupts disabled.
 */
static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't need to set the event if it's a cycle count
	 */
	if (idx != ARMV7_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
994
995 static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
996 {
997         unsigned long flags;
998
999         /*
1000          * Disable counter and interrupt
1001          */
1002         raw_spin_lock_irqsave(&pmu_lock, flags);
1003
1004         /*
1005          * Disable counter
1006          */
1007         armv7_pmnc_disable_counter(idx);
1008
1009         /*
1010          * Disable interrupt for this counter
1011          */
1012         armv7_pmnc_disable_intens(idx);
1013
1014         raw_spin_unlock_irqrestore(&pmu_lock, flags);
1015 }
1016
/*
 * PMU overflow interrupt handler.  Reads and clears the overflow flag
 * register, then, for every active counter whose flag was set, updates
 * the perf event count, restarts its sampling period and reports the
 * overflow to the perf core.
 */
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	unsigned long pmnc;
	struct perf_sample_data data;
	struct cpu_hw_events *cpuc;
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	perf_sample_data_init(&data, 0);

	cpuc = &__get_cpu_var(cpu_hw_events);
	/*
	 * Counter indices are 1-based here (ARMV7_CYCLE_COUNTER == 1), so
	 * idx 0 is never set in active_mask and the first iteration is a
	 * no-op.  NOTE(review): the inclusive '<=' bound assumes counters
	 * occupy indices 1..num_events — matches armv7pmu_get_event_idx.
	 */
	for (idx = 0; idx <= armpmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		if (!test_bit(idx, cpuc->active_mask))
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event, hwc, idx, 1);
		data.period = event->hw.last_period;
		/*
		 * NOTE(review): a zero return here appears to mean "no
		 * sample due yet" — skip the overflow report in that case.
		 */
		if (!armpmu_event_set_period(event, hwc, idx))
			continue;

		/* Non-zero return asks us to stop this event. */
		if (perf_event_overflow(event, 0, &data, regs))
			armpmu->disable(hwc, idx);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1079
1080 static void armv7pmu_start(void)
1081 {
1082         unsigned long flags;
1083
1084         raw_spin_lock_irqsave(&pmu_lock, flags);
1085         /* Enable all counters */
1086         armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
1087         raw_spin_unlock_irqrestore(&pmu_lock, flags);
1088 }
1089
1090 static void armv7pmu_stop(void)
1091 {
1092         unsigned long flags;
1093
1094         raw_spin_lock_irqsave(&pmu_lock, flags);
1095         /* Disable all counters */
1096         armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
1097         raw_spin_unlock_irqrestore(&pmu_lock, flags);
1098 }
1099
1100 static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
1101                                   struct hw_perf_event *event)
1102 {
1103         int idx;
1104
1105         /* Always place a cycle counter into the cycle counter. */
1106         if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
1107                 if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
1108                         return -EAGAIN;
1109
1110                 return ARMV7_CYCLE_COUNTER;
1111         } else {
1112                 /*
1113                  * For anything other than a cycle counter, try and use
1114                  * the events counters
1115                  */
1116                 for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
1117                         if (!test_and_set_bit(idx, cpuc->used_mask))
1118                                 return idx;
1119                 }
1120
1121                 /* The counters are all in use. */
1122                 return -EAGAIN;
1123         }
1124 }
1125
/*
 * Bring the PMU to a known state: disable the counters and their
 * overflow interrupts, then reset the event counters and the cycle
 * counter through PMNC.P/PMNC.C.  @info is unused.
 */
static void armv7pmu_reset(void *info)
{
	u32 idx, nb_cnt = armpmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	/*
	 * NOTE(review): counter indices run 1..num_events (see
	 * armv7pmu_get_event_idx), so 'idx < nb_cnt' stops one short of the
	 * last counter — confirm whether that is intended.
	 */
	for (idx = 1; idx < nb_cnt; ++idx)
		armv7pmu_disable_event(NULL, idx);

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
1137
/*
 * ARMv7 PMU operations shared by all the Cortex-A variants below; the
 * per-CPU *_pmu_init() functions fill in .id, .name, .cache_map,
 * .event_map and .num_events before returning a pointer to this
 * structure.
 */
static struct arm_pmu armv7pmu = {
	.handle_irq		= armv7pmu_handle_irq,
	.enable			= armv7pmu_enable_event,
	.disable		= armv7pmu_disable_event,
	.read_counter		= armv7pmu_read_counter,
	.write_counter		= armv7pmu_write_counter,
	.get_event_idx		= armv7pmu_get_event_idx,
	.start			= armv7pmu_start,
	.stop			= armv7pmu_stop,
	.reset			= armv7pmu_reset,
	.raw_event_mask		= 0xFF,		/* 8-bit event selector (ARMV7_EVTSEL_MASK) */
	.max_period		= (1LLU << 32) - 1,	/* counters are 32 bits wide */
};
1151
1152 static u32 __init armv7_read_num_pmnc_events(void)
1153 {
1154         u32 nb_cnt;
1155
1156         /* Read the nb of CNTx counters supported from PMNC */
1157         nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;
1158
1159         /* Add the CPU cycles counter and return */
1160         return nb_cnt + 1;
1161 }
1162
/* Bind the Cortex-A8 event/cache maps and probe the counter count. */
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA8;
	armv7pmu.name		= "ARMv7 Cortex-A8";
	armv7pmu.cache_map	= &armv7_a8_perf_cache_map;
	armv7pmu.event_map	= &armv7_a8_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1172
/* Bind the Cortex-A9 event/cache maps and probe the counter count. */
static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA9;
	armv7pmu.name		= "ARMv7 Cortex-A9";
	armv7pmu.cache_map	= &armv7_a9_perf_cache_map;
	armv7pmu.event_map	= &armv7_a9_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1182
/* Bind the Cortex-A5 event/cache maps and probe the counter count. */
static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA5;
	armv7pmu.name		= "ARMv7 Cortex-A5";
	armv7pmu.cache_map	= &armv7_a5_perf_cache_map;
	armv7pmu.event_map	= &armv7_a5_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1192
/* Bind the Cortex-A15 event/cache maps and probe the counter count. */
static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA15;
	armv7pmu.name		= "ARMv7 Cortex-A15";
	armv7pmu.cache_map	= &armv7_a15_perf_cache_map;
	armv7pmu.event_map	= &armv7_a15_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1202 #else
/*
 * CONFIG_CPU_V7 disabled: stub initialisers so callers can probe for an
 * ARMv7 PMU unconditionally; NULL means "not supported".
 */
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	return NULL;
}
1222 #endif  /* CONFIG_CPU_V7 */