/*
 * ARMv7 Cortex-A5, Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A5 has up to 2 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 *  a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The cycle
 *  counter and all the event counters together can be reset separately.
 */

#ifdef CONFIG_CPU_V7
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
        ARMV7_PERFCTR_PMNC_SW_INCR              = 0x00,
        ARMV7_PERFCTR_IFETCH_MISS               = 0x01,
        ARMV7_PERFCTR_ITLB_MISS                 = 0x02,
        ARMV7_PERFCTR_DCACHE_REFILL             = 0x03, /* L1 */
        ARMV7_PERFCTR_DCACHE_ACCESS             = 0x04, /* L1 */
        ARMV7_PERFCTR_DTLB_REFILL               = 0x05,
        ARMV7_PERFCTR_DREAD                     = 0x06,
        ARMV7_PERFCTR_DWRITE                    = 0x07,
        ARMV7_PERFCTR_INSTR_EXECUTED            = 0x08,
        ARMV7_PERFCTR_EXC_TAKEN                 = 0x09,
        ARMV7_PERFCTR_EXC_EXECUTED              = 0x0A,
        ARMV7_PERFCTR_CID_WRITE                 = 0x0B,
        /* ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
         * It counts:
         *  - all branch instructions,
         *  - instructions that explicitly write the PC,
         *  - exception generating instructions.
         */
        ARMV7_PERFCTR_PC_WRITE                  = 0x0C,
        ARMV7_PERFCTR_PC_IMM_BRANCH             = 0x0D,
        ARMV7_PERFCTR_PC_PROC_RETURN            = 0x0E,
        ARMV7_PERFCTR_UNALIGNED_ACCESS          = 0x0F,

        /* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
        ARMV7_PERFCTR_PC_BRANCH_MIS_PRED        = 0x10,
        ARMV7_PERFCTR_CLOCK_CYCLES              = 0x11,
        ARMV7_PERFCTR_PC_BRANCH_PRED            = 0x12,
        ARMV7_PERFCTR_MEM_ACCESS                = 0x13,
        ARMV7_PERFCTR_L1_ICACHE_ACCESS          = 0x14,
        ARMV7_PERFCTR_L1_DCACHE_WB              = 0x15,
        ARMV7_PERFCTR_L2_DCACHE_ACCESS          = 0x16,
        ARMV7_PERFCTR_L2_DCACHE_REFILL          = 0x17,
        ARMV7_PERFCTR_L2_DCACHE_WB              = 0x18,
        ARMV7_PERFCTR_BUS_ACCESS                = 0x19,
        ARMV7_PERFCTR_MEMORY_ERROR              = 0x1A,
        ARMV7_PERFCTR_INSTR_SPEC                = 0x1B,
        ARMV7_PERFCTR_TTBR_WRITE                = 0x1C,
        ARMV7_PERFCTR_BUS_CYCLES                = 0x1D,

        ARMV7_PERFCTR_CPU_CYCLES                = 0xFF
};

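/*
 * Event numbers 0x40 and above lie in the space the ARMv7 PMU architecture
 * leaves IMPLEMENTATION DEFINED, so each supported core contributes its own
 * event list below.
 */
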
/* ARMv7 Cortex-A8 specific event types */
enum armv7_a8_perf_types {
        ARMV7_PERFCTR_WRITE_BUFFER_FULL         = 0x40,
        ARMV7_PERFCTR_L2_STORE_MERGED           = 0x41,
        ARMV7_PERFCTR_L2_STORE_BUFF             = 0x42,
        ARMV7_PERFCTR_L2_ACCESS                 = 0x43,
        ARMV7_PERFCTR_L2_CACH_MISS              = 0x44,
        ARMV7_PERFCTR_AXI_READ_CYCLES           = 0x45,
        ARMV7_PERFCTR_AXI_WRITE_CYCLES          = 0x46,
        ARMV7_PERFCTR_MEMORY_REPLAY             = 0x47,
        ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY   = 0x48,
        ARMV7_PERFCTR_L1_DATA_MISS              = 0x49,
        ARMV7_PERFCTR_L1_INST_MISS              = 0x4A,
        ARMV7_PERFCTR_L1_DATA_COLORING          = 0x4B,
        ARMV7_PERFCTR_L1_NEON_DATA              = 0x4C,
        ARMV7_PERFCTR_L1_NEON_CACH_DATA         = 0x4D,
        ARMV7_PERFCTR_L2_NEON                   = 0x4E,
        ARMV7_PERFCTR_L2_NEON_HIT               = 0x4F,
        ARMV7_PERFCTR_L1_INST                   = 0x50,
        ARMV7_PERFCTR_PC_RETURN_MIS_PRED        = 0x51,
        ARMV7_PERFCTR_PC_BRANCH_FAILED          = 0x52,
        ARMV7_PERFCTR_PC_BRANCH_TAKEN           = 0x53,
        ARMV7_PERFCTR_PC_BRANCH_EXECUTED        = 0x54,
        ARMV7_PERFCTR_OP_EXECUTED               = 0x55,
        ARMV7_PERFCTR_CYCLES_INST_STALL         = 0x56,
        ARMV7_PERFCTR_CYCLES_INST               = 0x57,
        ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL    = 0x58,
        ARMV7_PERFCTR_CYCLES_NEON_INST_STALL    = 0x59,
        ARMV7_PERFCTR_NEON_CYCLES               = 0x5A,

        ARMV7_PERFCTR_PMU0_EVENTS               = 0x70,
        ARMV7_PERFCTR_PMU1_EVENTS               = 0x71,
        ARMV7_PERFCTR_PMU_EVENTS                = 0x72,
};

/* ARMv7 Cortex-A9 specific event types */
enum armv7_a9_perf_types {
        ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC     = 0x40,
        ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC     = 0x41,
        ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC       = 0x42,

        ARMV7_PERFCTR_COHERENT_LINE_MISS        = 0x50,
        ARMV7_PERFCTR_COHERENT_LINE_HIT         = 0x51,

        ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES   = 0x60,
        ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES   = 0x61,
        ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES = 0x62,
        ARMV7_PERFCTR_STREX_EXECUTED_PASSED     = 0x63,
        ARMV7_PERFCTR_STREX_EXECUTED_FAILED     = 0x64,
        ARMV7_PERFCTR_DATA_EVICTION             = 0x65,
        ARMV7_PERFCTR_ISSUE_STAGE_NO_INST       = 0x66,
        ARMV7_PERFCTR_ISSUE_STAGE_EMPTY         = 0x67,
        ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE  = 0x68,

        ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS = 0x6E,

        ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST   = 0x70,
        ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST = 0x71,
        ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST  = 0x72,
        ARMV7_PERFCTR_FP_EXECUTED_INST          = 0x73,
        ARMV7_PERFCTR_NEON_EXECUTED_INST        = 0x74,

        ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES = 0x80,
        ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES  = 0x81,
        ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES        = 0x82,
        ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES        = 0x83,
        ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES  = 0x84,
        ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES  = 0x85,
        ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES      = 0x86,

        ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES  = 0x8A,
        ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES = 0x8B,

        ARMV7_PERFCTR_ISB_INST                  = 0x90,
        ARMV7_PERFCTR_DSB_INST                  = 0x91,
        ARMV7_PERFCTR_DMB_INST                  = 0x92,
        ARMV7_PERFCTR_EXT_INTERRUPTS            = 0x93,

        ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED     = 0xA0,
        ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED       = 0xA1,
        ARMV7_PERFCTR_PLE_FIFO_FLUSH            = 0xA2,
        ARMV7_PERFCTR_PLE_RQST_COMPLETED        = 0xA3,
        ARMV7_PERFCTR_PLE_FIFO_OVERFLOW         = 0xA4,
        ARMV7_PERFCTR_PLE_RQST_PROG             = 0xA5
};

/* ARMv7 Cortex-A5 specific event types */
enum armv7_a5_perf_types {
        ARMV7_PERFCTR_IRQ_TAKEN                 = 0x86,
        ARMV7_PERFCTR_FIQ_TAKEN                 = 0x87,

        ARMV7_PERFCTR_EXT_MEM_RQST              = 0xc0,
        ARMV7_PERFCTR_NC_EXT_MEM_RQST           = 0xc1,
        ARMV7_PERFCTR_PREFETCH_LINEFILL         = 0xc2,
        ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP    = 0xc3,
        ARMV7_PERFCTR_ENTER_READ_ALLOC          = 0xc4,
        ARMV7_PERFCTR_READ_ALLOC                = 0xc5,

        ARMV7_PERFCTR_STALL_SB_FULL             = 0xc9,
};

/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have Harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
        [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                          [PERF_COUNT_HW_CACHE_OP_MAX]
                                          [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        [C(L1D)] = {
                /*
                 * The performance counters don't differentiate between read
                 * and write accesses/misses so this isn't strictly correct,
                 * but it's the best we can do. Writes and reads get
                 * combined.
                 */
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(L1I)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_INST,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_L1_INST_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_INST,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_L1_INST_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(LL)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L2_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_L2_CACH_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L2_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_L2_CACH_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(DTLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(ITLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(BPU)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
};

/*
 * Cortex-A9 HW events mapping
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]        =
                                        ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
        [PERF_COUNT_HW_CACHE_REFERENCES]    = ARMV7_PERFCTR_COHERENT_LINE_HIT,
        [PERF_COUNT_HW_CACHE_MISSES]        = ARMV7_PERFCTR_COHERENT_LINE_MISS,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                          [PERF_COUNT_HW_CACHE_OP_MAX]
                                          [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        [C(L1D)] = {
                /*
                 * The performance counters don't differentiate between read
                 * and write accesses/misses so this isn't strictly correct,
                 * but it's the best we can do. Writes and reads get
                 * combined.
                 */
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(L1I)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(LL)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(DTLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(ITLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(BPU)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_WRITE,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
};

/*
 * Cortex-A5 HW events mapping
 */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
        [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES]          = HW_OP_UNSUPPORTED,
};

static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                        [PERF_COUNT_HW_CACHE_OP_MAX]
                                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        [C(L1D)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]
                                        = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]
                                        = ARMV7_PERFCTR_DCACHE_ACCESS,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_DCACHE_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]
                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
                },
        },
        [C(L1I)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
                },
                /*
                 * The prefetch counters don't differentiate between the I
                 * side and the D side.
                 */
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]
                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
                },
        },
        [C(LL)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(DTLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(ITLB)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
        [C(BPU)] = {
                [C(OP_READ)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_WRITE)] = {
                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
                        [C(RESULT_MISS)]
                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
                },
                [C(OP_PREFETCH)] = {
                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
                },
        },
};

/*
 * Perf Events counters
 */
enum armv7_counters {
        ARMV7_CYCLE_COUNTER             = 1,    /* Cycle counter */
        ARMV7_COUNTER0                  = 2,    /* First event counter */
};

/*
 * The cycle counter is ARMV7_CYCLE_COUNTER.
 * The first event counter is ARMV7_COUNTER0.
 * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
 */
#define ARMV7_COUNTER_LAST      (ARMV7_COUNTER0 + armpmu->num_events - 1)

/*
 * ARMv7 low level PMNC access
 */

/*
 * Per-CPU PMNC: config reg
 */
#define ARMV7_PMNC_E            (1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P            (1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C            (1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X            (1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP           (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV7_PMNC_N_SHIFT      11       /* Number of counters supported */
#define ARMV7_PMNC_N_MASK       0x1f
#define ARMV7_PMNC_MASK         0x3f     /* Mask for writable bits */

/*
 * Available counters
 */
#define ARMV7_CNT0              0       /* First event counter */
#define ARMV7_CCNT              31      /* Cycle counter */

/* Perf Event to low level counters mapping */
#define ARMV7_EVENT_CNT_TO_CNTx (ARMV7_COUNTER0 - ARMV7_CNT0)
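
/*
 * ARMV7_EVENT_CNT_TO_CNTx is 2: perf counter index ARMV7_COUNTER0 (2)
 * selects hardware counter 0, index 3 selects counter 1, and so on. The
 * register masks below subtract this offset before shifting.
 */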

/*
 * CNTENS: counters enable reg
 */
#define ARMV7_CNTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENS_C          (1 << ARMV7_CCNT)

/*
 * CNTENC: counters disable reg
 */
#define ARMV7_CNTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENC_C          (1 << ARMV7_CCNT)

/*
 * INTENS: counters overflow interrupt enable reg
 */
#define ARMV7_INTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENS_C          (1 << ARMV7_CCNT)

/*
 * INTENC: counters overflow interrupt disable reg
 */
#define ARMV7_INTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENC_C          (1 << ARMV7_CCNT)

/*
 * EVTSEL: Event selection reg
 */
#define ARMV7_EVTSEL_MASK       0xff            /* Mask for writable bits */

/*
 * SELECT: Counter selection reg
 */
#define ARMV7_SELECT_MASK       0x1f            /* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg
 */
#define ARMV7_FLAG_P(idx)       (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_FLAG_C            (1 << ARMV7_CCNT)
#define ARMV7_FLAG_MASK         0xffffffff      /* Mask for writable bits */
#define ARMV7_OVERFLOWED_MASK   ARMV7_FLAG_MASK

static inline unsigned long armv7_pmnc_read(void)
{
        u32 val;
        asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
        return val;
}

static inline void armv7_pmnc_write(unsigned long val)
{
        val &= ARMV7_PMNC_MASK;
        isb();
        asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
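
/*
 * The two helpers above access CP15 c9, c12, 0, the Performance Monitor
 * Control Register (PMCR, "PMNC" here). Writes are masked to the
 * architecturally writable bits, and the isb() keeps the update from being
 * reordered with preceding instructions.
 */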

static inline int armv7_pmnc_has_overflowed(unsigned long pmnc)
{
        return pmnc & ARMV7_OVERFLOWED_MASK;
}

static inline int armv7_pmnc_counter_has_overflowed(unsigned long pmnc,
                                        enum armv7_counters counter)
{
        int ret = 0;

        if (counter == ARMV7_CYCLE_COUNTER)
                ret = pmnc & ARMV7_FLAG_C;
        else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
                ret = pmnc & ARMV7_FLAG_P(counter);
        else
                pr_err("CPU%u checking wrong counter %d overflow status\n",
                        smp_processor_id(), counter);

        return ret;
}

static inline int armv7_pmnc_select_counter(unsigned int idx)
{
        u32 val;

        if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
                pr_err("CPU%u selecting wrong PMNC counter %d\n",
                        smp_processor_id(), idx);
                return -1;
        }

        val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
        asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
        isb();

        return idx;
}
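
/*
 * Counter selection writes CP15 c9, c12, 5 (PMSELR). Once a counter is
 * selected, the event type register (c9, c13, 1, PMXEVTYPER) and the event
 * count register (c9, c13, 2, PMXEVCNTR) refer to that counter; the isb()
 * above ensures the selection has taken effect before the indirect access
 * that follows.
 */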

static inline u32 armv7pmu_read_counter(int idx)
{
        unsigned long value = 0;

        if (idx == ARMV7_CYCLE_COUNTER)
                asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
        else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
                if (armv7_pmnc_select_counter(idx) == idx)
                        asm volatile("mrc p15, 0, %0, c9, c13, 2"
                                     : "=r" (value));
        } else
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);

        return value;
}

static inline void armv7pmu_write_counter(int idx, u32 value)
{
        if (idx == ARMV7_CYCLE_COUNTER)
                asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
        else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
                if (armv7_pmnc_select_counter(idx) == idx)
                        asm volatile("mcr p15, 0, %0, c9, c13, 2"
                                     : : "r" (value));
        } else
                pr_err("CPU%u writing wrong counter %d\n",
                        smp_processor_id(), idx);
}

static inline void armv7_pmnc_write_evtsel(unsigned int idx, u32 val)
{
        if (armv7_pmnc_select_counter(idx) == idx) {
                val &= ARMV7_EVTSEL_MASK;
                asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
        }
}

static inline u32 armv7_pmnc_enable_counter(unsigned int idx)
{
        u32 val;

        if ((idx != ARMV7_CYCLE_COUNTER) &&
            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
                pr_err("CPU%u enabling wrong PMNC counter %d\n",
                        smp_processor_id(), idx);
                return -1;
        }

        if (idx == ARMV7_CYCLE_COUNTER)
                val = ARMV7_CNTENS_C;
        else
                val = ARMV7_CNTENS_P(idx);

        asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));

        return idx;
}

static inline u32 armv7_pmnc_disable_counter(unsigned int idx)
{
        u32 val;

        if ((idx != ARMV7_CYCLE_COUNTER) &&
            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
                pr_err("CPU%u disabling wrong PMNC counter %d\n",
                        smp_processor_id(), idx);
                return -1;
        }

        if (idx == ARMV7_CYCLE_COUNTER)
                val = ARMV7_CNTENC_C;
        else
                val = ARMV7_CNTENC_P(idx);

        asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));

        return idx;
}

static inline u32 armv7_pmnc_enable_intens(unsigned int idx)
{
        u32 val;

        if ((idx != ARMV7_CYCLE_COUNTER) &&
            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
                pr_err("CPU%u enabling wrong PMNC counter interrupt enable %d\n",
                        smp_processor_id(), idx);
                return -1;
        }

        if (idx == ARMV7_CYCLE_COUNTER)
                val = ARMV7_INTENS_C;
        else
                val = ARMV7_INTENS_P(idx);

        asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));

        return idx;
}

static inline u32 armv7_pmnc_disable_intens(unsigned int idx)
{
        u32 val;

        if ((idx != ARMV7_CYCLE_COUNTER) &&
            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
                pr_err("CPU%u disabling wrong PMNC counter interrupt enable %d\n",
                        smp_processor_id(), idx);
                return -1;
        }

        if (idx == ARMV7_CYCLE_COUNTER)
                val = ARMV7_INTENC_C;
        else
                val = ARMV7_INTENC_P(idx);

        asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));

        return idx;
}

static inline u32 armv7_pmnc_getreset_flags(void)
{
        u32 val;

        /* Read */
        asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

        /* Write to clear flags */
        val &= ARMV7_FLAG_MASK;
        asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

        return val;
}
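
/*
 * CP15 c9, c12, 3 is PMOVSR, the overflow flag status register. Its bits
 * are write-one-to-clear, so writing back the value just read acknowledges
 * exactly the overflows that were observed.
 */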

#ifdef DEBUG
static void armv7_pmnc_dump_regs(void)
{
        u32 val;
        unsigned int cnt;

        printk(KERN_INFO "PMNC registers dump:\n");

        asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
        printk(KERN_INFO "PMNC  =0x%08x\n", val);

        asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
        printk(KERN_INFO "CNTENS=0x%08x\n", val);

        asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
        printk(KERN_INFO "INTENS=0x%08x\n", val);

        asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
        printk(KERN_INFO "FLAGS =0x%08x\n", val);

        asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
        printk(KERN_INFO "SELECT=0x%08x\n", val);

        asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
        printk(KERN_INFO "CCNT  =0x%08x\n", val);

        for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
                armv7_pmnc_select_counter(cnt);
                asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
                printk(KERN_INFO "CNT[%d] count =0x%08x\n",
                        cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
                asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
                printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
                        cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
        }
}
#endif

static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
{
        unsigned long flags;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&pmu_lock, flags);

        /*
         * Disable counter
         */
        armv7_pmnc_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters)
         * We don't need to set the event if it's a cycle count
         */
        if (idx != ARMV7_CYCLE_COUNTER)
                armv7_pmnc_write_evtsel(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv7_pmnc_enable_intens(idx);

        /*
         * Enable counter
         */
        armv7_pmnc_enable_counter(idx);

        raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
{
        unsigned long flags;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&pmu_lock, flags);

        /*
         * Disable counter
         */
        armv7_pmnc_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv7_pmnc_disable_intens(idx);

        raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
        unsigned long pmnc;
        struct perf_sample_data data;
        struct cpu_hw_events *cpuc;
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmnc = armv7_pmnc_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv7_pmnc_has_overflowed(pmnc))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        perf_sample_data_init(&data, 0);

        cpuc = &__get_cpu_var(cpu_hw_events);
        for (idx = 0; idx <= armpmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                if (!test_bit(idx, cpuc->active_mask))
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event, hwc, idx, 1);
                data.period = event->hw.last_period;
                if (!armpmu_event_set_period(event, hwc, idx))
                        continue;

                if (perf_event_overflow(event, 0, &data, regs))
                        armpmu->disable(hwc, idx);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}

static void armv7pmu_start(void)
{
        unsigned long flags;

        raw_spin_lock_irqsave(&pmu_lock, flags);
        /* Enable all counters */
        armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
        raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static void armv7pmu_stop(void)
{
        unsigned long flags;

        raw_spin_lock_irqsave(&pmu_lock, flags);
        /* Disable all counters */
        armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
        raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
                                  struct hw_perf_event *event)
{
        int idx;

        /* Always place a cycle counter into the cycle counter. */
        if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
                if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV7_CYCLE_COUNTER;
        } else {
                /*
                 * For anything other than a cycle counter, try and use
                 * the event counters
                 */
                for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
                        if (!test_and_set_bit(idx, cpuc->used_mask))
                                return idx;
                }

                /* The counters are all in use. */
                return -EAGAIN;
        }
}
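
/*
 * For example, on a PMU with four event counters (num_events == 5, the
 * cycle counter included), a cycle-counter event always claims index
 * ARMV7_CYCLE_COUNTER (1), and any other event takes the first free index
 * in the range 2..5.
 */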

static void armv7pmu_reset(void *info)
{
        u32 idx, nb_cnt = armpmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = 1; idx < nb_cnt; ++idx)
                armv7pmu_disable_event(NULL, idx);

        /* Initialize & Reset PMNC: C and P bits */
        armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
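
/*
 * The void *info signature matches the smp_call_func_t callback type, which
 * lets the core PMU code run this reset on every CPU via on_each_cpu().
 */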

static struct arm_pmu armv7pmu = {
        .handle_irq             = armv7pmu_handle_irq,
        .enable                 = armv7pmu_enable_event,
        .disable                = armv7pmu_disable_event,
        .read_counter           = armv7pmu_read_counter,
        .write_counter          = armv7pmu_write_counter,
        .get_event_idx          = armv7pmu_get_event_idx,
        .start                  = armv7pmu_start,
        .stop                   = armv7pmu_stop,
        .reset                  = armv7pmu_reset,
        .raw_event_mask         = 0xFF,
        .max_period             = (1LLU << 32) - 1,
};
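
/*
 * Illustrative only, not part of this file: a minimal userspace sketch of
 * how the raw events above can be requested through perf_event_open(2).
 * Because .raw_event_mask is 0xFF, the low byte of attr.config selects the
 * event; 0x03 below is ARMV7_PERFCTR_DCACHE_REFILL from the common event
 * table. Error handling is kept minimal and the variable names are ours.
 *
 *	#include <linux/perf_event.h>
 *	#include <sys/ioctl.h>
 *	#include <sys/syscall.h>
 *	#include <stdint.h>
 *	#include <stdio.h>
 *	#include <string.h>
 *	#include <unistd.h>
 *
 *	int main(void)
 *	{
 *		struct perf_event_attr attr;
 *		uint64_t refills;
 *		int fd;
 *
 *		memset(&attr, 0, sizeof(attr));
 *		attr.size = sizeof(attr);
 *		attr.type = PERF_TYPE_RAW;
 *		attr.config = 0x03;		// L1 D-cache refill
 *		attr.disabled = 1;
 *
 *		// Count for this task (pid 0) on any CPU.
 *		fd = syscall(__NR_perf_event_open, &attr, 0, -1, -1, 0);
 *		if (fd < 0)
 *			return 1;
 *
 *		ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
 *		// ... workload under measurement ...
 *		ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);
 *
 *		read(fd, &refills, sizeof(refills));
 *		printf("L1 D-cache refills: %llu\n",
 *		       (unsigned long long)refills);
 *		close(fd);
 *		return 0;
 *	}
 */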

static u32 __init armv7_read_num_pmnc_events(void)
{
        u32 nb_cnt;

        /* Read the nb of CNTx counters supported from PMNC */
        nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

        /* Add the CPU cycles counter and return */
        return nb_cnt + 1;
}
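
/*
 * PMNC[15:11] holds N, the number of event counters implemented. A core
 * with four event counters reports N == 4, so this returns 5: the four
 * event counters plus the cycle counter.
 */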

static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
        armv7pmu.id             = ARM_PERF_PMU_ID_CA8;
        armv7pmu.name           = "ARMv7 Cortex-A8";
        armv7pmu.cache_map      = &armv7_a8_perf_cache_map;
        armv7pmu.event_map      = &armv7_a8_perf_map;
        armv7pmu.num_events     = armv7_read_num_pmnc_events();
        return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
        armv7pmu.id             = ARM_PERF_PMU_ID_CA9;
        armv7pmu.name           = "ARMv7 Cortex-A9";
        armv7pmu.cache_map      = &armv7_a9_perf_cache_map;
        armv7pmu.event_map      = &armv7_a9_perf_map;
        armv7pmu.num_events     = armv7_read_num_pmnc_events();
        return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
        armv7pmu.id             = ARM_PERF_PMU_ID_CA5;
        armv7pmu.name           = "ARMv7 Cortex-A5";
        armv7pmu.cache_map      = &armv7_a5_perf_cache_map;
        armv7pmu.event_map      = &armv7_a5_perf_map;
        armv7pmu.num_events     = armv7_read_num_pmnc_events();
        return &armv7pmu;
}
#else
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
        return NULL;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
        return NULL;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
        return NULL;
}
#endif  /* CONFIG_CPU_V7 */