perf_event.c

/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <asm/irq_regs.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 *
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR 0x00
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL 0x03
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS 0x04
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED 0x10
#define ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES 0x11
#define ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED 0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED 0x08
#define ARMV8_PMUV3_PERFCTR_OP_SPEC 0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_MEM_READ 0x06
#define ARMV8_PMUV3_PERFCTR_MEM_WRITE 0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN 0x09
#define ARMV8_PMUV3_PERFCTR_EXC_EXECUTED 0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE 0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE 0x0C
#define ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH 0x0D
#define ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN 0x0E
#define ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS 0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE 0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN 0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED 0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL 0x01
#define ARMV8_PMUV3_PERFCTR_ITLB_REFILL 0x02
#define ARMV8_PMUV3_PERFCTR_DTLB_REFILL 0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS 0x13
#define ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS 0x14
#define ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB 0x15
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS 0x16
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL 0x17
#define ARMV8_PMUV3_PERFCTR_L2_CACHE_WB 0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS 0x19
#define ARMV8_PMUV3_PERFCTR_MEM_ERROR 0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES 0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE 0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE 0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND 0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND 0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB 0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB 0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE 0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL 0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE 0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL 0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE 0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB 0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL 0x2D
#define ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL 0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB 0x2F
#define ARMV8_PMUV3_PERFCTR_L21_TLB 0x30

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREFETCH_LINEFILL 0xC2

/* ARMv8 Cortex-A57 and Cortex-A72 specific event types. */
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD 0x40
#define ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST 0x41
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD 0x42
#define ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST 0x43
#define ARMV8_A57_PERFCTR_DTLB_REFILL_LD 0x4c
#define ARMV8_A57_PERFCTR_DTLB_REFILL_ST 0x4d
/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                [PERF_COUNT_HW_CACHE_OP_MAX]
                                                [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREFETCH_LINEFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                              [PERF_COUNT_HW_CACHE_OP_MAX]
                                              [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_LD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_LD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_A57_PERFCTR_L1_DCACHE_ACCESS_ST,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_A57_PERFCTR_L1_DCACHE_REFILL_ST,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_A57_PERFCTR_DTLB_REFILL_LD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_A57_PERFCTR_DTLB_REFILL_ST,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_ITLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED,
};
#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
        PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
                              "event=" ARMV8_EVENT_ATTR_RESOLVE(config))
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_PMNC_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1_ICACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_ITLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1_DCACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1_DCACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_DTLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_MEM_READ);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_MEM_WRITE);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INSTR_EXECUTED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_EXECUTED);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_PC_IMM_BRANCH);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_PC_PROC_RETURN);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_PC_BRANCH_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1_ICACHE_ACCESS);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1_DCACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2_CACHE_ACCESS);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEM_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_OP_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l21_tlb_refill, ARMV8_PMUV3_PERFCTR_L21_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l21_tlb, ARMV8_PMUV3_PERFCTR_L21_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_sw_incr.attr.attr,
        &armv8_event_attr_l1i_cache_refill.attr.attr,
        &armv8_event_attr_l1i_tlb_refill.attr.attr,
        &armv8_event_attr_l1d_cache_refill.attr.attr,
        &armv8_event_attr_l1d_cache.attr.attr,
        &armv8_event_attr_l1d_tlb_refill.attr.attr,
        &armv8_event_attr_ld_retired.attr.attr,
        &armv8_event_attr_st_retired.attr.attr,
        &armv8_event_attr_inst_retired.attr.attr,
        &armv8_event_attr_exc_taken.attr.attr,
        &armv8_event_attr_exc_return.attr.attr,
        &armv8_event_attr_cid_write_retired.attr.attr,
        &armv8_event_attr_pc_write_retired.attr.attr,
        &armv8_event_attr_br_immed_retired.attr.attr,
        &armv8_event_attr_br_return_retired.attr.attr,
        &armv8_event_attr_unaligned_ldst_retired.attr.attr,
        &armv8_event_attr_br_mis_pred.attr.attr,
        &armv8_event_attr_cpu_cycles.attr.attr,
        &armv8_event_attr_br_pred.attr.attr,
        &armv8_event_attr_mem_access.attr.attr,
        &armv8_event_attr_l1i_cache.attr.attr,
        &armv8_event_attr_l1d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_cache.attr.attr,
        &armv8_event_attr_l2d_cache_refill.attr.attr,
        &armv8_event_attr_l2d_cache_wb.attr.attr,
        &armv8_event_attr_bus_access.attr.attr,
        &armv8_event_attr_memory_error.attr.attr,
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
        &armv8_event_attr_chain.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
        &armv8_event_attr_br_mis_pred_retired.attr.attr,
        &armv8_event_attr_stall_frontend.attr.attr,
        &armv8_event_attr_stall_backend.attr.attr,
        &armv8_event_attr_l1d_tlb.attr.attr,
        &armv8_event_attr_l1i_tlb.attr.attr,
        &armv8_event_attr_l2i_cache.attr.attr,
        &armv8_event_attr_l2i_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache_allocate.attr.attr,
        &armv8_event_attr_l3d_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache.attr.attr,
        &armv8_event_attr_l3d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_tlb_refill.attr.attr,
        &armv8_event_attr_l21_tlb_refill.attr.attr,
        &armv8_event_attr_l2d_tlb.attr.attr,
        &armv8_event_attr_l21_tlb.attr.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
        &format_attr_event.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
        .name = "format",
        .attrs = armv8_pmuv3_format_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
        &armv8_pmuv3_events_attr_group,
        &armv8_pmuv3_format_attr_group,
        NULL,
};
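/*
 * Illustrative only (assuming the standard perf sysfs layout): the two
 * attribute groups above surface each named event and the "event" config
 * field under /sys/bus/event_source/devices/<pmu-name>/, so an event can
 * be requested symbolically, e.g.:
 *
 *         perf stat -e armv8_pmuv3/l1d_cache/ -- <workload>
 */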
/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0 1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
        (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV8_MAX_COUNTERS 32
#define ARMV8_COUNTER_MASK (ARMV8_MAX_COUNTERS - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
        (((x) - ARMV8_IDX_COUNTER0) & ARMV8_COUNTER_MASK)
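/*
 * Worked example: the cycle counter is special-cased at perf idx 0, so
 * ARMV8_IDX_TO_COUNTER(ARMV8_IDX_COUNTER0) == 0: perf idx 1 maps to
 * hardware event counter 0, idx 2 to counter 1, and so on.
 */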
/*
 * Per-CPU PMCR: config reg
 */
#define ARMV8_PMCR_E (1 << 0) /* Enable all counters */
#define ARMV8_PMCR_P (1 << 1) /* Reset all counters */
#define ARMV8_PMCR_C (1 << 2) /* Cycle counter reset */
#define ARMV8_PMCR_D (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X (1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV8_PMCR_N_SHIFT 11 /* Number of counters supported */
#define ARMV8_PMCR_N_MASK 0x1f
#define ARMV8_PMCR_MASK 0x3f /* Mask for writable bits */

/*
 * PMOVSR: counters overflow flag status reg
 */
#define ARMV8_OVSR_MASK 0xffffffff /* Mask for writable bits */
#define ARMV8_OVERFLOWED_MASK ARMV8_OVSR_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define ARMV8_EVTYPE_MASK 0xc80003ff /* Mask for writable bits */
#define ARMV8_EVTYPE_EVENT 0x3ff /* Mask for EVENT bits */

/*
 * Event filters for PMUv3
 */
#define ARMV8_EXCLUDE_EL1 (1 << 31)
#define ARMV8_EXCLUDE_EL0 (1 << 30)
#define ARMV8_INCLUDE_EL2 (1 << 27)
static inline u32 armv8pmu_pmcr_read(void)
{
        u32 val;

        asm volatile("mrs %0, pmcr_el0" : "=r" (val));
        return val;
}

static inline void armv8pmu_pmcr_write(u32 val)
{
        val &= ARMV8_PMCR_MASK;
        isb();
        asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
        return pmovsr & ARMV8_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
        return idx >= ARMV8_IDX_CYCLE_COUNTER &&
                idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
        return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        asm volatile("msr pmselr_el0, %0" :: "r" (counter));
        isb();

        return idx;
}
static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;
        u32 value = 0;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

        return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u writing wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                asm volatile("msr pmccntr_el0, %0" :: "r" (value));
        else if (armv8pmu_select_counter(idx) == idx)
                asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}

static inline void armv8pmu_write_evtype(int idx, u32 val)
{
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_EVTYPE_MASK;
                asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
        }
}

static inline int armv8pmu_enable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
        return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
        asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
        isb();

        return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
        u32 value;

        /* Read */
        asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));

        /* Write to clear flags */
        value &= ARMV8_OVSR_MASK;
        asm volatile("msr pmovsclr_el0, %0" :: "r" (value));

        return value;
}
static void armv8pmu_enable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters).
         */
        armv8pmu_write_evtype(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv8pmu_enable_intens(idx);

        /*
         * Enable counter
         */
        armv8pmu_enable_counter(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv8pmu_disable_intens(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
        u32 pmovsr;
        struct perf_sample_data data;
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
        struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmovsr = armv8pmu_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv8pmu_has_overflowed(pmovsr))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                /* Ignore if we don't have an event. */
                if (!event)
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event);
                perf_sample_data_init(&data, 0, hwc->last_period);
                if (!armpmu_event_set_period(event))
                        continue;

                if (perf_event_overflow(event, &data, regs))
                        cpu_pmu->disable(event);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}
static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Enable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Disable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
                                  struct perf_event *event)
{
        int idx;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        unsigned long evtype = hwc->config_base & ARMV8_EVTYPE_EVENT;

        /* Always place a cycle-counting event on the dedicated cycle counter. */
        if (evtype == ARMV8_PMUV3_PERFCTR_CLOCK_CYCLES) {
                if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV8_IDX_CYCLE_COUNTER;
        }

        /*
         * For anything other than a cycle-counting event, try and use
         * the event counters.
         */
        for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
                if (!test_and_set_bit(idx, cpuc->used_mask))
                        return idx;
        }

        /* The counters are all in use. */
        return -EAGAIN;
}
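/*
 * Note that cpu_pmu->num_events includes the cycle counter, so the loop
 * above scans the event counters at perf indices
 * ARMV8_IDX_COUNTER0..num_events - 1.
 */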
/*
 * Add an event filter to a given event. This will only work for PMUv3 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
                                     struct perf_event_attr *attr)
{
        unsigned long config_base = 0;

        if (attr->exclude_idle)
                return -EPERM;
        if (attr->exclude_user)
                config_base |= ARMV8_EXCLUDE_EL0;
        if (attr->exclude_kernel)
                config_base |= ARMV8_EXCLUDE_EL1;
        if (!attr->exclude_hv)
                config_base |= ARMV8_INCLUDE_EL2;

        /*
         * Install the filter into config_base as this is used to
         * construct the event type.
         */
        event->config_base = config_base;

        return 0;
}
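/*
 * Worked example (illustrative): an attr with .exclude_user = 1 and
 * .exclude_hv = 1 yields config_base == ARMV8_EXCLUDE_EL0; once merged
 * into PMXEVTYPER_EL0 via armv8pmu_enable_event(), the counter counts at
 * EL1 but not at EL0 or EL2.
 */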
static void armv8pmu_reset(void *info)
{
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
        u32 idx, nb_cnt = cpu_pmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
                armv8pmu_disable_counter(idx);
                armv8pmu_disable_intens(idx);
        }

        /* Initialize & Reset PMNC: C and P bits. */
        armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_pmuv3_perf_map,
                                &armv8_pmuv3_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a53_perf_map,
                                &armv8_a53_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a57_perf_map,
                                &armv8_a57_perf_cache_map,
                                ARMV8_EVTYPE_EVENT);
}
static void armv8pmu_read_num_pmnc_events(void *info)
{
        int *nb_cnt = info;

        /* Read the number of CNTx counters supported from PMNC. */
        *nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMCR_N_SHIFT) & ARMV8_PMCR_N_MASK;

        /* Add the CPU cycles counter. */
        *nb_cnt += 1;
}
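/*
 * For example, a PMCR_EL0.N field of 6 (six event counters, as on
 * Cortex-A53/A57) gives num_events = 7 once the dedicated cycle counter
 * is counted in.
 */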
static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
{
        return smp_call_function_any(&arm_pmu->supported_cpus,
                                     armv8pmu_read_num_pmnc_events,
                                     &arm_pmu->num_events, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
        cpu_pmu->handle_irq = armv8pmu_handle_irq;
        cpu_pmu->enable = armv8pmu_enable_event;
        cpu_pmu->disable = armv8pmu_disable_event;
        cpu_pmu->read_counter = armv8pmu_read_counter;
        cpu_pmu->write_counter = armv8pmu_write_counter;
        cpu_pmu->get_event_idx = armv8pmu_get_event_idx;
        cpu_pmu->start = armv8pmu_start;
        cpu_pmu->stop = armv8pmu_stop;
        cpu_pmu->reset = armv8pmu_reset;
        cpu_pmu->max_period = (1LLU << 32) - 1;
        cpu_pmu->set_event_filter = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_pmuv3";
        cpu_pmu->map_event = armv8_pmuv3_map_event;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a53";
        cpu_pmu->map_event = armv8_a53_map_event;
        cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a57";
        cpu_pmu->map_event = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a72";
        cpu_pmu->map_event = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
        return armv8pmu_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
        { .compatible = "arm,armv8-pmuv3", .data = armv8_pmuv3_init },
        { .compatible = "arm,cortex-a53-pmu", .data = armv8_a53_pmu_init },
        { .compatible = "arm,cortex-a57-pmu", .data = armv8_a57_pmu_init },
        { .compatible = "arm,cortex-a72-pmu", .data = armv8_a72_pmu_init },
        {},
};
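/*
 * Illustrative only: a devicetree node matched by the table above might
 * look like the following, where the PPI number and trigger flags are
 * board-specific:
 *
 *         pmu {
 *                 compatible = "arm,cortex-a53-pmu";
 *                 interrupts = <1 7 4>;
 *         };
 */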
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
        return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
        .driver = {
                .name = "armv8-pmu",
                .of_match_table = armv8_pmu_of_device_ids,
        },
        .probe = armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
        return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);