/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <asm/irq_regs.h>
#include <asm/perf_event.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>
/*
 * ARMv8 PMUv3 Performance Events handling code.
 *
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_SW_INCR 0x00
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL 0x03
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE 0x04
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED 0x10
#define ARMV8_PMUV3_PERFCTR_CPU_CYCLES 0x11
#define ARMV8_PMUV3_PERFCTR_BR_PRED 0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INST_RETIRED 0x08
#define ARMV8_PMUV3_PERFCTR_INST_SPEC 0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_LD_RETIRED 0x06
#define ARMV8_PMUV3_PERFCTR_ST_RETIRED 0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN 0x09
#define ARMV8_PMUV3_PERFCTR_EXC_RETURN 0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED 0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED 0x0C
#define ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED 0x0D
#define ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED 0x0E
#define ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED 0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED 0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN 0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED 0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL 0x01
#define ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL 0x02
#define ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL 0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS 0x13
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE 0x14
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB 0x15
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE 0x16
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL 0x17
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB 0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS 0x19
#define ARMV8_PMUV3_PERFCTR_MEMORY_ERROR 0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES 0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE 0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE 0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND 0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND 0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB 0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB 0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE 0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL 0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE 0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL 0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE 0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB 0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL 0x2D
#define ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL 0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB 0x2F
#define ARMV8_PMUV3_PERFCTR_L2I_TLB 0x30

/* ARMv8 recommended implementation defined event types. */
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD 0x40
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR 0x41
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD 0x42
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR 0x43
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_INNER 0x44
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_OUTER 0x45
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_VICTIM 0x46
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_CLEAN 0x47
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_INVAL 0x48
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD 0x4C
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR 0x4D
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD 0x4E
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR 0x4F
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_RD 0x50
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WR 0x51
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_RD 0x52
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_WR 0x53
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_VICTIM 0x56
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_CLEAN 0x57
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_INVAL 0x58
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_RD 0x5C
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_WR 0x5D
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_RD 0x5E
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_WR 0x5F
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD 0x60
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR 0x61
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_SHARED 0x62
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NOT_SHARED 0x63
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NORMAL 0x64
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_PERIPH 0x65
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_RD 0x66
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_WR 0x67
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LD_SPEC 0x68
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_ST_SPEC 0x69
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LDST_SPEC 0x6A
#define ARMV8_IMPDEF_PERFCTR_LDREX_SPEC 0x6C
#define ARMV8_IMPDEF_PERFCTR_STREX_PASS_SPEC 0x6D
#define ARMV8_IMPDEF_PERFCTR_STREX_FAIL_SPEC 0x6E
#define ARMV8_IMPDEF_PERFCTR_STREX_SPEC 0x6F
#define ARMV8_IMPDEF_PERFCTR_LD_SPEC 0x70
#define ARMV8_IMPDEF_PERFCTR_ST_SPEC 0x71
#define ARMV8_IMPDEF_PERFCTR_LDST_SPEC 0x72
#define ARMV8_IMPDEF_PERFCTR_DP_SPEC 0x73
#define ARMV8_IMPDEF_PERFCTR_ASE_SPEC 0x74
#define ARMV8_IMPDEF_PERFCTR_VFP_SPEC 0x75
#define ARMV8_IMPDEF_PERFCTR_PC_WRITE_SPEC 0x76
#define ARMV8_IMPDEF_PERFCTR_CRYPTO_SPEC 0x77
#define ARMV8_IMPDEF_PERFCTR_BR_IMMED_SPEC 0x78
#define ARMV8_IMPDEF_PERFCTR_BR_RETURN_SPEC 0x79
#define ARMV8_IMPDEF_PERFCTR_BR_INDIRECT_SPEC 0x7A
#define ARMV8_IMPDEF_PERFCTR_ISB_SPEC 0x7C
#define ARMV8_IMPDEF_PERFCTR_DSB_SPEC 0x7D
#define ARMV8_IMPDEF_PERFCTR_DMB_SPEC 0x7E
#define ARMV8_IMPDEF_PERFCTR_EXC_UNDEF 0x81
#define ARMV8_IMPDEF_PERFCTR_EXC_SVC 0x82
#define ARMV8_IMPDEF_PERFCTR_EXC_PABORT 0x83
#define ARMV8_IMPDEF_PERFCTR_EXC_DABORT 0x84
#define ARMV8_IMPDEF_PERFCTR_EXC_IRQ 0x86
#define ARMV8_IMPDEF_PERFCTR_EXC_FIQ 0x87
#define ARMV8_IMPDEF_PERFCTR_EXC_SMC 0x88
#define ARMV8_IMPDEF_PERFCTR_EXC_HVC 0x8A
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_PABORT 0x8B
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_DABORT 0x8C
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_OTHER 0x8D
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_IRQ 0x8E
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_FIQ 0x8F
#define ARMV8_IMPDEF_PERFCTR_RC_LD_SPEC 0x90
#define ARMV8_IMPDEF_PERFCTR_RC_ST_SPEC 0x91
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_RD 0xA0
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WR 0xA1
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_RD 0xA2
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_WR 0xA3
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_VICTIM 0xA6
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_CLEAN 0xA7
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_INVAL 0xA8

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREF_LINEFILL 0xC2

/* ARMv8 Cavium ThunderX specific event types. */
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST 0xE9
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS 0xEA
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS 0xEB
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS 0xEC
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS 0xED
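/*
 * The maps below translate the generic perf_event_open() hardware and
 * cache event types into the architectural event numbers defined above.
 * PERF_MAP_ALL_UNSUPPORTED / PERF_CACHE_MAP_ALL_UNSUPPORTED first mark
 * every slot as unsupported; the designated initialisers then override
 * just the entries each PMU actually provides.
 */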
/* PMUv3 HW events mapping. */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};
/* Cavium ThunderX events mapping. */
static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
/* Broadcom Vulcan events mapping. */
static const unsigned armv8_vulcan_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_BR_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
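/*
 * The cache maps below are indexed by [cache type][operation][result],
 * where C(x) expands to PERF_COUNT_HW_CACHE_##x in the arm_pmu
 * framework.  For example, [C(L1D)][C(OP_READ)][C(RESULT_MISS)] is the
 * event used to count L1 data cache read misses.
 */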
static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						[PERF_COUNT_HW_CACHE_OP_MAX]
						[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREF_LINEFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						   [PERF_COUNT_HW_CACHE_OP_MAX]
						   [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_vulcan_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						  [PERF_COUNT_HW_CACHE_OP_MAX]
						  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,

	[C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD,
	[C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR,
};
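/*
 * Each named event below is exposed to userspace under
 * /sys/bus/event_source/devices/<pmu>/events/<name>, where it reads back
 * as "event=0xNNN".  perf can then open it symbolically, for example
 * (assuming the generic "armv8_pmuv3" PMU name set at init time):
 *
 *	perf stat -e armv8_pmuv3/l1d_cache/ -- <command>
 */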
static ssize_t
armv8pmu_events_sysfs_show(struct device *dev,
			   struct device_attribute *attr, char *page)
{
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr);

	return sprintf(page, "event=0x%03llx\n", pmu_attr->id);
}

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR(name, armv8_event_attr_##name, \
		       config, armv8pmu_events_sysfs_show)
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1D_CACHE);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_LD_RETIRED);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_ST_RETIRED);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INST_RETIRED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_RETURN);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CPU_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_BR_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1I_CACHE);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2D_CACHE);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEMORY_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_INST_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
/* Don't expose the chain event in /sys, since it's useless in isolation */
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l2i_tlb_refill, ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l2i_tlb, ARMV8_PMUV3_PERFCTR_L2I_TLB);
static struct attribute *armv8_pmuv3_event_attrs[] = {
	&armv8_event_attr_sw_incr.attr.attr,
	&armv8_event_attr_l1i_cache_refill.attr.attr,
	&armv8_event_attr_l1i_tlb_refill.attr.attr,
	&armv8_event_attr_l1d_cache_refill.attr.attr,
	&armv8_event_attr_l1d_cache.attr.attr,
	&armv8_event_attr_l1d_tlb_refill.attr.attr,
	&armv8_event_attr_ld_retired.attr.attr,
	&armv8_event_attr_st_retired.attr.attr,
	&armv8_event_attr_inst_retired.attr.attr,
	&armv8_event_attr_exc_taken.attr.attr,
	&armv8_event_attr_exc_return.attr.attr,
	&armv8_event_attr_cid_write_retired.attr.attr,
	&armv8_event_attr_pc_write_retired.attr.attr,
	&armv8_event_attr_br_immed_retired.attr.attr,
	&armv8_event_attr_br_return_retired.attr.attr,
	&armv8_event_attr_unaligned_ldst_retired.attr.attr,
	&armv8_event_attr_br_mis_pred.attr.attr,
	&armv8_event_attr_cpu_cycles.attr.attr,
	&armv8_event_attr_br_pred.attr.attr,
	&armv8_event_attr_mem_access.attr.attr,
	&armv8_event_attr_l1i_cache.attr.attr,
	&armv8_event_attr_l1d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_cache.attr.attr,
	&armv8_event_attr_l2d_cache_refill.attr.attr,
	&armv8_event_attr_l2d_cache_wb.attr.attr,
	&armv8_event_attr_bus_access.attr.attr,
	&armv8_event_attr_memory_error.attr.attr,
	&armv8_event_attr_inst_spec.attr.attr,
	&armv8_event_attr_ttbr_write_retired.attr.attr,
	&armv8_event_attr_bus_cycles.attr.attr,
	&armv8_event_attr_l1d_cache_allocate.attr.attr,
	&armv8_event_attr_l2d_cache_allocate.attr.attr,
	&armv8_event_attr_br_retired.attr.attr,
	&armv8_event_attr_br_mis_pred_retired.attr.attr,
	&armv8_event_attr_stall_frontend.attr.attr,
	&armv8_event_attr_stall_backend.attr.attr,
	&armv8_event_attr_l1d_tlb.attr.attr,
	&armv8_event_attr_l1i_tlb.attr.attr,
	&armv8_event_attr_l2i_cache.attr.attr,
	&armv8_event_attr_l2i_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache_allocate.attr.attr,
	&armv8_event_attr_l3d_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache.attr.attr,
	&armv8_event_attr_l3d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_tlb_refill.attr.attr,
	&armv8_event_attr_l2i_tlb_refill.attr.attr,
	&armv8_event_attr_l2d_tlb.attr.attr,
	&armv8_event_attr_l2i_tlb.attr.attr,
	NULL,
};
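/*
 * Not every CPU implements every common event, so only the events the
 * hardware advertises in its PMCEID registers (cached in pmceid_bitmap
 * at probe time) are made visible in sysfs.
 */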
static umode_t
armv8pmu_event_attr_is_visible(struct kobject *kobj,
			       struct attribute *attr, int unused)
{
	struct device *dev = kobj_to_dev(kobj);
	struct pmu *pmu = dev_get_drvdata(dev);
	struct arm_pmu *cpu_pmu = container_of(pmu, struct arm_pmu, pmu);
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr.attr);

	if (test_bit(pmu_attr->id, cpu_pmu->pmceid_bitmap))
		return attr->mode;

	return 0;
}
static struct attribute_group armv8_pmuv3_events_attr_group = {
	.name = "events",
	.attrs = armv8_pmuv3_event_attrs,
	.is_visible = armv8pmu_event_attr_is_visible,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
	.name = "format",
	.attrs = armv8_pmuv3_format_attrs,
};

static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
	&armv8_pmuv3_events_attr_group,
	&armv8_pmuv3_format_attr_group,
	NULL,
};
/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0 1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
	(((x) - ARMV8_IDX_COUNTER0) & ARMV8_PMU_COUNTER_MASK)
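/*
 * Worked example of the mapping above: the perf layer reserves index 0
 * (ARMV8_IDX_CYCLE_COUNTER) for the dedicated cycle counter, so event
 * counter indices start at 1 (ARMV8_IDX_COUNTER0).  ARMV8_IDX_TO_COUNTER
 * subtracts that offset to recover the hardware counter number, e.g.
 * perf index 1 selects hardware event counter 0.
 */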
static inline u32 armv8pmu_pmcr_read(void)
{
	return read_sysreg(pmcr_el0);
}

static inline void armv8pmu_pmcr_write(u32 val)
{
	val &= ARMV8_PMU_PMCR_MASK;
	isb();
	write_sysreg(val, pmcr_el0);
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
	return pmovsr & ARMV8_PMU_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
	return idx >= ARMV8_IDX_CYCLE_COUNTER &&
		idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}
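/*
 * The ARMv8 PMU exposes the event counters indirectly: software writes
 * the counter number to PMSELR_EL0 and then accesses the selected
 * counter through PMXEVCNTR_EL0 (value) and PMXEVTYPER_EL0 (event type).
 * The isb() below ensures the new selection takes effect before any
 * subsequent access to those registers.
 */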
static inline int armv8pmu_select_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(counter, pmselr_el0);
	isb();

	return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		value = read_sysreg(pmccntr_el0);
	else if (armv8pmu_select_counter(idx) == idx)
		value = read_sysreg(pmxevcntr_el0);

	return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
		/*
		 * Set the upper 32 bits as this is a 64-bit counter, but we
		 * only count using the lower 32 bits and we want an interrupt
		 * when it overflows.
		 */
		u64 value64 = 0xffffffff00000000ULL | value;

		write_sysreg(value64, pmccntr_el0);
	} else if (armv8pmu_select_counter(idx) == idx)
		write_sysreg(value, pmxevcntr_el0);
}
static inline void armv8pmu_write_evtype(int idx, u32 val)
{
	if (armv8pmu_select_counter(idx) == idx) {
		val &= ARMV8_PMU_EVTYPE_MASK;
		write_sysreg(val, pmxevtyper_el0);
	}
}

static inline int armv8pmu_enable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmcntenset_el0);
	return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmcntenclr_el0);
	return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmintenset_el1);
	return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmintenclr_el1);
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	write_sysreg(BIT(counter), pmovsclr_el0);
	isb();

	return idx;
}
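/*
 * Note the set/clear register pairs above: writing a 1 bit to
 * PMCNTENSET_EL0/PMINTENSET_EL1 enables the corresponding counter or
 * interrupt, while writing a 1 bit to PMCNTENCLR_EL0/PMINTENCLR_EL1
 * disables it; zero bits are ignored, so no read-modify-write cycle is
 * needed.  PMOVSCLR_EL0 behaves the same way for the overflow flags,
 * which armv8pmu_getreset_flags() below exploits to read and clear the
 * pending flags in one exchange.
 */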
static inline u32 armv8pmu_getreset_flags(void)
{
	u32 value;

	/* Read */
	value = read_sysreg(pmovsclr_el0);

	/* Write to clear flags */
	value &= ARMV8_PMU_OVSR_MASK;
	write_sysreg(value, pmovsclr_el0);

	return value;
}
static void armv8pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 */
	armv8pmu_write_evtype(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv8pmu_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv8pmu_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv8pmu_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
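/*
 * Overflow interrupt handling: a single PMU interrupt covers all
 * counters, so the handler below reads and clears the overflow flags in
 * one go, then walks every active event, folds the overflowed count into
 * the perf state and reprograms the counter for the next sample period.
 */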
static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmovsr;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmovsr = armv8pmu_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv8pmu_has_overflowed(pmovsr))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
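/*
 * Counter allocation: the CPU cycles event always tries the dedicated
 * cycle counter first; every other event takes the first free slot in
 * the used_mask bitmap.  -EAGAIN tells the core that all suitable
 * counters are currently busy.
 */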
static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV8_PMUV3_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV8_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try to use
	 * the event counters.
	 */
	for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
/*
 * Add an event filter to a given event. This will only work for PMUv3 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;
	if (is_kernel_in_hyp_mode() &&
	    attr->exclude_kernel != attr->exclude_hv)
		return -EINVAL;
	if (attr->exclude_user)
		config_base |= ARMV8_PMU_EXCLUDE_EL0;
	if (!is_kernel_in_hyp_mode() && attr->exclude_kernel)
		config_base |= ARMV8_PMU_EXCLUDE_EL1;
	if (!attr->exclude_hv)
		config_base |= ARMV8_PMU_INCLUDE_EL2;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
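/*
 * Exception level filtering above follows perf's exclude_* flags:
 * exclude_user maps to EXCLUDE_EL0 and exclude_kernel to EXCLUDE_EL1,
 * except when the kernel itself runs at EL2 (VHE).  In that case the
 * kernel and hypervisor levels coincide, so exclude_kernel and
 * exclude_hv must agree and the EL1 filter is left untouched.
 */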
static void armv8pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv8pmu_disable_counter(idx);
		armv8pmu_disable_intens(idx);
	}

	/*
	 * Initialize & Reset PMNC. Request overflow interrupt for
	 * 64 bit cycle counter but cheat in armv8pmu_write_counter().
	 */
	armv8pmu_pmcr_write(ARMV8_PMU_PMCR_P | ARMV8_PMU_PMCR_C |
			    ARMV8_PMU_PMCR_LC);
}
static int armv8_pmuv3_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_pmuv3_perf_map,
				&armv8_pmuv3_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_a53_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a53_perf_map,
				&armv8_a53_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a57_perf_map,
				&armv8_a57_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_thunder_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_thunder_perf_map,
				&armv8_thunder_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_vulcan_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_vulcan_perf_map,
				&armv8_vulcan_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}
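/*
 * PMU probing: PMCR_EL0.N reports how many event counters this CPU
 * implements (the cycle counter is extra), and PMCEID0_EL0/PMCEID1_EL0
 * advertise which of the common events are supported.  The probe below
 * is cross-called on a CPU belonging to this PMU so the registers read
 * back the right values.
 */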
static void __armv8pmu_probe_pmu(void *info)
{
	struct arm_pmu *cpu_pmu = info;
	u32 pmceid[2];

	/* Read the number of CNTx counters supported from PMNC. */
	cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
		& ARMV8_PMU_PMCR_N_MASK;

	/* Add the CPU cycles counter. */
	cpu_pmu->num_events += 1;

	pmceid[0] = read_sysreg(pmceid0_el0);
	pmceid[1] = read_sysreg(pmceid1_el0);

	bitmap_from_u32array(cpu_pmu->pmceid_bitmap,
			     ARMV8_PMUV3_MAX_COMMON_EVENTS, pmceid,
			     ARRAY_SIZE(pmceid));
}
static int armv8pmu_probe_pmu(struct arm_pmu *cpu_pmu)
{
	return smp_call_function_any(&cpu_pmu->supported_cpus,
				     __armv8pmu_probe_pmu,
				     cpu_pmu, 1);
}
static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
	cpu_pmu->handle_irq		= armv8pmu_handle_irq;
	cpu_pmu->enable			= armv8pmu_enable_event;
	cpu_pmu->disable		= armv8pmu_disable_event;
	cpu_pmu->read_counter		= armv8pmu_read_counter;
	cpu_pmu->write_counter		= armv8pmu_write_counter;
	cpu_pmu->get_event_idx		= armv8pmu_get_event_idx;
	cpu_pmu->start			= armv8pmu_start;
	cpu_pmu->stop			= armv8pmu_stop;
	cpu_pmu->reset			= armv8pmu_reset;
	cpu_pmu->max_period		= (1LLU << 32) - 1;
	cpu_pmu->set_event_filter	= armv8pmu_set_event_filter;
}
static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_pmuv3";
	cpu_pmu->map_event = armv8_pmuv3_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a53";
	cpu_pmu->map_event = armv8_a53_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a57";
	cpu_pmu->map_event = armv8_a57_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a72";
	cpu_pmu->map_event = armv8_a57_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cavium_thunder";
	cpu_pmu->map_event = armv8_thunder_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_vulcan_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_brcm_vulcan";
	cpu_pmu->map_event = armv8_vulcan_map_event;
	cpu_pmu->pmu.attr_groups = armv8_pmuv3_attr_groups;
	return armv8pmu_probe_pmu(cpu_pmu);
}
static const struct of_device_id armv8_pmu_of_device_ids[] = {
	{.compatible = "arm,armv8-pmuv3",	.data = armv8_pmuv3_init},
	{.compatible = "arm,cortex-a53-pmu",	.data = armv8_a53_pmu_init},
	{.compatible = "arm,cortex-a57-pmu",	.data = armv8_a57_pmu_init},
	{.compatible = "arm,cortex-a72-pmu",	.data = armv8_a72_pmu_init},
	{.compatible = "cavium,thunder-pmu",	.data = armv8_thunder_pmu_init},
	{.compatible = "brcm,vulcan-pmu",	.data = armv8_vulcan_pmu_init},
	{},
};
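/*
 * Device-tree matching: the "compatible" string in the PMU node selects
 * the init function via .data above.  For example, a node with
 * compatible = "arm,cortex-a57-pmu" gets the Cortex-A57 event maps,
 * while the generic "arm,armv8-pmuv3" falls back to the architected
 * events only.
 */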
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids, NULL);
}

static struct platform_driver armv8_pmu_driver = {
	.driver = {
		.name = "armv8-pmu",
		.of_match_table = armv8_pmu_of_device_ids,
	},
	.probe = armv8_pmu_device_probe,
};

static int __init register_armv8_pmu_driver(void)
{
	return platform_driver_register(&armv8_pmu_driver);
}
device_initcall(register_armv8_pmu_driver);