// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Common Performance counter support functions for PowerISA v2.07 processors.
 *
 * Copyright 2009 Paul Mackerras, IBM Corporation.
 * Copyright 2013 Michael Ellerman, IBM Corporation.
 * Copyright 2016 Madhavan Srinivasan, IBM Corporation.
 */
#include "isa207-common.h"

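/*
 * These format attributes describe the raw event-code layout to userspace;
 * the perf core exports them via sysfs, typically under
 * /sys/bus/event_source/devices/<pmu>/format/. An illustrative invocation
 * (the event code here is hypothetical):
 *
 *   perf stat -e cpu/event=0x1001e/ -- true
 */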
PMU_FORMAT_ATTR(event, "config:0-49");
PMU_FORMAT_ATTR(pmcxsel, "config:0-7");
PMU_FORMAT_ATTR(mark, "config:8");
PMU_FORMAT_ATTR(combine, "config:11");
PMU_FORMAT_ATTR(unit, "config:12-15");
PMU_FORMAT_ATTR(pmc, "config:16-19");
PMU_FORMAT_ATTR(cache_sel, "config:20-23");
PMU_FORMAT_ATTR(sample_mode, "config:24-28");
PMU_FORMAT_ATTR(thresh_sel, "config:29-31");
PMU_FORMAT_ATTR(thresh_stop, "config:32-35");
PMU_FORMAT_ATTR(thresh_start, "config:36-39");
PMU_FORMAT_ATTR(thresh_cmp, "config:40-49");

static struct attribute *isa207_pmu_format_attr[] = {
	&format_attr_event.attr,
	&format_attr_pmcxsel.attr,
	&format_attr_mark.attr,
	&format_attr_combine.attr,
	&format_attr_unit.attr,
	&format_attr_pmc.attr,
	&format_attr_cache_sel.attr,
	&format_attr_sample_mode.attr,
	&format_attr_thresh_sel.attr,
	&format_attr_thresh_stop.attr,
	&format_attr_thresh_start.attr,
	&format_attr_thresh_cmp.attr,
	NULL,
};

struct attribute_group isa207_pmu_format_group = {
	.name = "format",
	.attrs = isa207_pmu_format_attr,
};

static inline bool event_is_fab_match(u64 event)
{
	/* Only check pmc, unit and pmcxsel, ignore the edge bit (0) */
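	/*
	 * Illustrative breakdown of the mask below, per the field layout in
	 * the format attributes above (pmc at bits 16-19, unit at bits
	 * 12-15, pmcxsel at bits 0-7):
	 *   0xf0000 (pmc) | 0xf000 (unit) | 0xfe (pmcxsel minus the edge bit)
	 */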
	event &= 0xff0fe;

	/* PM_MRK_FAB_RSP_MATCH & PM_MRK_FAB_RSP_MATCH_CYC */
	return (event == 0x30056 || event == 0x4f052);
}

static bool is_event_valid(u64 event)
{
	u64 valid_mask = EVENT_VALID_MASK;

	if (cpu_has_feature(CPU_FTR_ARCH_31))
		valid_mask = p10_EVENT_VALID_MASK;
	else if (cpu_has_feature(CPU_FTR_ARCH_300))
		valid_mask = p9_EVENT_VALID_MASK;

	return !(event & ~valid_mask);
}

static inline bool is_event_marked(u64 event)
{
	if (event & EVENT_IS_MARKED)
		return true;

	return false;
}

static unsigned long sdar_mod_val(u64 event)
{
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		return p10_SDAR_MODE(event);

	return p9_SDAR_MODE(event);
}

static void mmcra_sdar_mode(u64 event, unsigned long *mmcra)
{
	/*
	 * MMCRA[SDAR_MODE] specifies how the SDAR should be updated in
	 * continuous sampling mode.
	 *
	 * In the case of Power8:
	 * MMCRA[SDAR_MODE] will be programmed as "0b01" for continuous
	 * sampling mode and will be unchanged when setting MMCRA[63]
	 * (marked events).
	 *
	 * In the case of Power9/Power10:
	 * Marked event: MMCRA[SDAR_MODE] will be set to 0b00 ('No Updates')
	 * if the event is marked or if the group already has any marked
	 * events.
	 * For the rest:
	 * MMCRA[SDAR_MODE] will be set from the event code. If sdar_mode
	 * from the event is zero, default to 0b10 (update on dcache miss),
	 * since hardware requires that we set a non-zero value.
	 */
	if (cpu_has_feature(CPU_FTR_ARCH_300)) {
		if (is_event_marked(event) || (*mmcra & MMCRA_SAMPLE_ENABLE))
			*mmcra &= MMCRA_SDAR_MODE_NO_UPDATES;
		else if (sdar_mod_val(event))
			*mmcra |= sdar_mod_val(event) << MMCRA_SDAR_MODE_SHIFT;
		else
			*mmcra |= MMCRA_SDAR_MODE_DCACHE;
	} else
		*mmcra |= MMCRA_SDAR_MODE_TLB;
}

static u64 p10_thresh_cmp_val(u64 value)
{
	int exp = 0;
	u64 result = value;

	if (!value)
		return value;

	/*
	 * In the case of P10, the thresh_cmp value is not part of the raw
	 * event code and is provided via the attr.config1 parameter. To
	 * program the threshold in MMCRA, take an 18-bit number N, then
	 * shift it right 2 places and increment the exponent E by 1 until
	 * the upper 10 bits of N are zero. Write E to the threshold
	 * exponent and write the lower 8 bits of N to the threshold
	 * mantissa.
	 * The max threshold that can be written is 261120.
	 */
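	/*
	 * Illustrative example: N = 1000 needs 10 bits, so one shift by 2
	 * gives E = 1, N = 250; 250 fits in 8 bits, so the encoded value
	 * is (1 << 8) | 250 = 0x1fa, which decodes back to
	 * 250 << (2 * 1) = 1000.
	 */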
	if (cpu_has_feature(CPU_FTR_ARCH_31)) {
		if (value > 261120)
			value = 261120;
		while ((64 - __builtin_clzl(value)) > 8) {
			exp++;
			value >>= 2;
		}

		/*
		 * Note that it is invalid to write a mantissa whose upper
		 * 2 bits are zero, unless the exponent is also zero.
		 */
		if (!(value & 0xC0) && exp)
			result = 0;
		else
			result = (exp << 8) | value;
	}
	return result;
}

static u64 thresh_cmp_val(u64 value)
{
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		value = p10_thresh_cmp_val(value);

	/*
	 * The location of the threshold compare bits in MMCRA differs on
	 * p8, so use a different shift value there.
	 */
	if (cpu_has_feature(CPU_FTR_ARCH_300))
		return value << p9_MMCRA_THR_CMP_SHIFT;
	else
		return value << MMCRA_THR_CMP_SHIFT;
}

static unsigned long combine_from_event(u64 event)
{
	if (cpu_has_feature(CPU_FTR_ARCH_300))
		return p9_EVENT_COMBINE(event);

	return EVENT_COMBINE(event);
}

static unsigned long combine_shift(unsigned long pmc)
{
	if (cpu_has_feature(CPU_FTR_ARCH_300))
		return p9_MMCR1_COMBINE_SHIFT(pmc);

	return MMCR1_COMBINE_SHIFT(pmc);
}

static inline bool event_is_threshold(u64 event)
{
	return (event >> EVENT_THR_SEL_SHIFT) & EVENT_THR_SEL_MASK;
}

static bool is_thresh_cmp_valid(u64 event)
{
	unsigned int cmp, exp;

	if (cpu_has_feature(CPU_FTR_ARCH_31))
		return p10_thresh_cmp_val(event) != 0;

	/*
	 * Check that the upper two bits of the mantissa are not zero,
	 * unless the exponent is also zero. See the THRESH_CMP_MANTISSA doc.
	 */
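	/*
	 * Illustrative case: cmp = 0x80 gives exp = 1 with the mantissa
	 * bits 0x60 clear and is rejected; cmp = 0xe0 (exp = 1, upper
	 * mantissa bits set) is accepted.
	 */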

	cmp = (event >> EVENT_THR_CMP_SHIFT) & EVENT_THR_CMP_MASK;
	exp = cmp >> 7;

	if (exp && (cmp & 0x60) == 0)
		return false;

	return true;
}

static unsigned int dc_ic_rld_quad_l1_sel(u64 event)
{
	unsigned int cache;

	cache = (event >> EVENT_CACHE_SEL_SHIFT) & MMCR1_DC_IC_QUAL_MASK;
	return cache;
}

static inline u64 isa207_find_source(u64 idx, u32 sub_idx)
{
	u64 ret = PERF_MEM_NA;

	switch(idx) {
	case 0:
		/* Nothing to do */
		break;
	case 1:
		ret = PH(LVL, L1) | LEVEL(L1) | P(SNOOP, HIT);
		break;
	case 2:
		ret = PH(LVL, L2) | LEVEL(L2) | P(SNOOP, HIT);
		break;
	case 3:
		ret = PH(LVL, L3) | LEVEL(L3) | P(SNOOP, HIT);
		break;
	case 4:
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			ret = P(SNOOP, HIT);

			if (sub_idx == 1)
				ret |= PH(LVL, LOC_RAM) | LEVEL(RAM);
			else if (sub_idx == 2 || sub_idx == 3)
				ret |= P(LVL, HIT) | LEVEL(PMEM);
			else if (sub_idx == 4)
				ret |= PH(LVL, REM_RAM1) | REM | LEVEL(RAM) | P(HOPS, 2);
			else if (sub_idx == 5 || sub_idx == 7)
				ret |= P(LVL, HIT) | LEVEL(PMEM) | REM;
			else if (sub_idx == 6)
				ret |= PH(LVL, REM_RAM2) | REM | LEVEL(RAM) | P(HOPS, 3);
		} else {
			if (sub_idx <= 1)
				ret = PH(LVL, LOC_RAM);
			else if (sub_idx > 1 && sub_idx <= 2)
				ret = PH(LVL, REM_RAM1);
			else
				ret = PH(LVL, REM_RAM2);
			ret |= P(SNOOP, HIT);
		}
		break;
	case 5:
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			ret = REM | P(HOPS, 0);

			if (sub_idx == 0 || sub_idx == 4)
				ret |= PH(LVL, L2) | LEVEL(L2) | P(SNOOP, HIT);
			else if (sub_idx == 1 || sub_idx == 5)
				ret |= PH(LVL, L2) | LEVEL(L2) | P(SNOOP, HITM);
			else if (sub_idx == 2 || sub_idx == 6)
				ret |= PH(LVL, L3) | LEVEL(L3) | P(SNOOP, HIT);
			else if (sub_idx == 3 || sub_idx == 7)
				ret |= PH(LVL, L3) | LEVEL(L3) | P(SNOOP, HITM);
		} else {
			if (sub_idx == 0)
				ret = PH(LVL, L2) | LEVEL(L2) | REM | P(SNOOP, HIT) | P(HOPS, 0);
			else if (sub_idx == 1)
				ret = PH(LVL, L2) | LEVEL(L2) | REM | P(SNOOP, HITM) | P(HOPS, 0);
			else if (sub_idx == 2 || sub_idx == 4)
				ret = PH(LVL, L3) | LEVEL(L3) | REM | P(SNOOP, HIT) | P(HOPS, 0);
			else if (sub_idx == 3 || sub_idx == 5)
				ret = PH(LVL, L3) | LEVEL(L3) | REM | P(SNOOP, HITM) | P(HOPS, 0);
		}
		break;
	case 6:
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			if (sub_idx == 0)
				ret = PH(LVL, REM_CCE1) | LEVEL(ANY_CACHE) | REM |
					P(SNOOP, HIT) | P(HOPS, 2);
			else if (sub_idx == 1)
				ret = PH(LVL, REM_CCE1) | LEVEL(ANY_CACHE) | REM |
					P(SNOOP, HITM) | P(HOPS, 2);
			else if (sub_idx == 2)
				ret = PH(LVL, REM_CCE2) | LEVEL(ANY_CACHE) | REM |
					P(SNOOP, HIT) | P(HOPS, 3);
			else if (sub_idx == 3)
				ret = PH(LVL, REM_CCE2) | LEVEL(ANY_CACHE) | REM |
					P(SNOOP, HITM) | P(HOPS, 3);
		} else {
			ret = PH(LVL, REM_CCE2);
			if (sub_idx == 0 || sub_idx == 2)
				ret |= P(SNOOP, HIT);
			else if (sub_idx == 1 || sub_idx == 3)
				ret |= P(SNOOP, HITM);
		}
		break;
	case 7:
		ret = PM(LVL, L1);
		break;
	}

	return ret;
}

void isa207_get_mem_data_src(union perf_mem_data_src *dsrc, u32 flags,
			     struct pt_regs *regs)
{
	u64 idx;
	u32 sub_idx;
	u64 sier;
	u64 val;

	/* Skip if no SIER support */
	if (!(flags & PPMU_HAS_SIER)) {
		dsrc->val = 0;
		return;
	}

	sier = mfspr(SPRN_SIER);
	val = (sier & ISA207_SIER_TYPE_MASK) >> ISA207_SIER_TYPE_SHIFT;
	if (val != 1 && val != 2 && !(val == 7 && cpu_has_feature(CPU_FTR_ARCH_31)))
		return;

	idx = (sier & ISA207_SIER_LDST_MASK) >> ISA207_SIER_LDST_SHIFT;
	sub_idx = (sier & ISA207_SIER_DATA_SRC_MASK) >> ISA207_SIER_DATA_SRC_SHIFT;

	dsrc->val = isa207_find_source(idx, sub_idx);
	if (val == 7) {
		u64 mmcra;
		u32 op_type;

		/*
		 * Type 0b111 denotes either a larx or an stcx instruction.
		 * Use the MMCRA sampling bits [57:59] along with the type
		 * value to determine the exact instruction type. If the
		 * sampling criterion is neither a load nor a store, default
		 * the type to NA.
		 */
		mmcra = mfspr(SPRN_MMCRA);

		op_type = (mmcra >> MMCRA_SAMP_ELIG_SHIFT) & MMCRA_SAMP_ELIG_MASK;
		switch (op_type) {
		case 5:
			dsrc->val |= P(OP, LOAD);
			break;
		case 7:
			dsrc->val |= P(OP, STORE);
			break;
		default:
			dsrc->val |= P(OP, NA);
			break;
		}
	} else {
		dsrc->val |= (val == 1) ? P(OP, LOAD) : P(OP, STORE);
	}
}

void isa207_get_mem_weight(u64 *weight, u64 type)
{
	union perf_sample_weight *weight_fields;
	u64 weight_lat;
	u64 mmcra = mfspr(SPRN_MMCRA);
	u64 exp = MMCRA_THR_CTR_EXP(mmcra);
	u64 mantissa = MMCRA_THR_CTR_MANT(mmcra);
	u64 sier = mfspr(SPRN_SIER);
	u64 val = (sier & ISA207_SIER_TYPE_MASK) >> ISA207_SIER_TYPE_SHIFT;

	if (cpu_has_feature(CPU_FTR_ARCH_31))
		mantissa = P10_MMCRA_THR_CTR_MANT(mmcra);

	if (val == 0 || (val == 7 && !cpu_has_feature(CPU_FTR_ARCH_31)))
		weight_lat = 0;
	else
		weight_lat = mantissa << (2 * exp);
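	/*
	 * Illustrative decode of the line above: exp = 1, mantissa = 250
	 * yields a latency of 250 << 2 = 1000 cycles.
	 */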

	/*
	 * Use the 64 bit weight field (full) if the sample type is
	 * WEIGHT.
	 *
	 * If the sample type is WEIGHT_STRUCT:
	 * - store the memory latency in the lower 32 bits.
	 * - For ISA v3.1, use the remaining two 16 bit fields of
	 *   perf_sample_weight to store cycle counter values
	 *   from sier2.
	 */
	weight_fields = (union perf_sample_weight *)weight;
	if (type & PERF_SAMPLE_WEIGHT)
		weight_fields->full = weight_lat;
	else {
		weight_fields->var1_dw = (u32)weight_lat;
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			weight_fields->var2_w = P10_SIER2_FINISH_CYC(mfspr(SPRN_SIER2));
			weight_fields->var3_w = P10_SIER2_DISPATCH_CYC(mfspr(SPRN_SIER2));
		}
	}
}

int isa207_get_constraint(u64 event, unsigned long *maskp, unsigned long *valp, u64 event_config1)
{
	unsigned int unit, pmc, cache, ebb;
	unsigned long mask, value;

	mask = value = 0;

	if (!is_event_valid(event))
		return -1;

	pmc = (event >> EVENT_PMC_SHIFT) & EVENT_PMC_MASK;
	unit = (event >> EVENT_UNIT_SHIFT) & EVENT_UNIT_MASK;
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		cache = (event >> EVENT_CACHE_SEL_SHIFT) &
			p10_EVENT_CACHE_SEL_MASK;
	else
		cache = (event >> EVENT_CACHE_SEL_SHIFT) &
			EVENT_CACHE_SEL_MASK;
	ebb = (event >> EVENT_EBB_SHIFT) & EVENT_EBB_MASK;

	if (pmc) {
		u64 base_event;

		if (pmc > 6)
			return -1;

		/* Ignore Linux defined bits when checking event below */
		base_event = event & ~EVENT_LINUX_MASK;

		if (pmc >= 5 && base_event != 0x500fa &&
				base_event != 0x600f4)
			return -1;

		mask |= CNST_PMC_MASK(pmc);
		value |= CNST_PMC_VAL(pmc);

		/*
		 * PMC5 and PMC6 are used to count cycles and instructions and
		 * they do not support most of the constraint bits. Add a check
		 * to exclude PMC5/6 from most of the constraints except for
		 * EBB/BHRB.
		 */
		if (pmc >= 5)
			goto ebb_bhrb;
	}

	if (pmc <= 4) {
		/*
		 * Add to number of counters in use. Note this includes events with
		 * a PMC of 0 - they still need a PMC, it's just assigned later.
		 * Don't count events on PMC 5 & 6, there is only one valid event
		 * on each of those counters, and they are handled above.
		 */
		mask |= CNST_NC_MASK;
		value |= CNST_NC_VAL;
	}


	if (unit >= 6 && unit <= 9) {
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			if (unit == 6) {
				mask |= CNST_L2L3_GROUP_MASK;
				value |= CNST_L2L3_GROUP_VAL(event >> p10_L2L3_EVENT_SHIFT);
			}
		} else if (cpu_has_feature(CPU_FTR_ARCH_300)) {
			mask |= CNST_CACHE_GROUP_MASK;
			value |= CNST_CACHE_GROUP_VAL(event & 0xff);

			mask |= CNST_CACHE_PMC4_MASK;
			if (pmc == 4)
				value |= CNST_CACHE_PMC4_VAL;
		} else if (cache & 0x7) {
			/*
			 * L2/L3 events contain a cache selector field, which is
			 * supposed to be programmed into MMCRC. However MMCRC is only
			 * HV writable, and there is no API for guest kernels to modify
			 * it. The solution is for the hypervisor to initialise the
			 * field to zeroes, and for us to only ever allow events that
			 * have a cache selector of zero. The bank selector (bit 3) is
			 * irrelevant, as long as the rest of the value is 0.
			 */
			return -1;
		}

	} else if (cpu_has_feature(CPU_FTR_ARCH_300) || (event & EVENT_IS_L1)) {
		mask |= CNST_L1_QUAL_MASK;
		value |= CNST_L1_QUAL_VAL(cache);
	}

	if (cpu_has_feature(CPU_FTR_ARCH_31)) {
		mask |= CNST_RADIX_SCOPE_GROUP_MASK;
		value |= CNST_RADIX_SCOPE_GROUP_VAL(event >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT);
	}

	if (is_event_marked(event)) {
		mask |= CNST_SAMPLE_MASK;
		value |= CNST_SAMPLE_VAL(event >> EVENT_SAMPLE_SHIFT);
	}

	if (cpu_has_feature(CPU_FTR_ARCH_31)) {
		if (event_is_threshold(event) && is_thresh_cmp_valid(event_config1)) {
			mask |= CNST_THRESH_CTL_SEL_MASK;
			value |= CNST_THRESH_CTL_SEL_VAL(event >> EVENT_THRESH_SHIFT);
			mask |= p10_CNST_THRESH_CMP_MASK;
			value |= p10_CNST_THRESH_CMP_VAL(p10_thresh_cmp_val(event_config1));
		}
	} else if (cpu_has_feature(CPU_FTR_ARCH_300)) {
		if (event_is_threshold(event) && is_thresh_cmp_valid(event)) {
			mask |= CNST_THRESH_MASK;
			value |= CNST_THRESH_VAL(event >> EVENT_THRESH_SHIFT);
		}
	} else {
		/*
		 * Special case for PM_MRK_FAB_RSP_MATCH and
		 * PM_MRK_FAB_RSP_MATCH_CYC: the threshold control bits are
		 * used for the match value.
		 */
		if (event_is_fab_match(event)) {
			mask |= CNST_FAB_MATCH_MASK;
			value |= CNST_FAB_MATCH_VAL(event >> EVENT_THR_CTL_SHIFT);
		} else {
			if (!is_thresh_cmp_valid(event))
				return -1;

			mask |= CNST_THRESH_MASK;
			value |= CNST_THRESH_VAL(event >> EVENT_THRESH_SHIFT);
		}
	}

ebb_bhrb:
	if (!pmc && ebb)
		/* EBB events must specify the PMC */
		return -1;

	if (event & EVENT_WANTS_BHRB) {
		if (!ebb)
			/* Only EBB events can request BHRB */
			return -1;

		mask |= CNST_IFM_MASK;
		value |= CNST_IFM_VAL(event >> EVENT_IFM_SHIFT);
	}

	/*
	 * All events must agree on EBB, either all request it or none.
	 * EBB events are pinned & exclusive, so this should never actually
	 * hit, but we leave it as a fallback in case.
	 */
	mask |= CNST_EBB_MASK;
	value |= CNST_EBB_VAL(ebb);

	*maskp = mask;
	*valp = value;

	return 0;
}

int isa207_compute_mmcr(u64 event[], int n_ev,
			unsigned int hwc[], struct mmcr_regs *mmcr,
			struct perf_event *pevents[], u32 flags)
{
	unsigned long mmcra, mmcr1, mmcr2, unit, combine, psel, cache, val;
	unsigned long mmcr3;
	unsigned int pmc, pmc_inuse;
	int i;

	pmc_inuse = 0;

	/* First pass to count resource use */
	for (i = 0; i < n_ev; ++i) {
		pmc = (event[i] >> EVENT_PMC_SHIFT) & EVENT_PMC_MASK;
		if (pmc)
			pmc_inuse |= 1 << pmc;
	}

	mmcra = mmcr1 = mmcr2 = mmcr3 = 0;

	/*
	 * Disable BHRB by default by setting the MMCRA[BHRBRD] bit; it is
	 * cleared again below if BHRB is explicitly requested.
	 */
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		mmcra |= MMCRA_BHRB_DISABLE;

	/* Second pass: assign PMCs, set all MMCR1 fields */
	for (i = 0; i < n_ev; ++i) {
		pmc = (event[i] >> EVENT_PMC_SHIFT) & EVENT_PMC_MASK;
		unit = (event[i] >> EVENT_UNIT_SHIFT) & EVENT_UNIT_MASK;
		combine = combine_from_event(event[i]);
		psel = event[i] & EVENT_PSEL_MASK;

		if (!pmc) {
			for (pmc = 1; pmc <= 4; ++pmc) {
				if (!(pmc_inuse & (1 << pmc)))
					break;
			}

			pmc_inuse |= 1 << pmc;
		}

		if (pmc <= 4) {
			mmcr1 |= unit << MMCR1_UNIT_SHIFT(pmc);
			mmcr1 |= combine << combine_shift(pmc);
			mmcr1 |= psel << MMCR1_PMCSEL_SHIFT(pmc);
		}

		/* In continuous sampling mode, update SDAR on TLB miss */
		mmcra_sdar_mode(event[i], &mmcra);

		if (cpu_has_feature(CPU_FTR_ARCH_300)) {
			cache = dc_ic_rld_quad_l1_sel(event[i]);
			mmcr1 |= (cache) << MMCR1_DC_IC_QUAL_SHIFT;
		} else {
			if (event[i] & EVENT_IS_L1) {
				cache = dc_ic_rld_quad_l1_sel(event[i]);
				mmcr1 |= (cache) << MMCR1_DC_IC_QUAL_SHIFT;
			}
		}

		/* Set RADIX_SCOPE_QUAL bit */
		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			val = (event[i] >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT) &
				p10_EVENT_RADIX_SCOPE_QUAL_MASK;
			mmcr1 |= val << p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT;
		}

		if (is_event_marked(event[i])) {
			mmcra |= MMCRA_SAMPLE_ENABLE;

			val = (event[i] >> EVENT_SAMPLE_SHIFT) & EVENT_SAMPLE_MASK;
			if (val) {
				mmcra |= (val & 3) << MMCRA_SAMP_MODE_SHIFT;
				mmcra |= (val >> 2) << MMCRA_SAMP_ELIG_SHIFT;
			}
		}

		/*
		 * For PM_MRK_FAB_RSP_MATCH and PM_MRK_FAB_RSP_MATCH_CYC,
		 * the threshold bits are used for the match value.
		 */
		if (!cpu_has_feature(CPU_FTR_ARCH_300) && event_is_fab_match(event[i])) {
			mmcr1 |= ((event[i] >> EVENT_THR_CTL_SHIFT) &
				  EVENT_THR_CTL_MASK) << MMCR1_FAB_SHIFT;
		} else {
			val = (event[i] >> EVENT_THR_CTL_SHIFT) & EVENT_THR_CTL_MASK;
			mmcra |= val << MMCRA_THR_CTL_SHIFT;
			val = (event[i] >> EVENT_THR_SEL_SHIFT) & EVENT_THR_SEL_MASK;
			mmcra |= val << MMCRA_THR_SEL_SHIFT;
			if (!cpu_has_feature(CPU_FTR_ARCH_31)) {
				val = (event[i] >> EVENT_THR_CMP_SHIFT) &
					EVENT_THR_CMP_MASK;
				mmcra |= thresh_cmp_val(val);
			} else if (flags & PPMU_HAS_ATTR_CONFIG1) {
				val = (pevents[i]->attr.config1 >> p10_EVENT_THR_CMP_SHIFT) &
					p10_EVENT_THR_CMP_MASK;
				mmcra |= thresh_cmp_val(val);
			}
		}

		if (cpu_has_feature(CPU_FTR_ARCH_31) && (unit == 6)) {
			val = (event[i] >> p10_L2L3_EVENT_SHIFT) &
				p10_EVENT_L2L3_SEL_MASK;
			mmcr2 |= val << p10_L2L3_SEL_SHIFT;
		}

		if (event[i] & EVENT_WANTS_BHRB) {
			val = (event[i] >> EVENT_IFM_SHIFT) & EVENT_IFM_MASK;
			mmcra |= val << MMCRA_IFM_SHIFT;
		}

		/* Set MMCRA[BHRBRD] to 0 if there is a user request for BHRB */
		if (cpu_has_feature(CPU_FTR_ARCH_31) &&
				(has_branch_stack(pevents[i]) || (event[i] & EVENT_WANTS_BHRB)))
			mmcra &= ~MMCRA_BHRB_DISABLE;

		if (pevents[i]->attr.exclude_user)
			mmcr2 |= MMCR2_FCP(pmc);

		if (pevents[i]->attr.exclude_hv)
			mmcr2 |= MMCR2_FCH(pmc);

		if (pevents[i]->attr.exclude_kernel) {
			if (cpu_has_feature(CPU_FTR_HVMODE))
				mmcr2 |= MMCR2_FCH(pmc);
			else
				mmcr2 |= MMCR2_FCS(pmc);
		}

		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
			if (pmc <= 4) {
				val = (event[i] >> p10_EVENT_MMCR3_SHIFT) &
					p10_EVENT_MMCR3_MASK;
				mmcr3 |= val << MMCR3_SHIFT(pmc);
			}
		}

		hwc[i] = pmc - 1;
	}

	/* Return MMCRx values */
	mmcr->mmcr0 = 0;

	/* pmc_inuse is 1-based */
	if (pmc_inuse & 2)
		mmcr->mmcr0 = MMCR0_PMC1CE;

	if (pmc_inuse & 0x7c)
		mmcr->mmcr0 |= MMCR0_PMCjCE;

	/* If we're not using PMC 5 or 6, freeze them */
	if (!(pmc_inuse & 0x60))
		mmcr->mmcr0 |= MMCR0_FC56;

	/*
	 * Set MMCR0[PMCCEXT] for p10, which restricts access to group B
	 * registers when MMCR0[PMCC] = 0b00.
	 */
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		mmcr->mmcr0 |= MMCR0_PMCCEXT;

	mmcr->mmcr1 = mmcr1;
	mmcr->mmcra = mmcra;
	mmcr->mmcr2 = mmcr2;
	mmcr->mmcr3 = mmcr3;

	return 0;
}

void isa207_disable_pmc(unsigned int pmc, struct mmcr_regs *mmcr)
{
	if (pmc <= 3)
		mmcr->mmcr1 &= ~(0xffUL << MMCR1_PMCSEL_SHIFT(pmc + 1));
}

static int find_alternative(u64 event, const unsigned int ev_alt[][MAX_ALT], int size)
{
	int i, j;

	for (i = 0; i < size; ++i) {
		if (event < ev_alt[i][0])
			break;

		for (j = 0; j < MAX_ALT && ev_alt[i][j]; ++j)
			if (event == ev_alt[i][j])
				return i;
	}

	return -1;
}

int isa207_get_alternatives(u64 event, u64 alt[], int size, unsigned int flags,
			    const unsigned int ev_alt[][MAX_ALT])
{
	int i, j, num_alt = 0;
	u64 alt_event;

	alt[num_alt++] = event;
	i = find_alternative(event, ev_alt, size);
	if (i >= 0) {
		/* Filter out the original event, it's already in alt[0] */
		for (j = 0; j < MAX_ALT; ++j) {
			alt_event = ev_alt[i][j];
			if (alt_event && alt_event != event)
				alt[num_alt++] = alt_event;
		}
	}

	if (flags & PPMU_ONLY_COUNT_RUN) {
		/*
		 * We're only counting in RUN state, so PM_CYC is equivalent
		 * to PM_RUN_CYC and PM_INST_CMPL to PM_RUN_INST_CMPL.
		 */
		j = num_alt;
		for (i = 0; i < num_alt; ++i) {
			switch (alt[i]) {
			case 0x1e:			/* PM_CYC */
				alt[j++] = 0x600f4;	/* PM_RUN_CYC */
				break;
			case 0x600f4:			/* PM_RUN_CYC */
				alt[j++] = 0x1e;
				break;
			case 0x2:			/* PM_INST_CMPL */
				alt[j++] = 0x500fa;	/* PM_RUN_INST_CMPL */
				break;
			case 0x500fa:			/* PM_RUN_INST_CMPL */
				alt[j++] = 0x2;
				break;
			}
		}
		num_alt = j;
	}

	return num_alt;
}

int isa3XX_check_attr_config(struct perf_event *ev)
{
	u64 val, sample_mode;
	u64 event = ev->attr.config;

	val = (event >> EVENT_SAMPLE_SHIFT) & EVENT_SAMPLE_MASK;
	sample_mode = val & 0x3;

	/*
	 * MMCRA[61:62] is Random Sampling Mode (SM).
	 * A value of 0b11 is reserved.
	 */
	if (sample_mode == 0x3)
		return -EINVAL;

	/*
	 * Check for all reserved values.
	 * Source: Performance Monitoring Unit User Guide
	 */
	switch (val) {
	case 0x5:
	case 0x9:
	case 0xD:
	case 0x19:
	case 0x1D:
	case 0x1A:
	case 0x1E:
		return -EINVAL;
	}

	/*
	 * MMCRA[48:51]/[52:55] is Threshold Start/Stop
	 * Event Selection.
	 * 0b11110000/0b00001111 is reserved.
	 */
	val = (event >> EVENT_THR_CTL_SHIFT) & EVENT_THR_CTL_MASK;
	if (((val & 0xF0) == 0xF0) || ((val & 0xF) == 0xF))
		return -EINVAL;

	return 0;
}