blob: ab964db855ac8119d7aa4e86ae44f7f0ebdfbbf2 [file] [log] [blame]
Greg Kroah-Hartmanb2441312017-11-01 15:07:57 +01001// SPDX-License-Identifier: GPL-2.0
Adrian Hunter045f8cd82013-08-27 11:23:13 +03002#include <stdbool.h>
Arnaldo Carvalho de Melofd20e812017-04-17 15:23:08 -03003#include <inttypes.h>
Arnaldo Carvalho de Melo215a0d32019-07-04 11:21:24 -03004#include <stdlib.h>
Arnaldo Carvalho de Melo8520a982019-08-29 16:18:59 -03005#include <string.h>
Arnaldo Carvalho de Melo2f2ae232019-01-27 14:08:22 +01006#include <linux/bitops.h>
Arnaldo Carvalho de Melo877a7a12017-04-17 11:39:06 -03007#include <linux/kernel.h>
Borislav Petkovd944c4e2014-04-25 21:31:02 +02008#include <linux/types.h>
Adrian Hunter045f8cd82013-08-27 11:23:13 +03009
Arnaldo Carvalho de Melod3300a32019-08-30 15:09:54 -030010#include "map_symbol.h"
Arnaldo Carvalho de Melo2f2ae232019-01-27 14:08:22 +010011#include "branch.h"
Adrian Hunter045f8cd82013-08-27 11:23:13 +030012#include "event.h"
13#include "evsel.h"
Jiri Olsa84f5d362014-07-14 23:46:48 +020014#include "debug.h"
Arnaldo Carvalho de Meloea49e012019-09-18 11:36:13 -030015#include "util/synthetic-events.h"
Adrian Hunter045f8cd82013-08-27 11:23:13 +030016
17#include "tests.h"
18
/*
 * COMP(m) - compare one scalar member 'm' of the two struct perf_sample
 * pointers s1/s2 in scope.  On mismatch, log the member name (via #m
 * stringification) and return false from the *enclosing* function.
 */
#define COMP(m) do {					\
	if (s1->m != s2->m) {				\
		pr_debug("Samples differ at '"#m"'\n");	\
		return false;				\
	}						\
} while (0)

/*
 * MCOMP(m) - like COMP(), but byte-compares an aggregate member with
 * memcmp() instead of using '!=' (for struct/array members).
 */
#define MCOMP(m) do {					\
	if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) {	\
		pr_debug("Samples differ at '"#m"'\n");	\
		return false;				\
	}						\
} while (0)
32
/*
 * samples_same - check two parsed samples for equality.
 * @s1, @s2:      the samples to compare
 * @type:         PERF_SAMPLE_* bit mask selecting which fields are valid
 * @read_format:  PERF_FORMAT_* mask governing the layout of the 'read' member
 *
 * Only the fields selected by @type (and, for PERF_SAMPLE_READ, by
 * @read_format) are compared.  Returns true if all selected fields match;
 * on the first mismatch COMP()/MCOMP() log the field name and return false.
 */
static bool samples_same(const struct perf_sample *s1,
			 const struct perf_sample *s2,
			 u64 type, u64 read_format)
{
	size_t i;

	if (type & PERF_SAMPLE_IDENTIFIER)
		COMP(id);

	if (type & PERF_SAMPLE_IP)
		COMP(ip);

	if (type & PERF_SAMPLE_TID) {
		COMP(pid);
		COMP(tid);
	}

	if (type & PERF_SAMPLE_TIME)
		COMP(time);

	if (type & PERF_SAMPLE_ADDR)
		COMP(addr);

	if (type & PERF_SAMPLE_ID)
		COMP(id);

	if (type & PERF_SAMPLE_STREAM_ID)
		COMP(stream_id);

	if (type & PERF_SAMPLE_CPU)
		COMP(cpu);

	if (type & PERF_SAMPLE_PERIOD)
		COMP(period);

	if (type & PERF_SAMPLE_READ) {
		/* Group reads carry a counter array; single reads one value */
		if (read_format & PERF_FORMAT_GROUP)
			COMP(read.group.nr);
		else
			COMP(read.one.value);
		if (read_format & PERF_FORMAT_TOTAL_TIME_ENABLED)
			COMP(read.time_enabled);
		if (read_format & PERF_FORMAT_TOTAL_TIME_RUNNING)
			COMP(read.time_running);
		/* PERF_FORMAT_ID is forced for PERF_SAMPLE_READ */
		if (read_format & PERF_FORMAT_GROUP) {
			for (i = 0; i < s1->read.group.nr; i++)
				MCOMP(read.group.values[i]);
		} else {
			COMP(read.one.id);
		}
	}

	if (type & PERF_SAMPLE_CALLCHAIN) {
		COMP(callchain->nr);
		for (i = 0; i < s1->callchain->nr; i++)
			COMP(callchain->ips[i]);
	}

	if (type & PERF_SAMPLE_RAW) {
		COMP(raw_size);
		if (memcmp(s1->raw_data, s2->raw_data, s1->raw_size)) {
			pr_debug("Samples differ at 'raw_data'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_BRANCH_STACK) {
		COMP(branch_stack->nr);
		COMP(branch_stack->hw_idx);
		for (i = 0; i < s1->branch_stack->nr; i++)
			MCOMP(branch_stack->entries[i]);
	}

	if (type & PERF_SAMPLE_REGS_USER) {
		/* Number of sampled registers = popcount of the mask */
		size_t sz = hweight_long(s1->user_regs.mask) * sizeof(u64);

		COMP(user_regs.mask);
		COMP(user_regs.abi);
		/* abi == 0 means no registers were actually sampled */
		if (s1->user_regs.abi &&
		    (!s1->user_regs.regs || !s2->user_regs.regs ||
		     memcmp(s1->user_regs.regs, s2->user_regs.regs, sz))) {
			pr_debug("Samples differ at 'user_regs'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_STACK_USER) {
		COMP(user_stack.size);
		if (memcmp(s1->user_stack.data, s2->user_stack.data,
			   s1->user_stack.size)) {
			pr_debug("Samples differ at 'user_stack'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_WEIGHT)
		COMP(weight);

	if (type & PERF_SAMPLE_DATA_SRC)
		COMP(data_src);

	if (type & PERF_SAMPLE_TRANSACTION)
		COMP(transaction);

	if (type & PERF_SAMPLE_REGS_INTR) {
		size_t sz = hweight_long(s1->intr_regs.mask) * sizeof(u64);

		COMP(intr_regs.mask);
		COMP(intr_regs.abi);
		if (s1->intr_regs.abi &&
		    (!s1->intr_regs.regs || !s2->intr_regs.regs ||
		     memcmp(s1->intr_regs.regs, s2->intr_regs.regs, sz))) {
			pr_debug("Samples differ at 'intr_regs'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_PHYS_ADDR)
		COMP(phys_addr);

	if (type & PERF_SAMPLE_CGROUP)
		COMP(cgroup);

	if (type & PERF_SAMPLE_AUX) {
		COMP(aux_sample.size);
		if (memcmp(s1->aux_sample.data, s2->aux_sample.data,
			   s1->aux_sample.size)) {
			pr_debug("Samples differ at 'aux_sample'\n");
			return false;
		}
	}

	return true;
}
168
/*
 * do_test - round-trip one sample configuration.
 * @sample_type:  PERF_SAMPLE_* bits to exercise
 * @sample_regs:  register mask used for both user and intr regs (when the
 *                corresponding sample bit is set)
 * @read_format:  PERF_FORMAT_* bits governing the 'read' member layout
 *
 * Builds a fully-populated struct perf_sample with distinctive values,
 * synthesizes a PERF_RECORD_SAMPLE event from it, checks the written size
 * against perf_event__sample_event_size(), parses the event back and
 * verifies the result matches the original via samples_same().
 *
 * Returns 0 on success, -1 on failure.
 */
static int do_test(u64 sample_type, u64 sample_regs, u64 read_format)
{
	struct evsel evsel = {
		.needs_swap = false,
		.core = {
			.attr = {
				.sample_type = sample_type,
				.read_format = read_format,
			},
		},
	};
	union perf_event *event;
	/* Unions provide backing storage for the variable-length tails */
	union {
		struct ip_callchain callchain;
		u64 data[64];
	} callchain = {
		/* 3 ips */
		.data = {3, 201, 202, 203},
	};
	union {
		struct branch_stack branch_stack;
		u64 data[64];
	} branch_stack = {
		/* 1 branch_entry (nr, hw_idx, then from/to/flags) */
		.data = {1, -1ULL, 211, 212, 213},
	};
	u64 regs[64];
	const u64 raw_data[] = {0x123456780a0b0c0dULL, 0x1102030405060708ULL};
	const u64 data[] = {0x2211443366558877ULL, 0, 0xaabbccddeeff4321ULL};
	const u64 aux_data[] = {0xa55a, 0, 0xeeddee, 0x0282028202820282};
	/* Every field gets a unique value so mismatches are unambiguous */
	struct perf_sample sample = {
		.ip		= 101,
		.pid		= 102,
		.tid		= 103,
		.time		= 104,
		.addr		= 105,
		.id		= 106,
		.stream_id	= 107,
		.period		= 108,
		.weight		= 109,
		.cpu		= 110,
		.raw_size	= sizeof(raw_data),
		.data_src	= 111,
		.transaction	= 112,
		.raw_data	= (void *)raw_data,
		.callchain	= &callchain.callchain,
		.no_hw_idx	= false,
		.branch_stack	= &branch_stack.branch_stack,
		.user_regs	= {
			.abi	= PERF_SAMPLE_REGS_ABI_64,
			.mask	= sample_regs,
			.regs	= regs,
		},
		.user_stack	= {
			.size	= sizeof(data),
			.data	= (void *)data,
		},
		.read		= {
			.time_enabled = 0x030a59d664fca7deULL,
			.time_running = 0x011b6ae553eb98edULL,
		},
		.intr_regs	= {
			.abi	= PERF_SAMPLE_REGS_ABI_64,
			.mask	= sample_regs,
			.regs	= regs,
		},
		.phys_addr	= 113,
		.cgroup		= 114,
		.aux_sample	= {
			.size	= sizeof(aux_data),
			.data	= (void *)aux_data,
		},
	};
	struct sample_read_value values[] = {{1, 5}, {9, 3}, {2, 7}, {6, 4},};
	struct perf_sample sample_out;
	size_t i, sz, bufsz;
	int err, ret = -1;

	if (sample_type & PERF_SAMPLE_REGS_USER)
		evsel.core.attr.sample_regs_user = sample_regs;

	if (sample_type & PERF_SAMPLE_REGS_INTR)
		evsel.core.attr.sample_regs_intr = sample_regs;

	/* The branch_stack fixture above includes an hw_idx slot */
	if (sample_type & PERF_SAMPLE_BRANCH_STACK)
		evsel.core.attr.branch_sample_type |= PERF_SAMPLE_BRANCH_HW_INDEX;

	/* Fill the register buffer with a recognizable byte pattern */
	for (i = 0; i < sizeof(regs); i++)
		*(i + (u8 *)regs) = i & 0xfe;

	if (read_format & PERF_FORMAT_GROUP) {
		sample.read.group.nr     = 4;
		sample.read.group.values = values;
	} else {
		sample.read.one.value = 0x08789faeb786aa87ULL;
		sample.read.one.id    = 99;
	}

	sz = perf_event__sample_event_size(&sample, sample_type, read_format);
	bufsz = sz + 4096; /* Add a bit for overrun checking */
	event = malloc(bufsz);
	if (!event) {
		pr_debug("malloc failed\n");
		return -1;
	}

	/* Poison the buffer so the actual written size can be detected */
	memset(event, 0xff, bufsz);
	event->header.type = PERF_RECORD_SAMPLE;
	event->header.misc = 0;
	event->header.size = sz;

	err = perf_event__synthesize_sample(event, sample_type, read_format,
					    &sample);
	if (err) {
		pr_debug("%s failed for sample_type %#"PRIx64", error %d\n",
			 "perf_event__synthesize_sample", sample_type, err);
		goto out_free;
	}

	/* The data does not contain 0xff so we use that to check the size */
	for (i = bufsz; i > 0; i--) {
		if (*(i - 1 + (u8 *)event) != 0xff)
			break;
	}
	if (i != sz) {
		pr_debug("Event size mismatch: actual %zu vs expected %zu\n",
			 i, sz);
		goto out_free;
	}

	evsel.sample_size = __evsel__sample_size(sample_type);

	err = perf_evsel__parse_sample(&evsel, event, &sample_out);
	if (err) {
		pr_debug("%s failed for sample_type %#"PRIx64", error %d\n",
			 "perf_evsel__parse_sample", sample_type, err);
		goto out_free;
	}

	if (!samples_same(&sample, &sample_out, sample_type, read_format)) {
		pr_debug("parsing failed for sample_type %#"PRIx64"\n",
			 sample_type);
		goto out_free;
	}

	ret = 0;
out_free:
	free(event);
	if (ret && read_format)
		pr_debug("read_format %#"PRIx64"\n", read_format);
	return ret;
}
321
/**
 * test__sample_parsing - test sample parsing.
 *
 * This function implements a test that synthesizes a sample event, parses it
 * and then checks that the parsed sample matches the original sample. The test
 * checks sample format bits separately and together. If the test passes %0 is
 * returned, otherwise %-1 is returned.
 */
int test__sample_parsing(struct test *test __maybe_unused, int subtest __maybe_unused)
{
	/*
	 * read_format variations to test: combinations of
	 * PERF_FORMAT_TOTAL_TIME_ENABLED (1), _TOTAL_TIME_RUNNING (2),
	 * _ID (4) and _GROUP (8), always including _ID.
	 */
	const u64 rf[] = {4, 5, 6, 7, 12, 13, 14, 15};
	u64 sample_type;
	u64 sample_regs;
	size_t i;
	int err;

	/*
	 * Fail the test if it has not been updated when new sample format bits
	 * were added. Please actually update the test rather than just change
	 * the condition below.
	 */
	if (PERF_SAMPLE_MAX > PERF_SAMPLE_CGROUP << 1) {
		pr_debug("sample format has changed, some new PERF_SAMPLE_ bit was introduced - test needs updating\n");
		return -1;
	}

	/* Test each sample format bit separately */
	for (sample_type = 1; sample_type != PERF_SAMPLE_MAX;
	     sample_type <<= 1) {
		/* Test read_format variations */
		if (sample_type == PERF_SAMPLE_READ) {
			for (i = 0; i < ARRAY_SIZE(rf); i++) {
				err = do_test(sample_type, 0, rf[i]);
				if (err)
					return err;
			}
			continue;
		}
		sample_regs = 0;

		if (sample_type == PERF_SAMPLE_REGS_USER)
			sample_regs = 0x3fff;

		if (sample_type == PERF_SAMPLE_REGS_INTR)
			sample_regs = 0xff0fff;

		err = do_test(sample_type, sample_regs, 0);
		if (err)
			return err;
	}

	/* Test all sample format bits together */
	sample_type = PERF_SAMPLE_MAX - 1;
	sample_regs = 0x3fff; /* shared by intr and user regs */
	for (i = 0; i < ARRAY_SIZE(rf); i++) {
		err = do_test(sample_type, sample_regs, rf[i]);
		if (err)
			return err;
	}

	return 0;
}