// SPDX-License-Identifier: GPL-2.0
#include <stdbool.h>
#include <inttypes.h>
#include <stdlib.h>
#include <string.h>
#include <linux/bitops.h>
#include <linux/kernel.h>
#include <linux/types.h>

#include "map_symbol.h"
#include "branch.h"
#include "event.h"
#include "evsel.h"
#include "debug.h"
#include "util/synthetic-events.h"

#include "tests.h"

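/*
 * COMP() compares one scalar member of the two samples and logs the first
 * field that differs; MCOMP() does the same for aggregate members via
 * memcmp().  Both return false from the enclosing comparison function on
 * the first mismatch.
 */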
#define COMP(m) do {					\
	if (s1->m != s2->m) {				\
		pr_debug("Samples differ at '"#m"'\n");	\
		return false;				\
	}						\
} while (0)

#define MCOMP(m) do {					\
	if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) {	\
		pr_debug("Samples differ at '"#m"'\n");	\
		return false;				\
	}						\
} while (0)

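/*
 * Compare only the sample fields selected by @type (PERF_SAMPLE_* bits) and
 * @read_format; fields outside the selected format are not part of the
 * synthesized event and are therefore left untouched by parsing.
 */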
static bool samples_same(const struct perf_sample *s1,
			 const struct perf_sample *s2,
			 u64 type, u64 read_format)
{
	size_t i;

	if (type & PERF_SAMPLE_IDENTIFIER)
		COMP(id);

	if (type & PERF_SAMPLE_IP)
		COMP(ip);

	if (type & PERF_SAMPLE_TID) {
		COMP(pid);
		COMP(tid);
	}

	if (type & PERF_SAMPLE_TIME)
		COMP(time);

	if (type & PERF_SAMPLE_ADDR)
		COMP(addr);

	if (type & PERF_SAMPLE_ID)
		COMP(id);

	if (type & PERF_SAMPLE_STREAM_ID)
		COMP(stream_id);

	if (type & PERF_SAMPLE_CPU)
		COMP(cpu);

	if (type & PERF_SAMPLE_PERIOD)
		COMP(period);

	if (type & PERF_SAMPLE_READ) {
		if (read_format & PERF_FORMAT_GROUP)
			COMP(read.group.nr);
		else
			COMP(read.one.value);
		if (read_format & PERF_FORMAT_TOTAL_TIME_ENABLED)
			COMP(read.time_enabled);
		if (read_format & PERF_FORMAT_TOTAL_TIME_RUNNING)
			COMP(read.time_running);
		/* PERF_FORMAT_ID is forced for PERF_SAMPLE_READ */
		if (read_format & PERF_FORMAT_GROUP) {
			for (i = 0; i < s1->read.group.nr; i++)
				MCOMP(read.group.values[i]);
		} else {
			COMP(read.one.id);
		}
	}

	if (type & PERF_SAMPLE_CALLCHAIN) {
		COMP(callchain->nr);
		for (i = 0; i < s1->callchain->nr; i++)
			COMP(callchain->ips[i]);
	}

	if (type & PERF_SAMPLE_RAW) {
		COMP(raw_size);
		if (memcmp(s1->raw_data, s2->raw_data, s1->raw_size)) {
			pr_debug("Samples differ at 'raw_data'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_BRANCH_STACK) {
		COMP(branch_stack->nr);
		COMP(branch_stack->hw_idx);
		for (i = 0; i < s1->branch_stack->nr; i++)
			MCOMP(branch_stack->entries[i]);
	}

	if (type & PERF_SAMPLE_REGS_USER) {
		size_t sz = hweight_long(s1->user_regs.mask) * sizeof(u64);

		COMP(user_regs.mask);
		COMP(user_regs.abi);
		if (s1->user_regs.abi &&
		    (!s1->user_regs.regs || !s2->user_regs.regs ||
		     memcmp(s1->user_regs.regs, s2->user_regs.regs, sz))) {
			pr_debug("Samples differ at 'user_regs'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_STACK_USER) {
		COMP(user_stack.size);
		if (memcmp(s1->user_stack.data, s2->user_stack.data,
			   s1->user_stack.size)) {
			pr_debug("Samples differ at 'user_stack'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_WEIGHT)
		COMP(weight);

	if (type & PERF_SAMPLE_DATA_SRC)
		COMP(data_src);

	if (type & PERF_SAMPLE_TRANSACTION)
		COMP(transaction);

	if (type & PERF_SAMPLE_REGS_INTR) {
		size_t sz = hweight_long(s1->intr_regs.mask) * sizeof(u64);

		COMP(intr_regs.mask);
		COMP(intr_regs.abi);
		if (s1->intr_regs.abi &&
		    (!s1->intr_regs.regs || !s2->intr_regs.regs ||
		     memcmp(s1->intr_regs.regs, s2->intr_regs.regs, sz))) {
			pr_debug("Samples differ at 'intr_regs'\n");
			return false;
		}
	}

	if (type & PERF_SAMPLE_PHYS_ADDR)
		COMP(phys_addr);

	if (type & PERF_SAMPLE_CGROUP)
		COMP(cgroup);

	if (type & PERF_SAMPLE_DATA_PAGE_SIZE)
		COMP(data_page_size);

	if (type & PERF_SAMPLE_CODE_PAGE_SIZE)
		COMP(code_page_size);

	if (type & PERF_SAMPLE_AUX) {
		COMP(aux_sample.size);
		if (memcmp(s1->aux_sample.data, s2->aux_sample.data,
			   s1->aux_sample.size)) {
			pr_debug("Samples differ at 'aux_sample'\n");
			return false;
		}
	}

	return true;
}

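/*
 * Build a reference sample with every field populated, synthesize a
 * PERF_RECORD_SAMPLE event from it, parse the event back and check that the
 * parsed sample matches the original for the given sample_type, register
 * mask and read_format.
 */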
static int do_test(u64 sample_type, u64 sample_regs, u64 read_format)
{
	struct evsel evsel = {
		.needs_swap = false,
		.core = {
			.attr = {
				.sample_type = sample_type,
				.read_format = read_format,
			},
		},
	};
	union perf_event *event;
	union {
		struct ip_callchain callchain;
		u64 data[64];
	} callchain = {
		/* 3 ips */
		.data = {3, 201, 202, 203},
	};
	union {
		struct branch_stack branch_stack;
		u64 data[64];
	} branch_stack = {
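		/*
		 * Assumed struct branch_stack layout in this buffer:
		 * nr, then hw_idx (-1ULL meaning unknown), then the
		 * from/to/flags triple of the single entry.
		 */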
		/* 1 branch_entry */
		.data = {1, -1ULL, 211, 212, 213},
	};
	u64 regs[64];
	const u32 raw_data[] = {0x12345678, 0x0a0b0c0d, 0x11020304, 0x05060708, 0};
	const u64 data[] = {0x2211443366558877ULL, 0, 0xaabbccddeeff4321ULL};
	const u64 aux_data[] = {0xa55a, 0, 0xeeddee, 0x0282028202820282};
	struct perf_sample sample = {
		.ip = 101,
		.pid = 102,
		.tid = 103,
		.time = 104,
		.addr = 105,
		.id = 106,
		.stream_id = 107,
		.period = 108,
		.weight = 109,
		.cpu = 110,
		.raw_size = sizeof(raw_data),
		.data_src = 111,
		.transaction = 112,
		.raw_data = (void *)raw_data,
		.callchain = &callchain.callchain,
		.no_hw_idx = false,
		.branch_stack = &branch_stack.branch_stack,
		.user_regs = {
			.abi = PERF_SAMPLE_REGS_ABI_64,
			.mask = sample_regs,
			.regs = regs,
		},
		.user_stack = {
			.size = sizeof(data),
			.data = (void *)data,
		},
		.read = {
			.time_enabled = 0x030a59d664fca7deULL,
			.time_running = 0x011b6ae553eb98edULL,
		},
		.intr_regs = {
			.abi = PERF_SAMPLE_REGS_ABI_64,
			.mask = sample_regs,
			.regs = regs,
		},
		.phys_addr = 113,
		.cgroup = 114,
		.data_page_size = 115,
		.code_page_size = 116,
		.aux_sample = {
			.size = sizeof(aux_data),
			.data = (void *)aux_data,
		},
	};
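	/* Four group members, each a {value, id} pair, used when PERF_FORMAT_GROUP is under test */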
	struct sample_read_value values[] = {{1, 5}, {9, 3}, {2, 7}, {6, 4},};
	struct perf_sample sample_out;
	size_t i, sz, bufsz;
	int err, ret = -1;

	if (sample_type & PERF_SAMPLE_REGS_USER)
		evsel.core.attr.sample_regs_user = sample_regs;

	if (sample_type & PERF_SAMPLE_REGS_INTR)
		evsel.core.attr.sample_regs_intr = sample_regs;

	if (sample_type & PERF_SAMPLE_BRANCH_STACK)
		evsel.core.attr.branch_sample_type |= PERF_SAMPLE_BRANCH_HW_INDEX;

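	/*
	 * Fill regs[] with a recognizable byte pattern; both user_regs and
	 * intr_regs in the sample point at this array.
	 */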
	for (i = 0; i < sizeof(regs); i++)
		*(i + (u8 *)regs) = i & 0xfe;

	if (read_format & PERF_FORMAT_GROUP) {
		sample.read.group.nr = 4;
		sample.read.group.values = values;
	} else {
		sample.read.one.value = 0x08789faeb786aa87ULL;
		sample.read.one.id = 99;
	}

	sz = perf_event__sample_event_size(&sample, sample_type, read_format);
	bufsz = sz + 4096; /* Add a bit for overrun checking */
	event = malloc(bufsz);
	if (!event) {
		pr_debug("malloc failed\n");
		return -1;
	}

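	/*
	 * Poison the whole buffer with 0xff so the size check after
	 * synthesizing can find where the event data actually ends.
	 */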
	memset(event, 0xff, bufsz);
	event->header.type = PERF_RECORD_SAMPLE;
	event->header.misc = 0;
	event->header.size = sz;

	err = perf_event__synthesize_sample(event, sample_type, read_format,
					    &sample);
	if (err) {
		pr_debug("%s failed for sample_type %#"PRIx64", error %d\n",
			 "perf_event__synthesize_sample", sample_type, err);
		goto out_free;
	}

	/* The data does not contain 0xff so we use that to check the size */
	for (i = bufsz; i > 0; i--) {
		if (*(i - 1 + (u8 *)event) != 0xff)
			break;
	}
	if (i != sz) {
		pr_debug("Event size mismatch: actual %zu vs expected %zu\n",
			 i, sz);
		goto out_free;
	}

	evsel.sample_size = __evsel__sample_size(sample_type);

	err = evsel__parse_sample(&evsel, event, &sample_out);
	if (err) {
		pr_debug("%s failed for sample_type %#"PRIx64", error %d\n",
			 "evsel__parse_sample", sample_type, err);
		goto out_free;
	}

	if (!samples_same(&sample, &sample_out, sample_type, read_format)) {
		pr_debug("parsing failed for sample_type %#"PRIx64"\n",
			 sample_type);
		goto out_free;
	}

	ret = 0;
out_free:
	free(event);
	if (ret && read_format)
		pr_debug("read_format %#"PRIx64"\n", read_format);
	return ret;
}

/**
 * test__sample_parsing - test sample parsing.
 *
 * This function implements a test that synthesizes a sample event, parses it
 * and then checks that the parsed sample matches the original sample. The test
 * checks sample format bits separately and together. If the test passes %0 is
 * returned, otherwise %-1 is returned.
 */
int test__sample_parsing(struct test *test __maybe_unused, int subtest __maybe_unused)
{
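	/*
	 * read_format combinations to exercise: PERF_FORMAT_ID is always set
	 * (it is forced for PERF_SAMPLE_READ), with and without
	 * PERF_FORMAT_TOTAL_TIME_ENABLED/RUNNING and PERF_FORMAT_GROUP.
	 */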
	const u64 rf[] = {4, 5, 6, 7, 12, 13, 14, 15};
	u64 sample_type;
	u64 sample_regs;
	size_t i;
	int err;

	/*
	 * Fail the test if it has not been updated when new sample format bits
	 * were added. Please actually update the test rather than just change
	 * the condition below.
	 */
	if (PERF_SAMPLE_MAX > PERF_SAMPLE_WEIGHT_STRUCT << 1) {
		pr_debug("sample format has changed, some new PERF_SAMPLE_ bit was introduced - test needs updating\n");
		return -1;
	}

	/* Test each sample format bit separately */
	for (sample_type = 1; sample_type != PERF_SAMPLE_MAX;
	     sample_type <<= 1) {
		/* Test read_format variations */
		if (sample_type == PERF_SAMPLE_READ) {
			for (i = 0; i < ARRAY_SIZE(rf); i++) {
				err = do_test(sample_type, 0, rf[i]);
				if (err)
					return err;
			}
			continue;
		}
		sample_regs = 0;

		if (sample_type == PERF_SAMPLE_REGS_USER)
			sample_regs = 0x3fff;

		if (sample_type == PERF_SAMPLE_REGS_INTR)
			sample_regs = 0xff0fff;

		err = do_test(sample_type, sample_regs, 0);
		if (err)
			return err;
	}

	/*
	 * Test all sample format bits together
	 * Note: PERF_SAMPLE_WEIGHT and PERF_SAMPLE_WEIGHT_STRUCT cannot
	 * be set simultaneously.
	 */
	sample_type = (PERF_SAMPLE_MAX - 1) & ~PERF_SAMPLE_WEIGHT;
	sample_regs = 0x3fff; /* shared by intr and user regs */
	for (i = 0; i < ARRAY_SIZE(rf); i++) {
		err = do_test(sample_type, sample_regs, rf[i]);
		if (err)
			return err;
	}

	return 0;
}