// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * test_kprobes.c - simple sanity test for *probes
 *
 * Copyright IBM Corp. 2008
 */

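/*
 * KUnit test suite exercising kprobes, kretprobes and, on capable
 * architectures, stacktraces taken from inside kretprobe handlers. The
 * code below references CONFIG_KPROBES_SANITY_TEST, the Kconfig option
 * that builds this test either into the kernel or as a module.
 */
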
#include <linux/kernel.h>
#include <linux/kprobes.h>
#include <linux/random.h>
#include <kunit/test.h>

#define div_factor 3

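/*
 * rand1 is (re)initialized in kprobes_test_init() to a pseudo-random value
 * strictly greater than div_factor, so every handler below can expect
 * rand1 / div_factor to be non-zero.
 */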
static u32 rand1, preh_val, posth_val;
static u32 (*target)(u32 value);
static u32 (*target2)(u32 value);
static struct kunit *current_test;

static unsigned long (*internal_target)(void);
static unsigned long (*stacktrace_target)(void);
static unsigned long (*stacktrace_driver)(void);
static unsigned long target_return_address[2];

static noinline u32 kprobe_target(u32 value)
{
	return (value / div_factor);
}

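/*
 * Handlers for the basic kprobe test: the pre-handler runs right before the
 * probed instruction in kprobe_target() and the post-handler right after it,
 * each recording a value derived from rand1 that test_kprobe() checks.
 */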
static int kp_pre_handler(struct kprobe *p, struct pt_regs *regs)
{
	KUNIT_EXPECT_FALSE(current_test, preemptible());
	preh_val = (rand1 / div_factor);
	return 0;
}

static void kp_post_handler(struct kprobe *p, struct pt_regs *regs,
		unsigned long flags)
{
	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, preh_val, (rand1 / div_factor));
	posth_val = preh_val + div_factor;
}

static struct kprobe kp = {
	.symbol_name = "kprobe_target",
	.pre_handler = kp_pre_handler,
	.post_handler = kp_post_handler
};

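/*
 * Register a single kprobe on kprobe_target(), invoke the target through the
 * function pointer, and verify that both handlers ran by checking the values
 * they stored.
 */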
static void test_kprobe(struct kunit *test)
{
	current_test = test;
	KUNIT_EXPECT_EQ(test, 0, register_kprobe(&kp));
	target(rand1);
	unregister_kprobe(&kp);
	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);
}

static noinline u32 kprobe_target2(u32 value)
{
	return (value / div_factor) + 1;
}

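/*
 * Helpers for the stacktrace-on-kretprobe tests. The two targets are
 * noinline and record their own return addresses with
 * __builtin_return_address(0), so the kretprobe handlers can check that the
 * saved stacktraces contain the real return addresses rather than the
 * kretprobe trampoline. kprobe_stacktrace_driver() simply calls the target
 * through a function pointer.
 */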
static noinline unsigned long kprobe_stacktrace_internal_target(void)
{
	if (!target_return_address[0])
		target_return_address[0] = (unsigned long)__builtin_return_address(0);
	return target_return_address[0];
}

static noinline unsigned long kprobe_stacktrace_target(void)
{
	if (!target_return_address[1])
		target_return_address[1] = (unsigned long)__builtin_return_address(0);

	if (internal_target)
		internal_target();

	return target_return_address[1];
}

static noinline unsigned long kprobe_stacktrace_driver(void)
{
	if (stacktrace_target)
		stacktrace_target();

	/* This is to prevent the function from being inlined */
	return (unsigned long)__builtin_return_address(0);
}

static int kp_pre_handler2(struct kprobe *p, struct pt_regs *regs)
{
	preh_val = (rand1 / div_factor) + 1;
	return 0;
}

static void kp_post_handler2(struct kprobe *p, struct pt_regs *regs,
		unsigned long flags)
{
	KUNIT_EXPECT_EQ(current_test, preh_val, (rand1 / div_factor) + 1);
	posth_val = preh_val + div_factor;
}

static struct kprobe kp2 = {
	.symbol_name = "kprobe_target2",
	.pre_handler = kp_pre_handler2,
	.post_handler = kp_post_handler2
};

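/*
 * Same check as test_kprobe(), but registering both probes with a single
 * register_kprobes() batch call and exercising both targets.
 */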
static void test_kprobes(struct kunit *test)
{
	struct kprobe *kps[2] = {&kp, &kp2};

	current_test = test;

	/* addr and flags should be cleared when reusing a kprobe. */
	kp.addr = NULL;
	kp.flags = 0;

	KUNIT_EXPECT_EQ(test, 0, register_kprobes(kps, 2));
	preh_val = 0;
	posth_val = 0;
	target(rand1);

	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);

	preh_val = 0;
	posth_val = 0;
	target2(rand1);

	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);
	unregister_kprobes(kps, 2);
}

#ifdef CONFIG_KRETPROBES
static u32 krph_val;

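/*
 * Kretprobe handlers: the entry_handler runs when the probed function is
 * entered, and the return handler runs when it returns, at which point
 * regs_return_value() yields the target's return value.
 */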
static int entry_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	KUNIT_EXPECT_FALSE(current_test, preemptible());
	krph_val = (rand1 / div_factor);
	return 0;
}

static int return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long ret = regs_return_value(regs);

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, ret, rand1 / div_factor);
	KUNIT_EXPECT_NE(current_test, krph_val, 0);
	krph_val = rand1;
	return 0;
}

static struct kretprobe rp = {
	.handler = return_handler,
	.entry_handler = entry_handler,
	.kp.symbol_name = "kprobe_target"
};

static void test_kretprobe(struct kunit *test)
{
	current_test = test;
	KUNIT_EXPECT_EQ(test, 0, register_kretprobe(&rp));
	target(rand1);
	unregister_kretprobe(&rp);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);
}

static int return_handler2(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long ret = regs_return_value(regs);

	KUNIT_EXPECT_EQ(current_test, ret, (rand1 / div_factor) + 1);
	KUNIT_EXPECT_NE(current_test, krph_val, 0);
	krph_val = rand1;
	return 0;
}

static struct kretprobe rp2 = {
	.handler = return_handler2,
	.entry_handler = entry_handler,
	.kp.symbol_name = "kprobe_target2"
};

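/*
 * Batch variant of test_kretprobe(): register both kretprobes with a single
 * register_kretprobes() call and use krph_val to confirm that each return
 * handler actually ran.
 */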
static void test_kretprobes(struct kunit *test)
{
	struct kretprobe *rps[2] = {&rp, &rp2};

	current_test = test;
	/* addr and flags should be cleared when reusing a kprobe. */
	rp.kp.addr = NULL;
	rp.kp.flags = 0;
	KUNIT_EXPECT_EQ(test, 0, register_kretprobes(rps, 2));

	krph_val = 0;
	target(rand1);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);

	krph_val = 0;
	target2(rand1);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);
	unregister_kretprobes(rps, 2);
}

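/*
 * The following tests require an architecture that can produce a correct
 * stacktrace from inside a kretprobe handler, as advertised by
 * CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE.
 */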
#ifdef CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE
#define STACK_BUF_SIZE 16
static unsigned long stack_buf[STACK_BUF_SIZE];

static int stacktrace_return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long retval = regs_return_value(regs);
	int i, ret;

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, retval, target_return_address[1]);

	/*
	 * Test the stacktrace inside the kretprobe handler. It will involve
	 * the kretprobe trampoline, but it must still include the correct
	 * return address of the target function.
	 */
	ret = stack_trace_save(stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);

	for (i = 0; i < ret; i++) {
		if (stack_buf[i] == target_return_address[1])
			break;
	}
	KUNIT_EXPECT_NE(current_test, i, ret);

#if !IS_MODULE(CONFIG_KPROBES_SANITY_TEST)
	/*
	 * Test the stacktrace from pt_regs at the return address. Thus the
	 * stack trace must start from the target return address.
	 */
	ret = stack_trace_save_regs(regs, stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);
	KUNIT_EXPECT_EQ(current_test, stack_buf[0], target_return_address[1]);
#endif

	return 0;
}

static struct kretprobe rp3 = {
	.handler = stacktrace_return_handler,
	.kp.symbol_name = "kprobe_stacktrace_target"
};

static void test_stacktrace_on_kretprobe(struct kunit *test)
{
	unsigned long myretaddr = (unsigned long)__builtin_return_address(0);

	current_test = test;
	rp3.kp.addr = NULL;
	rp3.kp.flags = 0;

	/*
	 * Run stacktrace_driver() to record the correct return address in
	 * stacktrace_target(), and make sure the stacktrace_driver() call is
	 * not inlined by checking that its return address differs from the
	 * return address of this function.
	 */
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());

	KUNIT_ASSERT_EQ(test, 0, register_kretprobe(&rp3));
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());
	unregister_kretprobe(&rp3);
}

static int stacktrace_internal_return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long retval = regs_return_value(regs);
	int i, ret;

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, retval, target_return_address[0]);

	/*
	 * Test the stacktrace inside the kretprobe handler for the nested
	 * case. The unwinder will find the kretprobe_trampoline address in
	 * place of the return address, and kretprobes must resolve it to
	 * the real one.
	 */
	ret = stack_trace_save(stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);

	for (i = 0; i < ret - 1; i++) {
		if (stack_buf[i] == target_return_address[0]) {
			KUNIT_EXPECT_EQ(current_test, stack_buf[i + 1], target_return_address[1]);
			break;
		}
	}
	KUNIT_EXPECT_NE(current_test, i, ret);

#if !IS_MODULE(CONFIG_KPROBES_SANITY_TEST)
	/* Ditto for the regs version. */
	ret = stack_trace_save_regs(regs, stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);
	KUNIT_EXPECT_EQ(current_test, stack_buf[0], target_return_address[0]);
	KUNIT_EXPECT_EQ(current_test, stack_buf[1], target_return_address[1]);
#endif

	return 0;
}

static struct kretprobe rp4 = {
	.handler = stacktrace_internal_return_handler,
	.kp.symbol_name = "kprobe_stacktrace_internal_target"
};

static void test_stacktrace_on_nested_kretprobe(struct kunit *test)
{
	unsigned long myretaddr = (unsigned long)__builtin_return_address(0);
	struct kretprobe *rps[2] = {&rp3, &rp4};

	current_test = test;
	rp3.kp.addr = NULL;
	rp3.kp.flags = 0;

	//KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());

	KUNIT_ASSERT_EQ(test, 0, register_kretprobes(rps, 2));
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());
	unregister_kretprobes(rps, 2);
}
#endif /* CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE */

#endif /* CONFIG_KRETPROBES */

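/*
 * Suite init, run before each test case: wire up the targets through
 * function pointers (which, together with noinline, helps keep the compiler
 * from optimizing the calls away) and draw rand1 until it exceeds
 * div_factor.
 */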
static int kprobes_test_init(struct kunit *test)
{
	target = kprobe_target;
	target2 = kprobe_target2;
	stacktrace_target = kprobe_stacktrace_target;
	internal_target = kprobe_stacktrace_internal_target;
	stacktrace_driver = kprobe_stacktrace_driver;

	do {
		rand1 = prandom_u32();
	} while (rand1 <= div_factor);
	return 0;
}

static struct kunit_case kprobes_testcases[] = {
	KUNIT_CASE(test_kprobe),
	KUNIT_CASE(test_kprobes),
#ifdef CONFIG_KRETPROBES
	KUNIT_CASE(test_kretprobe),
	KUNIT_CASE(test_kretprobes),
#ifdef CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE
	KUNIT_CASE(test_stacktrace_on_kretprobe),
	KUNIT_CASE(test_stacktrace_on_nested_kretprobe),
#endif
#endif
	{}
};

static struct kunit_suite kprobes_test_suite = {
	.name = "kprobes_test",
	.init = kprobes_test_init,
	.test_cases = kprobes_testcases,
};

kunit_test_suites(&kprobes_test_suite);

MODULE_LICENSE("GPL");