/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_UACCESS_32_H
#define _ASM_X86_UACCESS_32_H

/*
 * User space memory access functions
 */
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>

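/*
 * Common low-level copy loop for 32-bit x86 (see
 * arch/x86/lib/usercopy_32.c); used for both copy directions. Like
 * the other uaccess copy primitives it returns the number of bytes
 * that could not be copied, so 0 means success.
 */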
unsigned long __must_check __copy_user_ll
		(void *to, const void *from, unsigned long n);
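/*
 * Cache-avoiding variant, typically implemented with non-temporal
 * stores on CPUs that support them. "nozero" means the tail of the
 * destination is left untouched if a fault cuts the copy short.
 */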
unsigned long __must_check __copy_from_user_ll_nocache_nozero
		(void *to, const void __user *from, unsigned long n);

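/*
 * Arch backend for copy_to_user(): the access_ok() check and related
 * hardening are done by the generic callers in <linux/uaccess.h>.
 * Returns the number of bytes left uncopied (0 on success). The
 * __force cast only strips the __user annotation for sparse; user and
 * kernel pointers share one address space here.
 */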
static __always_inline unsigned long __must_check
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	return __copy_user_ll((__force void *)to, from, n);
}

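/*
 * Arch backend for copy_from_user(); same contract as
 * raw_copy_to_user() above, copying in the opposite direction.
 */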
static __always_inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	return __copy_user_ll(to, (__force const void *)from, n);
}

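/*
 * Cache-avoiding copy for callers that may run with page faults
 * disabled (hence "inatomic"); a short copy simply reports the
 * leftover byte count, without zeroing the destination tail.
 */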
static __always_inline unsigned long
__copy_from_user_inatomic_nocache(void *to, const void __user *from,
				  unsigned long n)
{
	return __copy_from_user_ll_nocache_nozero(to, from, n);
}

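/*
 * Illustrative sketch (not part of the original header): generic code
 * normally reaches the raw_copy_*() hooks through copy_from_user() and
 * copy_to_user() in <linux/uaccess.h>, which validate the range first,
 * e.g.:
 *
 *	struct foo karg;	// hypothetical caller-side structure
 *
 *	if (copy_from_user(&karg, uptr, sizeof(karg)))
 *		return -EFAULT;	// some bytes could not be copied
 */
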
#endif /* _ASM_X86_UACCESS_32_H */