#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Cast an arbitrary pointer to (volatile long *) for use as the memory
 * operand of the inline-asm primitives below; the volatile qualifier
 * keeps the compiler from caching or reordering the access.
 */
#define __xg(x) ((volatile long *)(x))

/*
 * Store a 64-bit value. On x86-64 an aligned 64-bit store is a single
 * instruction, so a plain volatile assignment is sufficient (unlike the
 * 32-bit variant, which needs cmpxchg8b).
 */
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

/* Legacy alias kept for callers of the older API name. */
#define _set_64bit set_64bit

/*
 * Declared but intentionally never defined: referenced only from the
 * unreachable default branches below, so passing an unsupported operand
 * size to __xchg()/__cmpxchg() becomes a link-time error.
 */
extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 *
 * Atomically exchange (x) with *(ptr) for operand sizes 1, 2, 4 and 8
 * bytes, evaluating to the previous value of *(ptr). Any other size
 * resolves to the undefined __xchg_wrong_size() and fails at link time.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
		asm volatile("xchgb %b0,%1"				\
			     : "=q" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile("xchgw %w0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile("xchgl %k0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

/* Public entry point: size is derived from the pointed-to type. */
#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*ptr))

/* Advertise a native cmpxchg implementation to generic code. */
#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 *
 * "lock" is pasted in front of the instruction, letting the wrappers
 * below choose between LOCK_PREFIX, an unconditional "lock; ", or no
 * prefix at all for the CPU-local variant.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
		asm volatile(lock "cmpxchgb %b2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile(lock "cmpxchgw %w2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile(lock "cmpxchgl %k2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

/* SMP-safe cmpxchg: lock prefix patched in/out by LOCK_PREFIX. */
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* Always emits an explicit "lock" prefix, even on UP builds. */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* CPU-local variant: no lock prefix, not safe across CPUs. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

/* Public entry points: operand size is derived from the pointed-to type. */
#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))

/*
 * 64-bit compare-and-exchange; BUILD_BUG_ON rejects, at compile time,
 * operands that are not exactly 8 bytes wide.
 */
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

/* CPU-local 64-bit compare-and-exchange; same width check as above. */
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

#endif /* _ASM_X86_CMPXCHG_64_H */