@@ -19,6 +19,7 @@
 #define __ASM_CMPXCHG_H
 
 #include <linux/bug.h>
+#include <linux/mmdebug.h>
 
 #include <asm/barrier.h>
 
@@ -152,6 +153,51 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	return oldval;
 }
 
+#define system_has_cmpxchg_double()     1
+
+static inline int __cmpxchg_double(volatile void *ptr1, volatile void *ptr2,
+		unsigned long old1, unsigned long old2,
+		unsigned long new1, unsigned long new2, int size)
+{
+	unsigned long loop, lost;
+
+	switch (size) {
+	case 8:
+		VM_BUG_ON((unsigned long *)ptr2 - (unsigned long *)ptr1 != 1);
+		do {
+			asm volatile("// __cmpxchg_double8\n"
+			"	ldxp	%0, %1, %2\n"
+			"	eor	%0, %0, %3\n"
+			"	eor	%1, %1, %4\n"
+			"	orr	%1, %0, %1\n"
+			"	mov	%w0, #0\n"
+			"	cbnz	%1, 1f\n"
+			"	stxp	%w0, %5, %6, %2\n"
+			"1:\n"
+				: "=&r"(loop), "=&r"(lost), "+Q" (*(u64 *)ptr1)
+				: "r" (old1), "r"(old2), "r"(new1), "r"(new2));
+		} while (loop);
+		break;
+	default:
+		BUILD_BUG();
+	}
+
+	return !lost;
+}
+
+static inline int __cmpxchg_double_mb(volatile void *ptr1, volatile void *ptr2,
+			unsigned long old1, unsigned long old2,
+			unsigned long new1, unsigned long new2, int size)
+{
+	int ret;
+
+	smp_mb();
+	ret = __cmpxchg_double(ptr1, ptr2, old1, old2, new1, new2, size);
+	smp_mb();
+
+	return ret;
+}
+
 static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
 					 unsigned long new, int size)
 {
@@ -182,6 +228,31 @@ static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
 	__ret; \
 })
 
+#define cmpxchg_double(ptr1, ptr2, o1, o2, n1, n2) \
+({\
+	int __ret;\
+	__ret = __cmpxchg_double_mb((ptr1), (ptr2), (unsigned long)(o1), \
+			(unsigned long)(o2), (unsigned long)(n1), \
+			(unsigned long)(n2), sizeof(*(ptr1)));\
+	__ret; \
+})
+
+#define cmpxchg_double_local(ptr1, ptr2, o1, o2, n1, n2) \
+({\
+	int __ret;\
+	__ret = __cmpxchg_double((ptr1), (ptr2), (unsigned long)(o1), \
+			(unsigned long)(o2), (unsigned long)(n1), \
+			(unsigned long)(n2), sizeof(*(ptr1)));\
+	__ret; \
+})
+
+#define this_cpu_cmpxchg_8(ptr, o, n) \
+	cmpxchg_local(raw_cpu_ptr(&(ptr)), o, n)
+
+#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2) \
+	cmpxchg_double_local(raw_cpu_ptr(&(ptr1)), raw_cpu_ptr(&(ptr2)), \
+				o1, o2, n1, n2)
+
 #define cmpxchg64(ptr,o,n)		cmpxchg((ptr),(o),(n))
 #define cmpxchg64_local(ptr,o,n)	cmpxchg_local((ptr),(o),(n))
 
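
For reference, the calling convention introduced above: cmpxchg_double(ptr1, ptr2, o1, o2, n1, n2) compares the two adjacent 64-bit words at ptr1 and ptr2 against (o1, o2) and, only if both still match, stores (n1, n2) in one atomic ldxp/stxp sequence, returning 1 on success and 0 on failure. A minimal caller-side sketch follows; the struct, field names, and helper are hypothetical and not part of this patch.

#include <linux/atomic.h>

/*
 * Hypothetical example: swap a pointer together with its adjacent
 * generation counter. The pair must be two contiguous 64-bit words,
 * 16-byte aligned: the VM_BUG_ON() in __cmpxchg_double() checks
 * adjacency, and ldxp/stxp require the alignment.
 */
struct tagged_ptr {
	void		*ptr;	/* word 0 */
	unsigned long	gen;	/* word 1, immediately after word 0 */
} __aligned(16);

static int update_tagged(struct tagged_ptr *tp, void *old_p, void *new_p,
			 unsigned long old_gen)
{
	/* 1 if both words matched and were replaced, 0 otherwise. */
	return cmpxchg_double(&tp->ptr, &tp->gen,
			      old_p, old_gen,
			      new_p, old_gen + 1);
}

cmpxchg_double() wraps the update in full barriers via __cmpxchg_double_mb(); cmpxchg_double_local() is the same operation without the smp_mb() pair, for callers that do not need ordering against other memory accesses.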
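The this_cpu_cmpxchg_8 and this_cpu_cmpxchg_double_8 definitions hook into the generic per-CPU accessors in include/linux/percpu-defs.h, so this_cpu_cmpxchg_double() on a pair of 64-bit per-CPU fields resolves to cmpxchg_double_local() on the current CPU's copy. A hypothetical sketch of that usage follows; the names are invented for illustration and it assumes the generic percpu plumbing of this kernel series.

#include <linux/percpu.h>

/* Hypothetical per-CPU pair; kept 16-byte aligned for ldxp/stxp. */
struct pcpu_pair {
	unsigned long	a;
	unsigned long	b;
} __aligned(16);

static DEFINE_PER_CPU(struct pcpu_pair, demo_pair);

static int bump_pair(unsigned long old_a, unsigned long old_b)
{
	/*
	 * Succeeds (returns 1) only if this CPU's copy of the pair still
	 * holds (old_a, old_b), in which case both words are advanced.
	 */
	return this_cpu_cmpxchg_double(demo_pair.a, demo_pair.b,
				       old_a, old_b,
				       old_a + 1, old_b + 1);
}

This is the shape of access that per-CPU users such as the SLUB allocator's lockless fast path rely on, which is the main in-tree motivation for providing cmpxchg_double support.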