#ifndef __loaded__cmpxchg_h__
#define __loaded__cmpxchg_h__
using namespace std;
#line 1 "cmpxchg.h++"

#ifndef __HAVE_ARCH_CMPXCHG
/*
 * On SMP builds the "lock" prefix makes cmpxchg atomic across CPUs.
 * Each lock site is recorded in the .smp_locks section (via the local
 * label 661) so the kernel can patch the prefixes out at boot when it
 * finds itself running on a uniprocessor machine.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX \
	".section .smp_locks,\"a\"\n" \
	" .align 8\n" \
	" .quad 661f\n" \
	".previous\n" \
	"661:\n\tlock; "
#else
#define LOCK_PREFIX ""
#endif
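
/*
 * For illustration (assuming CONFIG_SMP), LOCK_PREFIX "cmpxchg %k1,%2"
 * expands to assembly along these lines, with the operands filled in
 * by the compiler:
 *
 *	.section .smp_locks,"a"
 *	 .align 8
 *	 .quad 661f
 *	.previous
 *	661:	lock; cmpxchg %ecx,(%rdi)
 *
 * Only the final "lock; cmpxchg" is executed; the .smp_locks entry
 * merely records the address of the lock byte for later patching.
 */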

/* Give *ptr a volatile lvalue type so it can be used as an "m" operand. */
#define __xg(x) ((volatile long *)(x))

/*
 * Atomically compare-and-exchange the 1-, 2-, 4- or 8-byte value at ptr:
 * if *ptr equals old, store nieuw into *ptr; in either case return the
 * value that was found in *ptr, so the swap succeeded iff the return
 * value equals old.  cmpxchg expects old in the accumulator (the "0"
 * constraint) and leaves the previous memory value there ("=a"(prev)).
 * The parameter is named "nieuw" (Dutch for "new") because new is a
 * reserved word in C++.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long nieuw, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchg %b1,%2"
				     : "=a"(prev)
				     : "q"(nieuw), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchg %w1,%2"
				     : "=a"(prev)
				     : "r"(nieuw), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchg %k1,%2"
				     : "=a"(prev)
				     : "r"(nieuw), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchg %1,%2"
				     : "=a"(prev)
				     : "r"(nieuw), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;	/* unsupported size; no exchange performed */
}

/*
 * Type-generic front end: infers the operand width from sizeof(*(ptr))
 * and casts the result back to the pointee's type.
 */
#define cmpxchg(ptr,o,n)\
	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
	(unsigned long)(n),sizeof(*(ptr))))
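
/*
 * Minimal usage sketch, assuming a GCC-style x86-64 target: a lock-free
 * increment built on cmpxchg().  The helper name atomic_inc_ul is
 * illustrative only and not part of the original interface.
 */
static inline unsigned long atomic_inc_ul(volatile unsigned long *ctr)
{
	unsigned long old, seen;
	do {
		old = *ctr;			/* snapshot the current value */
		seen = cmpxchg(ctr, old, old + 1);
	} while (seen != old);			/* another CPU raced us: retry */
	return old + 1;				/* the value we installed */
}
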
#endif // __HAVE_ARCH_CMPXCHG
#endif // __loaded__cmpxchg_h__