#ifndef ATOMIC_H
#define ATOMIC_H

/*
 * Atomic primitives: 32/64-bit compare-and-swap, memory barrier macros,
 * a CAS-based fetch-and-add, and a test-and-test-and-set spinlock for
 * x86, x86-64, and 32-bit SPARC.
 */

#if defined(X86) || defined(X64) || defined(SPARC32)
#define ret_t char
#define ret32_t char
#define ret64_t char
#else
#define ret_t long
#define ret32_t int
#define ret64_t long long
#endif
#define rc_ret_t ret32_t

#if defined(X86) || defined(X64)

/* 32-bit CAS: returns nonzero iff *addr held expval and has been set to newval. */
static inline int cas32(int volatile * addr,
                        int expval,
                        int newval) {
  char ret;
  __asm__ __volatile__(
      "lock; cmpxchg %2, %1; setz %0;\n"
      : "=a"(ret), "=m"(*(addr))
      : "r" (newval), "a"(expval)
      : "cc");
  return ret;
}

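/* Illustrative sketch (not part of the original header): the locked cmpxchg
 * plus setz above is equivalent to executing the following atomically:
 *
 *     if (*addr == expval) { *addr = newval; return 1; }
 *     return 0;
 */
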
#define ISYNC
#define LWSYNC
#define HWSYNC

/* x86 is TSO: only the store->load (WBR) ordering needs an explicit fence. */
#define RBR
#define RBW
#define WBW
#define WBR asm volatile ("mfence")

#define GET_PC(pc) \
  pc = 0xc0dec0de // fake pc - obsolete

#ifdef X86 /* 32-bit x86: a 64-bit CAS needs cmpxchg8b */

/* 64-bit CAS via cmpxchg8b: edx:eax holds the expected value,
 * ecx:ebx the new value. */
static inline int cas64(long long volatile * addr,
                        long long expval,
                        long long newval) {
  long long oldval;
  __asm__ __volatile__(
      "lock; cmpxchg8b %3"
      : "=A"(oldval)
      : "b"((int)newval),
        "c"((int)(newval >> 32)),
        "m"(*(volatile long long *)(addr)),
        "0"(expval)
      : "memory");
  return (oldval == expval);
}

#else /* X64: a single 64-bit cmpxchg suffices */

static inline int cas64(long long volatile * addr,
                        long long expval,
                        long long newval) {
  char ret;
  __asm__ __volatile__(
      "lock; cmpxchg %2, %1; setz %0;\n"
      : "=a"(ret), "=m"(*(addr))
      : "r" (newval), "a"(expval)
      : "cc");
  return ret;
}

#endif

#elif defined(SPARC32)

/* 32-bit CAS via casa with ASI 0x80 (ASI_P): newval is swapped into *addr
 * when *addr == expval, and in either case newval receives the old contents
 * of *addr. */
static inline int cas32(unsigned volatile * addr,
                        unsigned expval,
                        unsigned newval) {
  asm volatile (
      "casa %0 0x80, %2, %1\n"
      : "+m"(*(addr)), "+r"(newval)
      : "r" (expval)
      );
  return (newval == expval);
}

#define ISYNC
#define LWSYNC
#define RBR asm volatile ("membar #LoadLoad")
#define RBW asm volatile ("membar #LoadStore")
#define WBW asm volatile ("membar #StoreStore")

#else
#error architecture not defined
#endif

#define CAS32(addr,expval,newval) \
  cas32((int *)(addr),(int)(expval),(int)(newval))
#define CAS64(addr,expval,newval) \
  cas64((long long *)(addr),(long long)(expval),(long long)(newval))

/* CAS operates on the native word width: 64-bit when BIT64 is defined,
 * 32-bit otherwise. */
#if defined(BIT64)
#define CAS(addr,expval,newval) CAS64(addr,expval,newval)
#else // 32-bit
#define CAS(addr,expval,newval) CAS32(addr,expval,newval)
#endif // 64-bit v 32-bit
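
/* Usage sketch (illustrative, not from the original source): claim a slot
 * exactly once; only the thread whose CAS succeeds owns it. 'slot_owner' and
 * 'my_id' are hypothetical word-sized variables, slot_owner initialized to 0.
 *
 *     if (CAS(&slot_owner, 0, my_id)) {
 *         ... this thread owns the slot ...
 *     }
 */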

/* Fetch-and-add emulated with a CAS retry loop; oldval receives the value
 * that was in *addr before the addition. */
#define FAA(oldval,addr,val) \
  do { \
    long newval; \
    oldval = *(addr); \
    newval = oldval + val; \
    int ret = CAS(addr,oldval,newval); \
    if (ret) break; \
  } while (1);
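
/* Usage sketch (illustrative, not from the original source):
 *
 *     static volatile long hits = 0;
 *     long prev;
 *     FAA(prev, &hits, 1);   // prev receives the value of hits before the add
 */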

/* Test-and-test-and-set spinlock: spin on plain reads until the lock looks
 * free, then try to take it with CAS; retry on failure. */
static inline void tts_acq(volatile long * lock) {
  while (1) {
    while (*lock);
    int ret = CAS(lock,0,1);
    if (ret)
      break;
  }
  ISYNC;
}

static inline void tts_rel(volatile long * lock) {
  LWSYNC;
  *lock = 0;
}
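
/* Usage sketch (illustrative, not from the original source):
 *
 *     static volatile long l = 0;   // 0 = free, nonzero = held
 *     tts_acq(&l);
 *     ... critical section ...
 *     tts_rel(&l);
 */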

#endif /* ATOMIC_H */