[PATCH v2] includes: Use memory operands for atomic functions

Maarten Lankhorst m.b.lankhorst at gmail.com
Thu Sep 29 05:25:05 CDT 2011


Signed-off-by: Maarten Lankhorst <m.b.lankhorst at gmail.com>

---
Uses the '+' constraint modifier to mark the memory operand as both input and output, fixing the issues with v1.

The Linux kernel does the same, so if this didn't work, you wouldn't be running Linux. ;)

diff --git a/include/winbase.h b/include/winbase.h
index a37d073..1f6afaf 100644
--- a/include/winbase.h
+++ b/include/winbase.h
@@ -2355,24 +2355,24 @@ extern WCHAR * CDECL wine_get_dos_file_name( LPCSTR str );
 static FORCEINLINE LONG WINAPI InterlockedCompareExchange( LONG volatile *dest, LONG xchg, LONG compare )
 {
     LONG ret;
-    __asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgl %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
     return ret;
 }
 
 static FORCEINLINE LONG WINAPI InterlockedExchange( LONG volatile *dest, LONG val )
 {
     LONG ret;
-    __asm__ __volatile__( "lock; xchgl %0,(%1)"
-                          : "=r" (ret) :"r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
     return ret;
 }
 
 static FORCEINLINE LONG WINAPI InterlockedExchangeAdd( LONG volatile *dest, LONG incr )
 {
     LONG ret;
-    __asm__ __volatile__( "lock; xaddl %0,(%1)"
-                          : "=r" (ret) : "r" (dest), "0" (incr) : "memory" );
+    __asm__ __volatile__( "lock; xaddl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (incr) : "memory" );
     return ret;
 }
 
@@ -2414,8 +2414,8 @@ static FORCEINLINE LONG WINAPI InterlockedCompareExchange( LONG volatile *dest,
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     LONG ret;
-    __asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgl %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
     return ret;
 #else
     extern int interlocked_cmpxchg( int *dest, int xchg, int compare );
@@ -2427,8 +2427,8 @@ static FORCEINLINE PVOID WINAPI InterlockedCompareExchangePointer( PVOID volatil
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     PVOID ret;
-    __asm__ __volatile__( "lock; cmpxchgq %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgq %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
     return ret;
 #else
     extern void *interlocked_cmpxchg_ptr( void **dest, void *xchg, void *compare );
@@ -2440,8 +2440,8 @@ static FORCEINLINE LONGLONG WINAPI InterlockedCompareExchange64( LONGLONG volati
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     LONGLONG ret;
-    __asm__ __volatile__( "lock; cmpxchgq %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgq %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
     return ret;
 #else
     extern __int64 interlocked_cmpxchg64( __int64 *dest, __int64 xchg, __int64 compare );
@@ -2453,8 +2453,8 @@ static FORCEINLINE LONG WINAPI InterlockedExchange( LONG volatile *dest, LONG va
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     LONG ret;
-    __asm__ __volatile__( "lock; xchgl %0,(%1)"
-                          : "=r" (ret) :"r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
     return ret;
 #else
     extern int interlocked_xchg( int *dest, int val );
@@ -2466,8 +2466,8 @@ static FORCEINLINE PVOID WINAPI InterlockedExchangePointer( PVOID volatile *dest
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     PVOID ret;
-    __asm__ __volatile__( "lock; xchgq %0,(%1)"
-                          : "=r" (ret) :"r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgq %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
     return ret;
 #else
     extern void *interlocked_xchg_ptr( void **dest, void *val );
@@ -2479,8 +2479,8 @@ static FORCEINLINE LONG WINAPI InterlockedExchangeAdd( LONG volatile *dest, LONG
 {
 #if defined(__x86_64__) && defined(__GNUC__)
     LONG ret;
-    __asm__ __volatile__( "lock; xaddl %0,(%1)"
-                          : "=r" (ret) : "r" (dest), "0" (incr) : "memory" );
+    __asm__ __volatile__( "lock; xaddl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (incr) : "memory" );
     return ret;
 #else
     extern int interlocked_xchg_add( int *dest, int incr );
diff --git a/include/wine/port.h b/include/wine/port.h
index 8281653..df14ef9 100644
--- a/include/wine/port.h
+++ b/include/wine/port.h
@@ -356,8 +356,8 @@ extern __int64 interlocked_cmpxchg64( __int64 *dest, __int64 xchg, __int64 compa
 static inline int interlocked_cmpxchg( int *dest, int xchg, int compare )
 {
     int ret;
-    __asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgl %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
     return ret;
 }
 
@@ -365,11 +365,11 @@ static inline void *interlocked_cmpxchg_ptr( void **dest, void *xchg, void *comp
 {
     void *ret;
 #ifdef __x86_64__
-    __asm__ __volatile__( "lock; cmpxchgq %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgq %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
 #else
-    __asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
-                          : "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
+    __asm__ __volatile__( "lock; cmpxchgl %2,%1"
+                          : "=a" (ret), "+m" (*dest) : "r" (xchg), "0" (compare) : "memory" );
 #endif
     return ret;
 }
@@ -377,8 +377,8 @@ static inline void *interlocked_cmpxchg_ptr( void **dest, void *xchg, void *comp
 static inline int interlocked_xchg( int *dest, int val )
 {
     int ret;
-    __asm__ __volatile__( "lock; xchgl %0,(%1)"
-                          : "=r" (ret) : "r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
     return ret;
 }
 
@@ -386,11 +386,11 @@ static inline void *interlocked_xchg_ptr( void **dest, void *val )
 {
     void *ret;
 #ifdef __x86_64__
-    __asm__ __volatile__( "lock; xchgq %0,(%1)"
-                          : "=r" (ret) :"r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgq %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
 #else
-    __asm__ __volatile__( "lock; xchgl %0,(%1)"
-                          : "=r" (ret) : "r" (dest), "0" (val) : "memory" );
+    __asm__ __volatile__( "lock; xchgl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (val) : "memory" );
 #endif
     return ret;
 }
@@ -398,8 +398,8 @@ static inline void *interlocked_xchg_ptr( void **dest, void *val )
 static inline int interlocked_xchg_add( int *dest, int incr )
 {
     int ret;
-    __asm__ __volatile__( "lock; xaddl %0,(%1)"
-                          : "=r" (ret) : "r" (dest), "0" (incr) : "memory" );
+    __asm__ __volatile__( "lock; xaddl %0,%1"
+                          : "=r" (ret), "+m" (*dest) : "0" (incr) : "memory" );
     return ret;
 }
 
@@ -408,11 +408,10 @@ static inline unsigned char interlocked_cmpxchg128( __int64 *dest, __int64 xchg_
                                                     __int64 xchg_low, __int64 *compare )
 {
     unsigned char ret;
-    __asm__ __volatile__( "lock cmpxchg16b %0; setz %b2"
-                          : "=m" (dest[0]), "=m" (dest[1]), "=r" (ret),
-                            "=a" (compare[0]), "=d" (compare[1])
-                          : "m" (dest[0]), "m" (dest[1]), "3" (compare[0]), "4" (compare[1]),
-                            "c" (xchg_high), "b" (xchg_low) );
+    __asm__ __volatile__( "lock cmpxchg16b %1; setz %b0"
+                          : "=r" (ret), "+m" (dest[0]), "+m" (dest[1]),
+                            "+a" (compare[0]), "+d" (compare[1])
+                          : "c" (xchg_high), "b" (xchg_low) : "memory" );
     return ret;
 }
 #endif





More information about the wine-patches mailing list