This code should now build the x86_64-softmmu target, part 2.
@@ -18,7 +18,12 @@
 /* For C11 atomic ops */
 
 /* Compiler barrier */
+#ifdef _MSC_VER
+// TODO: fix me!!!
+#define barrier() //{ __asm volatile("" ::: "memory"); (void)0; }
+#else
 #define barrier() ({ asm volatile("" ::: "memory"); (void)0; })
+#endif
 
 #ifndef __ATOMIC_RELAXED
 
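Note that in the MSVC branch above the macro body is commented out: barrier() expands to nothing, so it unblocks compilation but provides no compiler barrier at all. A minimal sketch of a working definition, assuming MSVC's _ReadWriteBarrier() intrinsic from <intrin.h> (a compiler-only fence, the counterpart of the GCC branch's empty asm):

#ifdef _MSC_VER
#include <intrin.h>
/* Compiler-only fence: prevents MSVC from reordering memory accesses
   across this point; emits no CPU instruction. */
#define barrier() do { _ReadWriteBarrier(); } while (0)
#endif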
@@ -31,9 +36,19 @@
 #if defined(__i386__) || defined(__x86_64__)
 #if !QEMU_GNUC_PREREQ(4, 4)
 #if defined __x86_64__
-#define smp_mb() ({ asm volatile("mfence" ::: "memory"); (void)0; })
+# ifdef _MSC_VER
+// TODO: fix me!!!
+# define smp_mb() //{ __asm volatile("mfence" ::: "memory"); (void)0; }
+# else
+# define smp_mb() ({ asm volatile("mfence" ::: "memory"); (void)0; })
+# endif
 #else
-#define smp_mb() ({ asm volatile("lock; addl $0,0(%%esp) " ::: "memory"); (void)0; })
+# ifdef _MSC_VER
+// TODO: fix me!!!
+# define smp_mb() //{ __asm volatile("lock; addl $0,0(%esp) " ::: "memory"); (void)0; }
+# else
+# define smp_mb() ({ asm volatile("lock; addl $0,0(%%esp) " ::: "memory"); (void)0; })
+# endif
 #endif
 #endif
 #endif
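As with barrier(), the MSVC smp_mb() above expands to nothing, silently dropping the full memory fence. A hedged sketch of a working definition, assuming SSE2 and the _mm_mfence() intrinsic from <emmintrin.h>, which emits the same MFENCE instruction the GCC x86_64 branch uses:

#ifdef _MSC_VER
#include <emmintrin.h>
/* Full hardware memory barrier (MFENCE), matching the inline-asm
   version in the GCC branch. */
#define smp_mb() do { _mm_mfence(); } while (0)
#endif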
@@ -183,6 +198,19 @@
 #endif
 
 /* Provide shorter names for GCC atomic builtins. */
+#ifdef _MSC_VER
+#ifdef _WIN64
+#define atomic_fetch_inc(ptr) InterlockedIncrement64(ptr)
+#define atomic_fetch_dec(ptr) InterlockedDecrement64(ptr)
+#define atomic_fetch_add(ptr, n) InterlockedAdd64(ptr, n)
+#define atomic_fetch_sub(ptr, n) InterlockedAdd64(ptr, -n)
+#else
+#define atomic_fetch_inc(ptr) InterlockedIncrement(ptr)
+#define atomic_fetch_dec(ptr) InterlockedDecrement(ptr)
+#define atomic_fetch_add(ptr, n) InterlockedAdd(ptr, n)
+#define atomic_fetch_sub(ptr, n) InterlockedAdd(ptr, -n)
+#endif
+#else
 #define atomic_fetch_inc(ptr) __sync_fetch_and_add(ptr, 1)
 #define atomic_fetch_dec(ptr) __sync_fetch_and_add(ptr, -1)
 #define atomic_fetch_add __sync_fetch_and_add
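Note that these mappings change the return-value semantics: __sync_fetch_and_add() returns the value the variable held before the operation, while InterlockedIncrement64() and InterlockedAdd64() return the value after it, so any caller that uses the result of atomic_fetch_inc() and friends sees an off-by-n value under MSVC. A sketch that preserves the GCC semantics, using InterlockedExchangeAdd()/InterlockedExchangeAdd64(), which do return the original value:

#ifdef _MSC_VER
#ifdef _WIN64
/* InterlockedExchangeAdd64 returns the value *before* the addition,
   matching __sync_fetch_and_add. */
#define atomic_fetch_inc(ptr)    InterlockedExchangeAdd64(ptr, 1)
#define atomic_fetch_dec(ptr)    InterlockedExchangeAdd64(ptr, -1)
#define atomic_fetch_add(ptr, n) InterlockedExchangeAdd64(ptr, n)
#define atomic_fetch_sub(ptr, n) InterlockedExchangeAdd64(ptr, -(n))
#else
#define atomic_fetch_inc(ptr)    InterlockedExchangeAdd(ptr, 1)
#define atomic_fetch_dec(ptr)    InterlockedExchangeAdd(ptr, -1)
#define atomic_fetch_add(ptr, n) InterlockedExchangeAdd(ptr, n)
#define atomic_fetch_sub(ptr, n) InterlockedExchangeAdd(ptr, -(n))
#endif
#endif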
@@ -190,13 +218,28 @@
 #define atomic_fetch_and __sync_fetch_and_and
 #define atomic_fetch_or __sync_fetch_and_or
 #define atomic_cmpxchg __sync_val_compare_and_swap
+#endif
 
 /* And even shorter names that return void. */
+#ifdef _MSC_VER
+#ifdef _WIN64
+#define atomic_inc(ptr) ((void) InterlockedIncrement64(ptr))
+#define atomic_dec(ptr) ((void) InterlockedDecrement64(ptr))
+#define atomic_add(ptr, n) ((void) InterlockedAdd64(ptr, n))
+#define atomic_sub(ptr, n) ((void) InterlockedAdd64(ptr, -n))
+#else
+#define atomic_inc(ptr) ((void) InterlockedIncrement(ptr))
+#define atomic_dec(ptr) ((void) InterlockedDecrement(ptr))
+#define atomic_add(ptr, n) ((void) InterlockedAdd(ptr, n))
+#define atomic_sub(ptr, n) ((void) InterlockedAdd(ptr, -n))
+#endif
+#else
 #define atomic_inc(ptr) ((void) __sync_fetch_and_add(ptr, 1))
 #define atomic_dec(ptr) ((void) __sync_fetch_and_add(ptr, -1))
 #define atomic_add(ptr, n) ((void) __sync_fetch_and_add(ptr, n))
 #define atomic_sub(ptr, n) ((void) __sync_fetch_and_sub(ptr, n))
 #define atomic_and(ptr, n) ((void) __sync_fetch_and_and(ptr, n))
 #define atomic_or(ptr, n) ((void) __sync_fetch_and_or(ptr, n))
+#endif
 
 #endif
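The void casts make the return-value difference harmless in this block, but the MSVC branch defines no counterparts for atomic_and() and atomic_or(). A possible completion, assuming the InterlockedAnd/InterlockedOr family from <windows.h> (their return value, the original contents, is discarded just as in the GCC branch):

#ifdef _MSC_VER
#ifdef _WIN64
#define atomic_and(ptr, n) ((void) InterlockedAnd64(ptr, n))
#define atomic_or(ptr, n)  ((void) InterlockedOr64(ptr, n))
#else
#define atomic_and(ptr, n) ((void) InterlockedAnd(ptr, n))
#define atomic_or(ptr, n)  ((void) InterlockedOr(ptr, n))
#endif
#endif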