From 3777c9b429d977fe56864aaf907d651514af0e6e Mon Sep 17 00:00:00 2001
From: Joel Dice
Date: Wed, 2 Dec 2009 08:49:10 -0700
Subject: [PATCH] fix MSVC build

---
 src/compile.cpp  |  4 ++--
 src/compiler.cpp |  2 +-
 src/x86.h        | 35 ++++++++++++++++++++---------------
 3 files changed, 23 insertions(+), 18 deletions(-)

diff --git a/src/compile.cpp b/src/compile.cpp
index c4dd0da4b9..0434215f42 100644
--- a/src/compile.cpp
+++ b/src/compile.cpp
@@ -2955,8 +2955,8 @@ intrinsic(MyThread* t, Frame* frame, object target)
 {
 #define MATCH(name, constant) \
   (byteArrayLength(t, name) == sizeof(constant) \
-   and strcmp(reinterpret_cast<const char*>(&byteArrayBody(t, name, 0)), \
-              constant) == 0)
+   and ::strcmp(reinterpret_cast<const char*>(&byteArrayBody(t, name, 0)), \
+                constant) == 0)
 
   object className = vm::className(t, methodClass(t, target));
   if (UNLIKELY(MATCH(className, "java/lang/Math"))) {
diff --git a/src/compiler.cpp b/src/compiler.cpp
index 5721683799..c13d00a222 100644
--- a/src/compiler.cpp
+++ b/src/compiler.cpp
@@ -3212,7 +3212,7 @@ class CallEvent: public Event {
     }
 
     for (unsigned i = 0; i < stackArgumentFootprint; ++i) {
-      Value* v = arguments[i];
+      Value* v = RUNTIME_ARRAY_BODY(arguments)[i];
       if (v) {
         int frameIndex = i + frameOffset;
 
diff --git a/src/x86.h b/src/x86.h
index 4ae08f6859..f9d3c4f559 100644
--- a/src/x86.h
+++ b/src/x86.h
@@ -16,6 +16,7 @@
 
 #ifdef _MSC_VER
 #  include "windows.h"
+#  undef interface
 #endif
 
 #ifdef ARCH_x86_32
@@ -155,7 +156,23 @@ trap()
 }
 
 inline void
-memoryBarrier()
+programOrderMemoryBarrier()
+{
+#ifdef _MSC_VER
+  MemoryBarrier();
+#else
+  __asm__ __volatile__("": : :"memory");
+#endif
+}
+
+inline void
+storeStoreMemoryBarrier()
+{
+  programOrderMemoryBarrier();
+}
+
+inline void
+storeLoadMemoryBarrier()
 {
 #ifdef _MSC_VER
   MemoryBarrier();
@@ -166,28 +183,16 @@ memoryBarrier()
 #endif // ARCH_x86_64
 }
 
-inline void
-storeStoreMemoryBarrier()
-{
-  __asm__ __volatile__("": : :"memory");
-}
-
-inline void
-storeLoadMemoryBarrier()
-{
-  memoryBarrier();
-}
-
 inline void
 loadMemoryBarrier()
 {
-  __asm__ __volatile__("": : :"memory");
+  programOrderMemoryBarrier();
 }
 
 inline void
 syncInstructionCache(const void*, unsigned)
 {
-  __asm__ __volatile__("": : :"memory");
+  programOrderMemoryBarrier();
 }
 
 #ifdef USE_ATOMIC_OPERATIONS
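
Note (not part of the patch): the barrier reorganization above makes programOrderMemoryBarrier() the common compiler-only fence (MSVC's MemoryBarrier() intrinsic or an empty GCC asm clobbering "memory"), with storeStoreMemoryBarrier() and loadMemoryBarrier() delegating to it, while storeLoadMemoryBarrier() keeps the original full hardware barrier. Below is a minimal, self-contained sketch that restates those definitions and adds a hypothetical publish/consume pair (publish, consume, data, ready are illustrative names, not from the tree) to show which barrier guards which reordering on x86.

// Sketch only -- not part of the patch.  The barrier definitions mirror
// the ones introduced in src/x86.h above; publish/consume are hypothetical
// callers illustrating intended use.

#ifdef _MSC_VER
#  include <windows.h>        // provides the MemoryBarrier() intrinsic
#endif

inline void
programOrderMemoryBarrier()
{
#ifdef _MSC_VER
  MemoryBarrier();
#else
  __asm__ __volatile__("": : :"memory");  // compiler barrier only
#endif
}

inline void
storeStoreMemoryBarrier()
{
  // x86 does not reorder stores with earlier stores, so preventing
  // compiler reordering is enough.
  programOrderMemoryBarrier();
}

inline void
loadMemoryBarrier()
{
  // Likewise, loads are not reordered with earlier loads on x86.
  programOrderMemoryBarrier();
}

// Hypothetical single-writer publish/consume pair:
volatile int data = 0;
volatile int ready = 0;

void
publish(int value)
{
  data = value;
  storeStoreMemoryBarrier();  // payload must be visible before the flag
  ready = 1;
}

int
consume()
{
  if (ready) {
    loadMemoryBarrier();      // read the flag before the payload
    return data;
  }
  return -1;
}

int
main()
{
  publish(42);
  return consume() == 42 ? 0 : 1;
}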