add memory barriers where appropriate in compile.cpp

Joel Dice 2009-03-02 18:40:06 -07:00
parent 5f0b40e531
commit 8c9d625f8f
4 changed files with 44 additions and 11 deletions


@@ -163,6 +163,11 @@ methodForIp(MyThread* t, void* ip)
fprintf(stderr, "query for method containing %p\n", ip);
}
// we must use a version of the method tree at least as recent as the
// compiled form of the method containing the specified address (see
// compile(MyThread*, Allocator*, BootContext*, object)):
memoryBarrier();
return treeQuery(t, methodTree(t), reinterpret_cast<intptr_t>(ip),
methodTreeSentinal(t), compareIpToMethodBounds);
}
@@ -4189,11 +4194,7 @@ finish(MyThread* t, Allocator* allocator, Context* context)
(&byteArrayBody(t, methodName(t, context->method), 0)),
"printStackTrace") == 0)
{
#ifdef __POWERPC__
asm("trap");
#else
asm("int3");
#endif
trap();
}
return start;
@@ -5377,6 +5378,11 @@ findCallNode(MyThread* t, void* address)
fprintf(stderr, "find call node %p\n", address);
}
// we must use a version of the call table at least as recent as the
// compiled form of the method containing the specified address (see
// compile(MyThread*, Allocator*, BootContext*, object)):
memoryBarrier();
MyProcessor* p = processor(t);
object table = p->callTable;
@@ -5961,6 +5967,8 @@ compile(MyThread* t, Allocator* allocator, BootContext* bootContext,
reinterpret_cast<intptr_t>(compiled), clone, methodTreeSentinal(t),
compareIpToMethodBounds);
memoryBarrier();
methodCompiled(t, method) = reinterpret_cast<intptr_t>(compiled);
if (methodVirtual(t, method)) {
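
The compile.cpp hunks above implement a simple publication protocol: compile() inserts the newly compiled code into the shared method tree, issues a barrier, and only then stores the value other threads consult (methodCompiled), while readers such as methodForIp() and findCallNode() issue a matching barrier before walking the tree or call table. Below is a minimal, self-contained sketch of that pattern, not Avian's actual code: CodeTable, publish, and lookup are hypothetical names, and std::atomic_thread_fence stands in for the per-architecture memoryBarrier() defined in the headers later in this commit.

    #include <atomic>
    #include <cassert>

    struct CodeTable {
      void* entry = nullptr;              // stand-in for the method tree node
      std::atomic<bool> compiled{false};  // stand-in for methodCompiled(t, method)
    };

    inline void memoryBarrier() {
      // portable stand-in for the per-architecture barriers added in this commit
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }

    void publish(CodeTable& t, void* code) {
      t.entry = code;   // 1. make the compiled form reachable in the shared structure
      memoryBarrier();  // 2. order the store above...
      t.compiled.store(true, std::memory_order_relaxed);  // 3. ...before the published flag
    }

    void* lookup(CodeTable& t) {
      if (!t.compiled.load(std::memory_order_relaxed)) return nullptr;
      memoryBarrier();  // pairs with the writer's barrier
      return t.entry;   // the structure is at least as recent as the flag
    }

    int main() {
      CodeTable table;
      static int code;
      publish(table, &code);
      assert(lookup(table) == &code);
      return 0;
    }

Without the barriers, the compiler or a weakly ordered processor such as PowerPC could let a reader observe the published flag before the structure it guards, which is exactly the window the comments in methodForIp() and findCallNode() describe.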


@@ -790,6 +790,10 @@ handleSignal(int signal, siginfo_t* info, void* context)
{
ucontext_t* c = static_cast<ucontext_t*>(context);
#ifndef BASE_REGISTER
# define BASE_REGISTER(x) 0
#endif
void* ip = reinterpret_cast<void*>(IP_REGISTER(c));
void* base = reinterpret_cast<void*>(BASE_REGISTER(c));
void* stack = reinterpret_cast<void*>(STACK_REGISTER(c));


@@ -17,20 +17,17 @@
#ifdef __APPLE__
# if __DARWIN_UNIX03 && defined(_STRUCT_X86_EXCEPTION_STATE32)
# define IP_REGISTER(context) (context->uc_mcontext->__ss.__srr0)
# define BASE_REGISTER(context) (context->uc_mcontext->__ss.__r13)
# define STACK_REGISTER(context) (context->uc_mcontext->__ss.__r1)
# define THREAD_REGISTER(context) (context->uc_mcontext->__ss.__r14)
# define THREAD_REGISTER(context) (context->uc_mcontext->__ss.__r13)
# else
# define IP_REGISTER(context) (context->uc_mcontext->ss.srr0)
# define BASE_REGISTER(context) (context->uc_mcontext->ss.r13)
# define STACK_REGISTER(context) (context->uc_mcontext->ss.r1)
# define THREAD_REGISTER(context) (context->uc_mcontext->ss.r14)
# define THREAD_REGISTER(context) (context->uc_mcontext->ss.r13)
# endif
#else
# define IP_REGISTER(context) (context->uc_mcontext.gregs[32])
# define BASE_REGISTER(context) (context->uc_mcontext.gregs[13])
# define STACK_REGISTER(context) (context->uc_mcontext.gregs[1])
# define THREAD_REGISTER(context) (context->uc_mcontext.gregs[14])
# define THREAD_REGISTER(context) (context->uc_mcontext.gregs[13])
#endif
extern "C" uint64_t
@@ -40,6 +37,18 @@ vmNativeCall(void* function, unsigned stackTotal, void* memoryTable,
namespace vm {
inline void
trap()
{
asm("trap");
}
inline void
memoryBarrier()
{
__asm__ __volatile__("sync": : :"memory");
}
inline uint64_t
dynamicCall(void* function, uintptr_t* arguments, uint8_t* argumentTypes,
unsigned argumentCount, unsigned argumentsSize,


@@ -41,6 +41,18 @@ vmNativeCall(void* function, void* stack, unsigned stackSize,
namespace vm {
inline void
trap()
{
asm("int3");
}
inline void
memoryBarrier()
{
__asm__ __volatile__("": : :"memory");
}
inline uint64_t
dynamicCall(void* function, uintptr_t* arguments, uint8_t*,
unsigned, unsigned argumentsSize, unsigned returnType)
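
Together these headers let architecture-independent code such as the finish() hunk above call trap() and memoryBarrier() without per-architecture #ifdefs: PowerPC gets its trap instruction and a full hardware fence (sync), while on x86 an empty asm with a "memory" clobber only restrains the compiler, which is enough here because x86 already guarantees the store-store and load-load ordering this publication pattern needs. A hedged, consolidated sketch of the idea follows; the inline #if test is only to keep the sketch self-contained, whereas the project presumably selects the appropriate per-architecture header when it builds.

    namespace vm {

    inline void trap() {
    #if defined(__powerpc__) || defined(__POWERPC__)
      asm("trap");   // PowerPC breakpoint
    #else
      asm("int3");   // x86 breakpoint
    #endif
    }

    inline void memoryBarrier() {
    #if defined(__powerpc__) || defined(__POWERPC__)
      __asm__ __volatile__("sync" : : : "memory");  // full hardware barrier
    #else
      __asm__ __volatile__("" : : : "memory");      // compiler-only barrier
    #endif
    }

    }  // namespace vm

    int main() {
      vm::memoryBarrier();  // cheap on x86, a full sync on PowerPC
      // vm::trap();        // uncommenting this stops execution under a debugger
      return 0;
    }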