diff --git a/src/assembler.h b/src/assembler.h
index e597554ee8..8369c59064 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -16,8 +16,10 @@ enum UnaryOperation {
   Push,
   Pop,
   Call,
+  LongCall,
   AlignedCall,
   Jump,
+  LongJump,
   JumpIfLess,
   JumpIfGreater,
   JumpIfLessOrEqual,
diff --git a/src/compile.cpp b/src/compile.cpp
index 8e70d23947..5718a06195 100644
--- a/src/compile.cpp
+++ b/src/compile.cpp
@@ -5033,7 +5033,7 @@ compileThunks(MyThread* t, MyProcessor* p)
     defaultContext.promise.value_ = reinterpret_cast<intptr_t>(compileMethod);
 
     Assembler::Constant proc(&(defaultContext.promise));
-    a->apply(Call, BytesPerWord, ConstantOperand, &proc);
+    a->apply(LongCall, BytesPerWord, ConstantOperand, &proc);
 
     popThread(t, a);
 
@@ -5052,7 +5052,7 @@ compileThunks(MyThread* t, MyProcessor* p)
     nativeContext.promise.value_ = reinterpret_cast<intptr_t>(invokeNative);
 
     Assembler::Constant proc(&(nativeContext.promise));
-    a->apply(Call, BytesPerWord, ConstantOperand, &proc);
+    a->apply(LongCall, BytesPerWord, ConstantOperand, &proc);
 
     popThread(t, a);
 
@@ -5071,7 +5071,7 @@ compileThunks(MyThread* t, MyProcessor* p)
       (throwArrayIndexOutOfBounds);
 
     Assembler::Constant proc(&(aioobContext.promise));
-    a->apply(Call, BytesPerWord, ConstantOperand, &proc);
+    a->apply(LongCall, BytesPerWord, ConstantOperand, &proc);
   }
 
   ThunkContext tableContext(t);
@@ -5081,7 +5081,7 @@ compileThunks(MyThread* t, MyProcessor* p)
     saveStackAndBase(t, a);
 
     Assembler::Constant proc(&(tableContext.promise));
-    a->apply(Jump, BytesPerWord, ConstantOperand, &proc);
+    a->apply(LongJump, BytesPerWord, ConstantOperand, &proc);
   }
 
   p->thunkSize = pad(tableContext.context.assembler->length());
diff --git a/src/posix.cpp b/src/posix.cpp
index 6b0c63739f..e705245bf1 100644
--- a/src/posix.cpp
+++ b/src/posix.cpp
@@ -71,23 +71,25 @@ const int signals[] = { VisitSignal, SegFaultSignal, InterruptSignal };
 #  define BASE_REGISTER(context) (context->uc_mcontext.gregs[REG_RBP])
 #  define STACK_REGISTER(context) (context->uc_mcontext.gregs[REG_RSP])
 #  define THREAD_REGISTER(context) (context->uc_mcontext.gregs[REG_RBX])
-#elif defined __APPLE__
-#  if __DARWIN_UNIX03 && defined(_STRUCT_X86_EXCEPTION_STATE32)
-#    define IP_REGISTER(context) (context->uc_mcontext->__ss.__eip)
-#    define BASE_REGISTER(context) (context->uc_mcontext->__ss.__ebp)
-#    define STACK_REGISTER(context) (context->uc_mcontext->__ss.__esp)
-#    define THREAD_REGISTER(context) (context->uc_mcontext->__ss.__ebx)
-#  else
-#    define IP_REGISTER(context) (context->uc_mcontext->ss.eip)
-#    define BASE_REGISTER(context) (context->uc_mcontext->ss.ebp)
-#    define STACK_REGISTER(context) (context->uc_mcontext->ss.esp)
-#    define THREAD_REGISTER(context) (context->uc_mcontext->ss.ebx)
-#  endif
 #elif defined __i386__
-#  define IP_REGISTER(context) (context->uc_mcontext.gregs[REG_EIP])
-#  define BASE_REGISTER(context) (context->uc_mcontext.gregs[REG_EBP])
-#  define STACK_REGISTER(context) (context->uc_mcontext.gregs[REG_ESP])
-#  define THREAD_REGISTER(context) (context->uc_mcontext.gregs[REG_EBX])
+#  ifdef __APPLE__
+#    if __DARWIN_UNIX03 && defined(_STRUCT_X86_EXCEPTION_STATE32)
+#      define IP_REGISTER(context) (context->uc_mcontext->__ss.__eip)
+#      define BASE_REGISTER(context) (context->uc_mcontext->__ss.__ebp)
+#      define STACK_REGISTER(context) (context->uc_mcontext->__ss.__esp)
+#      define THREAD_REGISTER(context) (context->uc_mcontext->__ss.__ebx)
+#    else
+#      define IP_REGISTER(context) (context->uc_mcontext->ss.eip)
+#      define BASE_REGISTER(context) (context->uc_mcontext->ss.ebp)
+#      define STACK_REGISTER(context) (context->uc_mcontext->ss.esp)
+#      define THREAD_REGISTER(context) (context->uc_mcontext->ss.ebx)
+#    endif
+#  else
+#    define IP_REGISTER(context) (context->uc_mcontext.gregs[REG_EIP])
+#    define BASE_REGISTER(context) (context->uc_mcontext.gregs[REG_EBP])
+#    define STACK_REGISTER(context) (context->uc_mcontext.gregs[REG_ESP])
+#    define THREAD_REGISTER(context) (context->uc_mcontext.gregs[REG_EBX])
+#  endif
 #else
 #  error unsupported architecture
 #endif
@@ -560,8 +562,14 @@ class MySystem: public System {
   virtual void* tryAllocateExecutable(unsigned sizeInBytes) {
     assert(this, sizeInBytes % LikelyPageSizeInBytes == 0);
 
+#ifdef __x86_64__
+    const unsigned Extra = MAP_32BIT;
+#else
+    const unsigned Extra = 0;
+#endif
+
     void* p = mmap(0, sizeInBytes, PROT_EXEC | PROT_READ | PROT_WRITE,
-                   MAP_PRIVATE | MAP_ANON, -1, 0);
+                   MAP_PRIVATE | MAP_ANON | Extra, -1, 0);
 
     if (p == MAP_FAILED) {
       return 0;
diff --git a/src/x86.cpp b/src/x86.cpp
index 75334f5b84..07a2ea5ab7 100644
--- a/src/x86.cpp
+++ b/src/x86.cpp
@@ -305,26 +305,28 @@ callC(Context* c, unsigned size, Assembler::Constant* a)
 {
   assert(c, size == BytesPerWord);
 
+  unconditional(c, 0xe8, a);
+}
+
+void
+longCallC(Context* c, unsigned size, Assembler::Constant* a)
+{
+  assert(c, size == BytesPerWord);
+
   if (BytesPerWord == 8) {
     Assembler::Register r(r10);
     moveCR(c, size, a, &r);
     callR(c, size, &r);
   } else {
-    unconditional(c, 0xe8, a);
+    callC(c, size, a);
   }
 }
 
 void
 alignedCallC(Context* c, unsigned size, Assembler::Constant* a)
 {
-  if (BytesPerWord == 8) {
-    while ((c->code.length() + 2) % 8) {
-      c->code.append(0x90);
-    }
-  } else {
-    while ((c->code.length() + 1) % 4) {
-      c->code.append(0x90);
-    }
+  while ((c->code.length() + 1) % 4) {
+    c->code.append(0x90);
   }
   callC(c, size, a);
 }
@@ -362,12 +364,20 @@ jumpC(Context* c, unsigned size, Assembler::Constant* a)
 {
   assert(c, size == BytesPerWord);
 
+  unconditional(c, 0xe9, a);
+}
+
+void
+longJumpC(Context* c, unsigned size, Assembler::Constant* a)
+{
+  assert(c, size == BytesPerWord);
+
   if (BytesPerWord == 8) {
     Assembler::Register r(r10);
     moveCR(c, size, a, &r);
     jumpR(c, size, &r);
   } else {
-    unconditional(c, 0xe9, a);
+    jumpC(c, size, a);
   }
 }
 
@@ -1776,12 +1786,16 @@ populateTables()
   UnaryOperations[INDEX1(Call, Register)] = CAST1(callR);
   UnaryOperations[INDEX1(Call, Memory)] = CAST1(callM);
 
+  UnaryOperations[INDEX1(LongCall, Constant)] = CAST1(longCallC);
+
   UnaryOperations[INDEX1(AlignedCall, Constant)] = CAST1(alignedCallC);
 
-  UnaryOperations[INDEX1(Jump, Register)] = CAST1(jumpR);
   UnaryOperations[INDEX1(Jump, Constant)] = CAST1(jumpC);
+  UnaryOperations[INDEX1(Jump, Register)] = CAST1(jumpR);
   UnaryOperations[INDEX1(Jump, Memory)] = CAST1(jumpM);
 
+  UnaryOperations[INDEX1(LongJump, Constant)] = CAST1(longJumpC);
+
   UnaryOperations[INDEX1(JumpIfEqual, Constant)] = CAST1(jumpIfEqualC);
   UnaryOperations[INDEX1(JumpIfNotEqual, Constant)] = CAST1(jumpIfNotEqualC);
   UnaryOperations[INDEX1(JumpIfGreater, Constant)] = CAST1(jumpIfGreaterC);
@@ -2062,26 +2076,13 @@ class MyAssembler: public Assembler {
   }
 
   virtual void updateCall(void* returnAddress, void* newTarget) {
-    if (BytesPerWord == 8) {
-      uint8_t* instruction = static_cast<uint8_t*>(returnAddress) - 13;
-      assert(&c, instruction[0] == 0x49);
-      assert(&c, instruction[1] == 0xba);
-      assert(&c, instruction[10] == 0x41);
-      assert(&c, instruction[11] == 0xff);
-      assert(&c, instruction[12] == 0xd2);
-      assert(&c, reinterpret_cast<uintptr_t>(instruction + 2) % 8 == 0);
+    uint8_t* instruction = static_cast<uint8_t*>(returnAddress) - 5;
+    assert(&c, *instruction == 0xE8);
+    assert(&c, reinterpret_cast<uintptr_t>(instruction + 1) % 4 == 0);
 
-      intptr_t v = reinterpret_cast<intptr_t>(newTarget);
-      memcpy(instruction + 2, &v, 8);
-    } else {
-      uint8_t* instruction = static_cast<uint8_t*>(returnAddress) - 5;
-      assert(&c, *instruction == 0xE8);
-      assert(&c, reinterpret_cast<uintptr_t>(instruction + 1) % 4 == 0);
-
-      int32_t v = static_cast<uint8_t*>(newTarget)
-        - static_cast<uint8_t*>(returnAddress);
-      memcpy(instruction + 1, &v, 4);
-    }
+    int32_t v = static_cast<uint8_t*>(newTarget)
+      - static_cast<uint8_t*>(returnAddress);
+    memcpy(instruction + 1, &v, 4);
   }
 
   virtual void dispose() {