more work on tail recursion

We now create a unique thunk for each vtable position so as to avoid
relying on the return address to determine which method is to be
compiled and invoked, since we will not have the correct return address
in the case of a tail call.  This required refactoring how executable
memory is allocated in order to keep AOT compilation working.  Also, we
must always use the same register to hold the class pointer when
compiling virtual calls, and ensure that the pointer stays there until
the call instruction is executed so we know where to find it in the
thunk.
This commit is contained in:
Joel Dice 2009-04-05 15:42:10 -06:00
parent 5e740170f2
commit fea92ed995
11 changed files with 347 additions and 140 deletions

View File

@ -274,6 +274,8 @@ class Assembler {
virtual unsigned argumentRegisterCount() = 0; virtual unsigned argumentRegisterCount() = 0;
virtual int argumentRegister(unsigned index) = 0; virtual int argumentRegister(unsigned index) = 0;
virtual bool matchCall(void* returnAddress, void* target) = 0;
virtual void updateCall(UnaryOperation op, bool assertAlignment, virtual void updateCall(UnaryOperation op, bool assertAlignment,
void* returnAddress, void* newTarget) = 0; void* returnAddress, void* newTarget) = 0;

View File

@ -37,7 +37,7 @@ makeCodeImage(Thread* t, Zone* zone, BootImage* image, uint8_t* code,
const char* methodName, const char* methodSpec) const char* methodName, const char* methodSpec)
{ {
unsigned size = 0; unsigned size = 0;
t->m->processor->compileThunks(t, image, code, &size, capacity); t->m->processor->initialize(t, image, code, &size, capacity);
object constants = 0; object constants = 0;
PROTECT(t, constants); PROTECT(t, constants);
@ -79,8 +79,7 @@ makeCodeImage(Thread* t, Zone* zone, BootImage* image, uint8_t* code,
== 0))) == 0)))
{ {
t->m->processor->compileMethod t->m->processor->compileMethod
(t, zone, code, &size, capacity, &constants, &calls, &addresses, (t, zone, &constants, &calls, &addresses, method);
method);
} }
} }
} }

View File

@ -39,6 +39,7 @@ class BootImage {
unsigned types; unsigned types;
unsigned methodTree; unsigned methodTree;
unsigned methodTreeSentinal; unsigned methodTreeSentinal;
unsigned virtualThunks;
uintptr_t codeBase; uintptr_t codeBase;

View File

@ -39,6 +39,8 @@ const unsigned MaxNativeCallFootprint = 4;
const unsigned InitialZoneCapacityInBytes = 64 * 1024; const unsigned InitialZoneCapacityInBytes = 64 * 1024;
const unsigned ExecutableAreaSizeInBytes = 16 * 1024 * 1024;
class MyThread: public Thread { class MyThread: public Thread {
public: public:
class CallTrace { class CallTrace {
@ -123,6 +125,19 @@ resolveTarget(MyThread* t, void* stack, object method)
} }
} }
object
resolveTarget(MyThread* t, object class_, unsigned index)
{
if (classVmFlags(t, class_) & BootstrapFlag) {
PROTECT(t, class_);
resolveClass(t, className(t, class_));
if (UNLIKELY(t->exception)) return 0;
}
return arrayBody(t, classVirtualTable(t, class_), index);
}
object& object&
methodTree(MyThread* t); methodTree(MyThread* t);
@ -1360,12 +1375,26 @@ tryInitClass(MyThread* t, object class_)
if (UNLIKELY(t->exception)) unwind(t); if (UNLIKELY(t->exception)) unwind(t);
} }
FixedAllocator*
codeAllocator(MyThread* t);
int64_t int64_t
findInterfaceMethodFromInstance(MyThread* t, object method, object instance) findInterfaceMethodFromInstance(MyThread* t, object method, object instance)
{ {
if (instance) { if (instance) {
return methodAddress object target = findInterfaceMethod(t, method, objectClass(t, instance));
(t, findInterfaceMethod(t, method, objectClass(t, instance)));
if (methodAddress(t, target) == defaultThunk(t)) {
PROTECT(t, target);
compile(t, codeAllocator(t), 0, target);
}
if (UNLIKELY(t->exception)) {
unwind(t);
} else {
return methodAddress(t, target);
}
} else { } else {
t->exception = makeNullPointerException(t); t->exception = makeNullPointerException(t);
unwind(t); unwind(t);
@ -3093,7 +3122,7 @@ compile(MyThread* t, Frame* initialFrame, unsigned ip,
3, c->thread(), frame->append(target), 3, c->thread(), frame->append(target),
c->peek(1, instance)), c->peek(1, instance)),
0, 0,
frame->trace(target, TraceElement::VirtualCall), frame->trace(0, 0),
rSize, rSize,
parameterFootprint); parameterFootprint);
@ -3149,15 +3178,32 @@ compile(MyThread* t, Frame* initialFrame, unsigned ip,
unsigned rSize = resultSize(t, methodReturnCode(t, target)); unsigned rSize = resultSize(t, methodReturnCode(t, target));
Compiler::Operand* result = c->stackCall bool tailCall = isTailCall(t, code, ip, context->method, target);
(c->memory
(c->and_ Compiler::Operand* result;
(BytesPerWord, c->constant(PointerMask), if (tailCall and methodParameterFootprint(t, target)
c->memory(instance, 0, 0, 1)), offset, 0, 1), > methodParameterFootprint(t, context->method))
0, {
frame->trace(target, TraceElement::VirtualCall), result = c->stackCall
rSize, (c->constant(tailHelperThunk(t)),
parameterFootprint); 0,
frame->trace(target, TraceElement::VirtualCall),
rSize,
parameterFootprint);
} else {
c->freezeRegister(t->arch->returnLow(),
c->and_(BytesPerWord, c->constant(PointerMask),
c->memory(instance, 0, 0, 1)));
result = c->stackCall
(c->memory(c->register_(t->arch->returnLow()), offset, 0, 1),
tailCall ? Compiler::TailCall : 0,
frame->trace(0, 0),
rSize,
parameterFootprint);
c->thawRegister(t->arch->returnLow());
}
frame->pop(parameterFootprint); frame->pop(parameterFootprint);
@ -4155,9 +4201,6 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
return eventIndex; return eventIndex;
} }
Zone*
codeZone(MyThread* t);
int int
compareTraceElementPointers(const void* va, const void* vb) compareTraceElementPointers(const void* va, const void* vb)
{ {
@ -4532,32 +4575,21 @@ compileMethod2(MyThread* t, uintptr_t ip)
object target = callNodeTarget(t, node); object target = callNodeTarget(t, node);
PROTECT(t, target); PROTECT(t, target);
if (callNodeFlags(t, node) & TraceElement::VirtualCall) {
target = resolveTarget(t, t->stack, target);
}
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
compile(t, codeZone(t), 0, target); compile(t, codeAllocator(t), 0, target);
} }
if (UNLIKELY(t->exception)) { if (UNLIKELY(t->exception)) {
return 0; return 0;
} else { } else {
void* address = reinterpret_cast<void*>(methodAddress(t, target)); void* address = reinterpret_cast<void*>(methodAddress(t, target));
if (callNodeFlags(t, node) & TraceElement::VirtualCall) { uintptr_t updateIp = ip;
classVtable if (callNode(t, node) & TraceElement::TailCall) {
(t, objectClass updateIp -= t->arch->constantCallSize();
(t, resolveThisPointer(t, t->stack, target)), methodOffset(t, target))
= address;
} else {
uintptr_t updateIp = ip;
if (callNode(t, node) & TraceElement::TailCall) {
updateIp -= t->arch->constantCallSize();
}
updateCall
(t, Call, true, reinterpret_cast<void*>(updateIp), address);
} }
updateCall(t, Call, true, reinterpret_cast<void*>(updateIp), address);
return address; return address;
} }
} }
@ -4582,6 +4614,46 @@ compileMethod(MyThread* t)
} }
} }
void*
compileVirtualMethod2(MyThread* t, object class_, unsigned index)
{
PROTECT(t, class_);
object target = resolveTarget(t, class_, index);
if (LIKELY(t->exception == 0)) {
compile(t, codeAllocator(t), 0, target);
}
if (UNLIKELY(t->exception)) {
return 0;
} else {
void* address = reinterpret_cast<void*>(methodAddress(t, target));
if (address != nativeThunk(t, method)) {
classVtable(t, class_, methodOffset(t, target)) = address;
}
return address;
}
}
uint64_t
compileVirtualMethod(MyThread* t)
{
object class_ = t->virtualCallClass;
t->virtualCallClass = 0;
unsigned index = t->virtualCallIndex;
t->virtualCallIndex = 0;
void* r = compileVirtualMethod2(t, class_, index);
if (UNLIKELY(t->exception)) {
unwind(t);
} else {
return reinterpret_cast<uintptr_t>(r);
}
}
void* void*
tailCall2(MyThread* t, uintptr_t ip) tailCall2(MyThread* t, uintptr_t ip)
{ {
@ -4596,7 +4668,7 @@ tailCall2(MyThread* t, uintptr_t ip)
} }
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
compile(t, codeZone(t), 0, target); compile(t, codeAllocator(t), 0, target);
} }
if (UNLIKELY(t->exception)) { if (UNLIKELY(t->exception)) {
@ -5200,27 +5272,6 @@ processor(MyThread* t);
class MyProcessor: public Processor { class MyProcessor: public Processor {
public: public:
class CodeAllocator: public Allocator {
public:
CodeAllocator(System* s): s(s) { }
virtual void* tryAllocate(unsigned size) {
return s->tryAllocateExecutable(size);
}
virtual void* allocate(unsigned size) {
void* p = tryAllocate(size);
expect(s, p);
return p;
}
virtual void free(const void* p, unsigned size) {
s->freeExecutable(p, size);
}
System* s;
};
MyProcessor(System* s, Allocator* allocator): MyProcessor(System* s, Allocator* allocator):
s(s), s(s),
allocator(allocator), allocator(allocator),
@ -5233,9 +5284,11 @@ class MyProcessor: public Processor {
methodTreeSentinal(0), methodTreeSentinal(0),
objectPools(0), objectPools(0),
staticTableArray(0), staticTableArray(0),
codeAllocator(s), virtualThunks(0),
codeZone(s, &codeAllocator, 64 * 1024) codeAllocator(s, 0, 0)
{ } {
expect(s, codeAllocator.base);
}
virtual Thread* virtual Thread*
makeThread(Machine* m, object javaThread, Thread* parent) makeThread(Machine* m, object javaThread, Thread* parent)
@ -5292,11 +5345,8 @@ class MyProcessor: public Processor {
virtual void virtual void
initVtable(Thread* t, object c) initVtable(Thread* t, object c)
{ {
void* compiled = reinterpret_cast<void*>
(::defaultThunk(static_cast<MyThread*>(t)));
for (unsigned i = 0; i < classLength(t, c); ++i) { for (unsigned i = 0; i < classLength(t, c); ++i) {
classVtable(t, c, i) = compiled; classVtable(t, c, i) = virtualThunk(static_cast<MyThread*>(t), i);
} }
} }
@ -5399,7 +5449,8 @@ class MyProcessor: public Processor {
PROTECT(t, method); PROTECT(t, method);
compile(static_cast<MyThread*>(t), &codeZone, 0, method); compile(static_cast<MyThread*>(t),
::codeAllocator(static_cast<MyThread*>(t)), 0, method);
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
return ::invoke(t, method, &list); return ::invoke(t, method, &list);
@ -5430,7 +5481,8 @@ class MyProcessor: public Processor {
PROTECT(t, method); PROTECT(t, method);
compile(static_cast<MyThread*>(t), &codeZone, 0, method); compile(static_cast<MyThread*>(t),
::codeAllocator(static_cast<MyThread*>(t)), 0, method);
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
return ::invoke(t, method, &list); return ::invoke(t, method, &list);
@ -5460,7 +5512,8 @@ class MyProcessor: public Processor {
PROTECT(t, method); PROTECT(t, method);
compile(static_cast<MyThread*>(t), &codeZone, 0, method); compile(static_cast<MyThread*>(t),
::codeAllocator(static_cast<MyThread*>(t)), 0, method);
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
return ::invoke(t, method, &list); return ::invoke(t, method, &list);
@ -5483,8 +5536,10 @@ class MyProcessor: public Processor {
} }
virtual void dispose() { virtual void dispose() {
codeZone.dispose(); if (codeAllocator.base) {
s->freeExecutable(codeAllocator.base, codeAllocator.capacity);
}
s->handleSegFault(0); s->handleSegFault(0);
allocator->free(this, sizeof(*this)); allocator->free(this, sizeof(*this));
@ -5551,37 +5606,34 @@ class MyProcessor: public Processor {
return visitor.trace; return visitor.trace;
} }
virtual void compileThunks(Thread* vmt, BootImage* image, uint8_t* code, virtual void initialize(Thread* vmt, BootImage* image, uint8_t* code,
unsigned* offset, unsigned capacity) unsigned capacity)
{ {
MyThread* t = static_cast<MyThread*>(vmt); codeAllocator.base = code;
FixedAllocator allocator(t, code + *offset, capacity); codeAllocator.capacity = capacity;
::compileThunks(t, &allocator, this, image, code); ::compileThunks
(static_cast<MyThread*>(vmt), &codeAllocator, this, image, code);
*offset += allocator.offset;
} }
virtual void compileMethod(Thread* vmt, Zone* zone, uint8_t* code, virtual void compileMethod(Thread* vmt, Zone* zone, object* constants,
unsigned* offset, unsigned capacity, object* calls, DelayedPromise** addresses,
object* constants, object* calls, object method)
DelayedPromise** addresses, object method)
{ {
MyThread* t = static_cast<MyThread*>(vmt); MyThread* t = static_cast<MyThread*>(vmt);
FixedAllocator allocator(t, code + *offset, capacity);
BootContext bootContext(t, *constants, *calls, *addresses, zone); BootContext bootContext(t, *constants, *calls, *addresses, zone);
compile(t, &allocator, &bootContext, method); compile(t, &codeAllocator, &bootContext, method);
*constants = bootContext.constants; *constants = bootContext.constants;
*calls = bootContext.calls; *calls = bootContext.calls;
*addresses = bootContext.addresses; *addresses = bootContext.addresses;
*offset += allocator.offset;
} }
virtual void visitRoots(BootImage* image, HeapWalker* w) { virtual void visitRoots(BootImage* image, HeapWalker* w) {
image->methodTree = w->visitRoot(methodTree); image->methodTree = w->visitRoot(methodTree);
image->methodTreeSentinal = w->visitRoot(methodTreeSentinal); image->methodTreeSentinal = w->visitRoot(methodTreeSentinal);
image->virtualThunks = w->visitRoot(virtualThunks);
} }
virtual unsigned* makeCallTable(Thread* t, BootImage* image, HeapWalker* w, virtual unsigned* makeCallTable(Thread* t, BootImage* image, HeapWalker* w,
@ -5606,6 +5658,9 @@ class MyProcessor: public Processor {
} }
virtual void boot(Thread* t, BootImage* image) { virtual void boot(Thread* t, BootImage* image) {
codeAllocator.base = s->tryAllocateExecutable(ExecutableAreaSizeInBytes);
codeAllocator.capacity = ExecutableAreaSizeInBytes;
if (image) { if (image) {
::boot(static_cast<MyThread*>(t), image); ::boot(static_cast<MyThread*>(t), image);
} else { } else {
@ -5615,7 +5670,7 @@ class MyProcessor: public Processor {
set(t, methodTree, TreeNodeLeft, methodTreeSentinal); set(t, methodTree, TreeNodeLeft, methodTreeSentinal);
set(t, methodTree, TreeNodeRight, methodTreeSentinal); set(t, methodTree, TreeNodeRight, methodTreeSentinal);
::compileThunks(static_cast<MyThread*>(t), &codeZone, this, 0, 0); ::compileThunks(static_cast<MyThread*>(t), &codeAllocator, this, 0, 0);
} }
segFaultHandler.m = t->m; segFaultHandler.m = t->m;
@ -5636,9 +5691,9 @@ class MyProcessor: public Processor {
object methodTreeSentinal; object methodTreeSentinal;
object objectPools; object objectPools;
object staticTableArray; object staticTableArray;
object virtualThunks;
SegFaultHandler segFaultHandler; SegFaultHandler segFaultHandler;
CodeAllocator codeAllocator; FixedAllocator codeAllocator;
Zone codeZone;
}; };
object object
@ -5936,6 +5991,20 @@ fixupThunks(MyThread* t, BootImage* image, uint8_t* code)
#undef THUNK #undef THUNK
} }
void
fixupVirtualThunks(MyThread* t, BootImage* image, uint8_t* code)
{
MyProcessor* p = processor(t);
for (unsigned i = 0; i < wordArrayLength(t, p->virtualThunks); ++i) {
if (wordArrayBody(t, p->virtualThunks, 0)) {
wordArrayBody(t, p->virtualThunks, 0)
= (wordArrayBody(t, p->virtualThunks, 0) - image->codeBase)
+ reinterpret_cast<uintptr_t>(code);
}
}
}
void void
boot(MyThread* t, BootImage* image) boot(MyThread* t, BootImage* image)
{ {
@ -5974,6 +6043,8 @@ boot(MyThread* t, BootImage* image)
p->methodTree = bootObject(heap, image->methodTree); p->methodTree = bootObject(heap, image->methodTree);
p->methodTreeSentinal = bootObject(heap, image->methodTreeSentinal); p->methodTreeSentinal = bootObject(heap, image->methodTreeSentinal);
p->virtualThunks = bootObject(heap, image->virtualThunks);
fixupCode(t, codeMap, codeMapSizeInWords, code, heap); fixupCode(t, codeMap, codeMapSizeInWords, code, heap);
syncInstructionCache(code, image->codeSize); syncInstructionCache(code, image->codeSize);
@ -5991,6 +6062,8 @@ boot(MyThread* t, BootImage* image)
fixupThunks(t, image, code); fixupThunks(t, image, code);
fixupVirtualThunks(t, image, code);
fixupMethods(t, image, code); fixupMethods(t, image, code);
t->m->bootstrapClassMap = makeHashMap(t, 0, 0); t->m->bootstrapClassMap = makeHashMap(t, 0, 0);
@ -6046,6 +6119,38 @@ compileThunks(MyThread* t, Allocator* allocator, MyProcessor* p,
a->endBlock(false)->resolve(0, 0); a->endBlock(false)->resolve(0, 0);
} }
ThunkContext defaultVirtualContext(t, &zone);
{ Assembler* a = defaultVirtualContext.context.assembler;
Assembler::Register class_(t->arch->returnLow());
Assembler::Memory virtualCallClass
(t->arch->thread(), difference(&(t->virtualCallClass), t));
a->apply(Move, BytesPerWord, RegisterOperand, &class_,
BytesPerWord, MemoryOperand, &virtualCallClass);
Assembler::Register index(t->arch->returnHigh());
Assembler::Memory virtualCallIndex
(t->arch->thread(), difference(&(t->virtualCallIndex), t));
a->apply(Move, BytesPerWord, RegisterOperand, &index,
BytesPerWord, MemoryOperand, &virtualCallIndex);
a->saveFrame(difference(&(t->stack), t), difference(&(t->base), t));
Assembler::Register thread(t->arch->thread());
a->pushFrame(1, BytesPerWord, RegisterOperand, &thread);
Assembler::Constant proc(&(defaultVirtualContext.promise));
a->apply(LongCall, BytesPerWord, ConstantOperand, &proc);
a->popFrame();
Assembler::Register result(t->arch->returnLow());
a->apply(Jump, BytesPerWord, RegisterOperand, &result);
a->endBlock(false)->resolve(0, 0);
}
ThunkContext tailHelperContext(t, &zone); ThunkContext tailHelperContext(t, &zone);
{ Assembler* a = tailHelperContext.context.assembler; { Assembler* a = tailHelperContext.context.assembler;
@ -6119,18 +6224,6 @@ compileThunks(MyThread* t, Allocator* allocator, MyProcessor* p,
p->thunkSize = pad(tableContext.context.assembler->length()); p->thunkSize = pad(tableContext.context.assembler->length());
expect(t, codeZone(t)->ensure
(codeSingletonSizeInBytes
(t, defaultContext.context.assembler->length())
+ codeSingletonSizeInBytes
(t, tailHelperContext.context.assembler->length())
+ codeSingletonSizeInBytes
(t, nativeContext.context.assembler->length())
+ codeSingletonSizeInBytes
(t, aioobContext.context.assembler->length())
+ codeSingletonSizeInBytes
(t, p->thunkSize * ThunkCount)));
p->defaultTailThunk = finish p->defaultTailThunk = finish
(t, allocator, defaultContext.context.assembler, "default"); (t, allocator, defaultContext.context.assembler, "default");
@ -6147,6 +6240,20 @@ compileThunks(MyThread* t, Allocator* allocator, MyProcessor* p,
} }
} }
p->defaultVirtualThunk = finish
(t, allocator, defaultVirtualContext.context.assembler, "defaultVirtual");
{ void* call;
defaultVirtualContext.promise.listener->resolve
(reinterpret_cast<intptr_t>(voidPointer(compileVirtualMethod)), &call);
if (image) {
image->defaultVirtualThunk = p->defaultVirtualThunk - imageBase;
image->compileVirtualMethodCall
= static_cast<uint8_t*>(call) - imageBase;
}
}
p->tailHelperThunk = finish p->tailHelperThunk = finish
(t, allocator, defaultContext.context.assembler, "tailHelper"); (t, allocator, defaultContext.context.assembler, "tailHelper");
@ -6249,6 +6356,51 @@ aioobThunk(MyThread* t)
return reinterpret_cast<uintptr_t>(processor(t)->aioobThunk); return reinterpret_cast<uintptr_t>(processor(t)->aioobThunk);
} }
uintptr_t
compileVirtualThunk(MyThread* t, unsigned index)
{
Context context;
Assembler* a = context.assembler;
Assembler::Constant indexConstant(index);
Assembler::Register indexRegister(t->arch->returnHigh());
a->apply(Move, BytesPerWord, ConstantOperand, &indexConstant,
BytesPerWord, ConstantOperand, &indexRegister);
Assembler::Constant thunk(defaultVirtualThunk(t));
a->apply(Jump, BytesPerWord, ConstantOperand, &thunk);
uint8_t* start = codeAllocator(t)->allocate(a->length());
a->writeTo(start);
}
uintptr_t
virtualThunk(MyThread* t, unsigned index)
{
MyProcessor* p = processor(t);
if (p->virtualThunks == 0 or wordArrayLength(t, p->virtualThunks) <= index) {
object newArray = makeWordArray(t, nextPowerOfTwo(index));
if (p->virtualThunks) {
memcpy(&wordArrayBody(t, newArray, 0),
&wordArrayBody(t, p->virtualThunks, 0),
wordArrayLength(t, p->virtualThunks) * BytesPerWord);
}
p->virtualThunks = newArray;
}
if (wordArrayBody(t, p->virtualThunks, index) == 0) {
ACQUIRE(t, t->m->classLock);
if (wordArrayBody(t, p->virtualThunks, index) == 0) {
uintptr_t thunk = compileVirtualThunk(t, index);
wordArrayBody(t, p->virtualThunks, index) = thunk;
}
}
return wordArrayBody(t, p->virtualThunks, index);
}
void void
compile(MyThread* t, Allocator* allocator, BootContext* bootContext, compile(MyThread* t, Allocator* allocator, BootContext* bootContext,
object method) object method)
@ -6330,9 +6482,10 @@ methodTreeSentinal(MyThread* t)
return processor(t)->methodTreeSentinal; return processor(t)->methodTreeSentinal;
} }
Zone* FixedAllocator*
codeZone(MyThread* t) { codeAllocator(MyThread* t)
return &(processor(t)->codeZone); {
return &(processor(t)->codeAllocator);
} }
} // namespace } // namespace

View File

@ -1194,7 +1194,7 @@ pickTarget(Context* c, Read* read, bool intersectRead,
? 0 : Target::Penalty); ? 0 : Target::Penalty);
Target best; Target best;
if ((mask.typeMask & (1 << RegisterOperand))) { if (mask.typeMask & (1 << RegisterOperand)) {
Target mine = pickRegisterTarget(c, read->value, mask.registerMask); Target mine = pickRegisterTarget(c, read->value, mask.registerMask);
mine.cost += registerPenalty; mine.cost += registerPenalty;
@ -3842,6 +3842,61 @@ appendSaveLocals(Context* c)
SaveLocalsEvent(c)); SaveLocalsEvent(c));
} }
class FreezeRegisterEvent: public Event {
public:
FreezeRegisterEvent(Context* c, int number, Value* value):
Event(c), number(number), value(value)
{
addRead(c, this, value, fixedRegisterRead(c, number));
}
virtual const char* name() {
return "FreezeRegisterEvent";
}
virtual void compile(Context* c) {
c->registers[number].freeze(c, value);
for (Read* r = reads; r; r = r->eventNext) {
popRead(c, this, r->value);
}
}
int number;
Value* value;
};
void
appendFreezeRegister(Context* c, int number, Value* value)
{
append(c, new (c->zone->allocate(sizeof(FreezeRegisterEvent)))
FreezeRegisterEvent(c, number, value));
}
class ThawRegisterEvent: public Event {
public:
ThawRegisterEvent(Context* c, int number):
Event(c), number(number)
{ }
virtual const char* name() {
return "ThawRegisterEvent";
}
virtual void compile(Context* c) {
c->registers[number].thaw(c, value);
}
int number;
};
void
appendThawRegister(Context* c, int number, Value* value)
{
append(c, new (c->zone->allocate(sizeof(ThawRegisterEvent)))
ThawRegisterEvent(c, number, value));
}
class DummyEvent: public Event { class DummyEvent: public Event {
public: public:
DummyEvent(Context* c): DummyEvent(Context* c):
@ -4925,6 +4980,14 @@ class MyCompiler: public Compiler {
return value(&c, s, s); return value(&c, s, s);
} }
virtual void freezeRegister(int number, Operand* value) {
appendFreezeRegister(&c, number, static_cast<Value*>(value));
}
virtual void thawRegister(int number) {
appendThawRegister(&c, number);
}
Promise* machineIp() { Promise* machineIp() {
return codePromise(&c, c.logicalCode[c.logicalIp]->lastEvent); return codePromise(&c, c.logicalCode[c.logicalIp]->lastEvent);
} }

View File

@ -60,8 +60,11 @@ class Compiler {
Operand* index = 0, Operand* index = 0,
unsigned scale = 1) = 0; unsigned scale = 1) = 0;
virtual Operand* stack() = 0;
virtual Operand* thread() = 0; virtual Operand* thread() = 0;
virtual Operand* stack() = 0;
virtual void freezeRegister(int number, Operand* value) = 0;
virtual void thawRegister(int number) = 0;
virtual void push(unsigned footprint) = 0; virtual void push(unsigned footprint) = 0;
virtual void push(unsigned footprint, Operand* value) = 0; virtual void push(unsigned footprint, Operand* value) = 0;

View File

@ -1454,17 +1454,17 @@ expect(Thread* t, bool v)
class FixedAllocator: public Allocator { class FixedAllocator: public Allocator {
public: public:
FixedAllocator(Thread* t, uint8_t* base, unsigned capacity): FixedAllocator(System* s, uint8_t* base, unsigned capacity):
t(t), base(base), offset(0), capacity(capacity) s(s), base(base), offset(0), capacity(capacity)
{ } { }
virtual void* tryAllocate(unsigned) { virtual void* tryAllocate(unsigned) {
abort(t); abort(s);
} }
virtual void* allocate(unsigned size) { virtual void* allocate(unsigned size) {
unsigned paddedSize = pad(size); unsigned paddedSize = pad(size);
expect(t, offset + paddedSize < capacity); expect(s, offset + paddedSize < capacity);
void* p = base + offset; void* p = base + offset;
offset += paddedSize; offset += paddedSize;
@ -1472,10 +1472,10 @@ class FixedAllocator: public Allocator {
} }
virtual void free(const void*, unsigned) { virtual void free(const void*, unsigned) {
abort(t); abort(s);
} }
Thread* t; System* s;
uint8_t* base; uint8_t* base;
unsigned offset; unsigned offset;
unsigned capacity; unsigned capacity;

View File

@ -63,8 +63,6 @@ const unsigned VisitSignalIndex = 0;
const unsigned SegFaultSignalIndex = 1; const unsigned SegFaultSignalIndex = 1;
const unsigned InterruptSignalIndex = 2; const unsigned InterruptSignalIndex = 2;
const unsigned ExecutableAreaSizeInBytes = 16 * 1024 * 1024;
class MySystem; class MySystem;
MySystem* system; MySystem* system;
@ -518,9 +516,7 @@ class MySystem: public System {
MySystem(): MySystem():
threadVisitor(0), threadVisitor(0),
visitTarget(0), visitTarget(0)
executableArea(0),
executableOffset(0)
{ {
expect(this, system == 0); expect(this, system == 0);
system = this; system = this;
@ -559,33 +555,23 @@ class MySystem: public System {
} }
virtual void* tryAllocateExecutable(unsigned sizeInBytes) { virtual void* tryAllocateExecutable(unsigned sizeInBytes) {
if (executableArea == 0) {
#ifdef __x86_64__ #ifdef __x86_64__
const unsigned Extra = MAP_32BIT; const unsigned Extra = MAP_32BIT;
#else #else
const unsigned Extra = 0; const unsigned Extra = 0;
#endif #endif
void* p = mmap(0, ExecutableAreaSizeInBytes, PROT_EXEC | PROT_READ void* p = mmap(0, sizeInBytes, PROT_EXEC | PROT_READ
| PROT_WRITE, MAP_PRIVATE | MAP_ANON | Extra, -1, 0); | PROT_WRITE, MAP_PRIVATE | MAP_ANON | Extra, -1, 0);
if (p != MAP_FAILED) { if (p == MAP_FAILED) {
executableArea = static_cast<uint8_t*>(p);
}
}
if (executableArea
and executableOffset + pad(sizeInBytes) < ExecutableAreaSizeInBytes)
{
void* r = executableArea + executableOffset;
executableOffset += pad(sizeInBytes);
return r;
} else {
return 0; return 0;
} else {
return static_cast<uint8_t*>(p);
} }
} }
virtual void freeExecutable(const void*, unsigned) { virtual void freeExecutable(const void* p, unsigned sizeInBytes) {
// ignore munmap(p, sizeInBytes);
} }
virtual bool success(Status s) { virtual bool success(Status s) {

View File

@ -117,12 +117,11 @@ class Processor {
getStackTrace(Thread* t, Thread* target) = 0; getStackTrace(Thread* t, Thread* target) = 0;
virtual void virtual void
compileThunks(Thread* t, BootImage* image, uint8_t* code, unsigned* size, initialize(Thread* t, BootImage* image, uint8_t* code,
unsigned capacity) = 0; unsigned capacity) = 0;
virtual void virtual void
compileMethod(Thread* t, Zone* zone, uint8_t* code, unsigned* offset, compileMethod(Thread* t, Zone* zone, object* constants, object* calls,
unsigned capacity, object* constants, object* calls,
DelayedPromise** addresses, object method) = 0; DelayedPromise** addresses, object method) = 0;
virtual void virtual void

View File

@ -97,6 +97,9 @@
(uintptr_t flags) (uintptr_t flags)
(object next)) (object next))
(type wordArray
(array uintptr_t body))
(type array (type array
(noassert array object body)) (noassert array object body))

View File

@ -523,8 +523,6 @@ class MySystem: public System {
} }
virtual void* tryAllocateExecutable(unsigned sizeInBytes) { virtual void* tryAllocateExecutable(unsigned sizeInBytes) {
assert(this, sizeInBytes % LikelyPageSizeInBytes == 0);
return VirtualAlloc return VirtualAlloc
(0, sizeInBytes, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE); (0, sizeInBytes, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
} }