Fix stack-walking code so GC roots on the stack are visited correctly in the presence of tail calls; also avoid generating unreachable jumps.

This commit is contained in:
Joel Dice 2009-04-26 19:53:42 -06:00
parent 299699f1ff
commit 50529969f9
4 changed files with 109 additions and 106 deletions

View File

@ -198,12 +198,6 @@ class DelayedPromise: public ListenPromise {
DelayedPromise* next; DelayedPromise* next;
}; };
// Callback interface used during code generation to record the machine
// address of each call site.  This older form also carried frame-map
// padding information; the commit removes it in favor of a single
// argumentIndex parameter declared in the compiler's public header.
class TraceHandler {
public:
// address: promise resolving to the call instruction's address once the
// code is placed.  padIndex/padding: presumably where alignment padding
// was inserted into the frame map at this call site -- removed by this
// commit, see the replacement declaration in the compiler header.
virtual void handleTrace(Promise* address, unsigned padIndex,
unsigned padding) = 0;
};
class Assembler { class Assembler {
public: public:
class Operand { }; class Operand { };

View File

@ -50,6 +50,7 @@ class MyThread: public Thread {
base(t->base), base(t->base),
stack(t->stack), stack(t->stack),
nativeMethod(0), nativeMethod(0),
targetMethod(0),
next(t->trace) next(t->trace)
{ {
t->trace = this; t->trace = this;
@ -68,6 +69,7 @@ class MyThread: public Thread {
void* base; void* base;
void* stack; void* stack;
object nativeMethod; object nativeMethod;
object targetMethod;
CallTrace* next; CallTrace* next;
}; };
@ -464,18 +466,14 @@ class TraceElement: public TraceHandler {
address(0), address(0),
next(next), next(next),
target(target), target(target),
padIndex(0), argumentIndex(0),
padding(0),
flags(flags) flags(flags)
{ } { }
virtual void handleTrace(Promise* address, unsigned padIndex, virtual void handleTrace(Promise* address, unsigned argumentIndex) {
unsigned padding)
{
if (this->address == 0) { if (this->address == 0) {
this->address = address; this->address = address;
this->padIndex = padIndex; this->argumentIndex = argumentIndex;
this->padding = padding;
} }
} }
@ -483,8 +481,7 @@ class TraceElement: public TraceHandler {
Promise* address; Promise* address;
TraceElement* next; TraceElement* next;
object target; object target;
unsigned padIndex; unsigned argumentIndex;
unsigned padding;
unsigned flags; unsigned flags;
uintptr_t map[0]; uintptr_t map[0];
}; };
@ -4092,31 +4089,9 @@ printSet(uintptr_t m, unsigned limit)
} }
} }
// Shift the LSB-first bit map held in "data" down by shiftCountInBits
// bit positions (bit b moves to bit b - shiftCountInBits), zero-filling
// the vacated high-order bytes.  Bits shifted below position zero are
// discarded.  A shift of zero bits leaves the buffer unchanged except
// for the (empty) zero fill.
//
// Fixes relative to the previous version:
//  - the borrow from the following byte was written as
//    "(p[si + 1] >> shift) << (8 - shift)", which pre-discards the low
//    "shift" bits -- exactly the bits that must be merged in.  The
//    correct borrow is "p[si + 1] << (8 - shift)" (truncated to 8 bits
//    by the uint8_t store).
//  - when shiftCountInBytes exceeds dataSizeInBytes, count + 1 is
//    negative and the zero-fill loop wrote before the buffer (p[-1],
//    ...).  The start index is now clamped at zero.
void
shiftLeftZeroPadded(void* data, int dataSizeInBytes, int shiftCountInBits)
{
  uint8_t* p = static_cast<uint8_t*>(data);
  int shiftCountInBytes = shiftCountInBits / 8;
  int shift = shiftCountInBits % 8;

  // Index of the last destination byte that has any source data; may be
  // negative if the shift is at least as large as the buffer.
  int count = dataSizeInBytes - shiftCountInBytes - 1;

  for (int i = 0; i < count; ++i) {
    int si = i + shiftCountInBytes;
    // Low bits come from p[si]; the high "shift" bits are the low bits
    // of p[si + 1].  When shift is zero, the left operand is promoted
    // to int, shifted by 8, and truncated back to zero on store.
    p[i] = (p[si] >> shift) | (p[si + 1] << (8 - shift));
  }

  if (count >= 0) {
    // Last byte with a source: there is no following byte to borrow
    // bits from.
    p[count] = (p[count + shiftCountInBytes] >> shift);
  }

  // Zero the vacated tail; clamp at zero so an oversize shift cannot
  // write out of bounds.
  for (int i = (count >= 0 ? count + 1 : 0); i < dataSizeInBytes; ++i) {
    p[i] = 0;
  }
}
unsigned unsigned
calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots, calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
unsigned stackPadding, unsigned eventIndex) unsigned eventIndex)
{ {
// for each instruction with more than one predecessor, and for each // for each instruction with more than one predecessor, and for each
// stack position, determine if there exists a path to that // stack position, determine if there exists a path to that
@ -4124,7 +4099,6 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
// stack position (i.e. it is uninitialized or contains primitive // stack position (i.e. it is uninitialized or contains primitive
// data). // data).
unsigned localSize = ::localSize(t, context->method);
unsigned mapSize = frameMapSizeInWords(t, context->method); unsigned mapSize = frameMapSizeInWords(t, context->method);
uintptr_t roots[mapSize]; uintptr_t roots[mapSize];
@ -4149,8 +4123,7 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
Event e = static_cast<Event>(context->eventLog.get(eventIndex++)); Event e = static_cast<Event>(context->eventLog.get(eventIndex++));
switch (e) { switch (e) {
case PushContextEvent: { case PushContextEvent: {
eventIndex = calculateFrameMaps eventIndex = calculateFrameMaps(t, context, roots, eventIndex);
(t, context, roots, stackPadding, eventIndex);
} break; } break;
case PopContextEvent: case PopContextEvent:
@ -4201,10 +4174,6 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
unsigned i = context->eventLog.get2(eventIndex); unsigned i = context->eventLog.get2(eventIndex);
eventIndex += 2; eventIndex += 2;
if (i >= localSize) {
i += stackPadding;
}
markBit(roots, i); markBit(roots, i);
} break; } break;
@ -4212,10 +4181,6 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
unsigned i = context->eventLog.get2(eventIndex); unsigned i = context->eventLog.get2(eventIndex);
eventIndex += 2; eventIndex += 2;
if (i >= localSize) {
i += stackPadding;
}
clearBit(roots, i); clearBit(roots, i);
} break; } break;
@ -4229,12 +4194,6 @@ calculateFrameMaps(MyThread* t, Context* context, uintptr_t* originalRoots,
memcpy(te->map, roots, mapSize * BytesPerWord); memcpy(te->map, roots, mapSize * BytesPerWord);
if (te->flags & TraceElement::TailCall) {
shiftLeftZeroPadded
(te->map, mapSize * BytesPerWord,
usableFrameSize(t, context->method));
}
eventIndex += BytesPerWord; eventIndex += BytesPerWord;
} break; } break;
@ -4412,22 +4371,8 @@ finish(MyThread* t, Allocator* allocator, Context* context)
(p->address->value())); (p->address->value()));
} }
for (unsigned j = 0, k = 0; j < size; ++j, ++k) { for (unsigned j = 0; j < p->argumentIndex; ++j) {
if (j == p->padIndex) { if (getBit(p->map, j)) {
unsigned limit = j + p->padding;
assert(t, limit <= size);
for (; j < limit; ++j) {
if (DebugFrameMaps) {
fprintf(stderr, "_");
}
clearBit(t, map, context->traceLogCount, size, i, j);
}
if (j == size) break;
}
if (getBit(p->map, k)) {
if (DebugFrameMaps) { if (DebugFrameMaps) {
fprintf(stderr, "1"); fprintf(stderr, "1");
} }
@ -4534,7 +4479,7 @@ compile(MyThread* t, Allocator* allocator, Context* context)
if (UNLIKELY(t->exception)) return 0; if (UNLIKELY(t->exception)) return 0;
context->dirtyRoots = false; context->dirtyRoots = false;
unsigned eventIndex = calculateFrameMaps(t, context, 0, 0, 0); unsigned eventIndex = calculateFrameMaps(t, context, 0, 0);
object eht = codeExceptionHandlerTable(t, methodCode(t, context->method)); object eht = codeExceptionHandlerTable(t, methodCode(t, context->method));
if (eht) { if (eht) {
@ -4583,7 +4528,7 @@ compile(MyThread* t, Allocator* allocator, Context* context)
compile(t, &frame2, exceptionHandlerIp(eh), start); compile(t, &frame2, exceptionHandlerIp(eh), start);
if (UNLIKELY(t->exception)) return 0; if (UNLIKELY(t->exception)) return 0;
eventIndex = calculateFrameMaps(t, context, 0, 0, eventIndex); eventIndex = calculateFrameMaps(t, context, 0, eventIndex);
} }
} }
@ -4593,7 +4538,7 @@ compile(MyThread* t, Allocator* allocator, Context* context)
while (context->dirtyRoots) { while (context->dirtyRoots) {
context->dirtyRoots = false; context->dirtyRoots = false;
calculateFrameMaps(t, context, 0, 0, 0); calculateFrameMaps(t, context, 0, 0);
} }
return finish(t, allocator, context); return finish(t, allocator, context);
@ -4610,13 +4555,17 @@ void*
compileMethod2(MyThread* t, void* ip) compileMethod2(MyThread* t, void* ip)
{ {
object node = findCallNode(t, ip); object node = findCallNode(t, ip);
PROTECT(t, node);
object target = callNodeTarget(t, node); object target = callNodeTarget(t, node);
PROTECT(t, target);
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
PROTECT(t, node);
PROTECT(t, target);
t->trace->targetMethod = target;
compile(t, codeAllocator(t), 0, target); compile(t, codeAllocator(t), 0, target);
t->trace->targetMethod = 0;
} }
if (UNLIKELY(t->exception)) { if (UNLIKELY(t->exception)) {
@ -4655,6 +4604,18 @@ compileMethod(MyThread* t)
void* void*
compileVirtualMethod2(MyThread* t, object class_, unsigned index) compileVirtualMethod2(MyThread* t, object class_, unsigned index)
{ {
// If class_ has BootstrapFlag set, that means its vtable is not yet
// available. However, we must set t->trace->targetMethod to an
// appropriate method to ensure we can accurately scan the stack for
// GC roots. We find such a method by looking for a superclass with
// a vtable and using it instead:
object c = class_;
while (classVmFlags(t, c) & BootstrapFlag) {
c = classSuper(t, c);
}
t->trace->targetMethod = arrayBody(t, classVirtualTable(t, c), index);
PROTECT(t, class_); PROTECT(t, class_);
object target = resolveTarget(t, class_, index); object target = resolveTarget(t, class_, index);
@ -4663,6 +4624,8 @@ compileVirtualMethod2(MyThread* t, object class_, unsigned index)
compile(t, codeAllocator(t), 0, target); compile(t, codeAllocator(t), 0, target);
} }
t->trace->targetMethod = 0;
if (UNLIKELY(t->exception)) { if (UNLIKELY(t->exception)) {
return 0; return 0;
} else { } else {
@ -4910,10 +4873,13 @@ invokeNative(MyThread* t)
uint64_t result = 0; uint64_t result = 0;
t->trace->targetMethod = t->trace->nativeMethod;
if (LIKELY(t->exception == 0)) { if (LIKELY(t->exception == 0)) {
result = invokeNative2(t, t->trace->nativeMethod); result = invokeNative2(t, t->trace->nativeMethod);
} }
t->trace->targetMethod = 0;
t->trace->nativeMethod = 0; t->trace->nativeMethod = 0;
if (UNLIKELY(t->exception)) { if (UNLIKELY(t->exception)) {
@ -4951,14 +4917,9 @@ frameMapIndex(MyThread* t, object method, int32_t offset)
void void
visitStackAndLocals(MyThread* t, Heap::Visitor* v, void* frame, object method, visitStackAndLocals(MyThread* t, Heap::Visitor* v, void* frame, object method,
void* ip, bool skipArguments, unsigned argumentFootprint) void* ip)
{ {
unsigned count; unsigned count = usableFrameSizeWithParameters(t, method);
if (skipArguments) {
count = usableFrameSizeWithParameters(t, method) - argumentFootprint;
} else {
count = usableFrameSizeWithParameters(t, method);
}
if (count) { if (count) {
object map = codePool(t, methodCode(t, method)); object map = codePool(t, methodCode(t, method));
@ -4979,6 +4940,49 @@ visitStackAndLocals(MyThread* t, Heap::Visitor* v, void* frame, object method,
} }
} }
// Visit a single stack-passed argument slot as a GC root.  Argument
// "index" lives (parameterFootprint - index - 1) slots above the frame
// footer and saved return address at the top of the caller's frame.
void
visitArgument(MyThread* t, Heap::Visitor* v, void* stack, object method,
              unsigned index)
{
  object* slots = static_cast<object*>(stack);
  unsigned offset = methodParameterFootprint(t, method) - index - 1;
  offset += t->arch->frameReturnAddressSize();
  offset += t->arch->frameFooterSize();
  v->visit(slots + offset);
}
// Walk a method's parameter list and visit every reference-typed
// argument slot on the stack as a GC root.  Primitive slots are
// skipped; long and double arguments occupy two slots each.
void
visitArguments(MyThread* t, Heap::Visitor* v, void* stack, object method)
{
  unsigned slot = 0;

  // Non-static methods receive an implicit "this" reference first.
  if ((methodFlags(t, method) & ACC_STATIC) == 0) {
    visitArgument(t, v, stack, method, slot++);
  }

  MethodSpecIterator it
    (t, reinterpret_cast<const char*>
     (&byteArrayBody(t, methodSpec(t, method), 0)));

  while (it.hasNext()) {
    char type = *it.next();
    if (type == 'L' or type == '[') {
      // Object or array reference: a GC root.
      visitArgument(t, v, stack, method, slot++);
    } else if (type == 'J' or type == 'D') {
      // long/double take two slots and hold no references.
      slot += 2;
    } else {
      // Any other primitive takes one slot.
      ++ slot;
    }
  }
}
void void
visitStack(MyThread* t, Heap::Visitor* v) visitStack(MyThread* t, Heap::Visitor* v)
{ {
@ -4990,30 +4994,32 @@ visitStack(MyThread* t, Heap::Visitor* v)
} }
MyThread::CallTrace* trace = t->trace; MyThread::CallTrace* trace = t->trace;
bool skipArguments = false; object targetMethod = (trace ? trace->targetMethod : 0);
unsigned argumentFootprint = 0;
while (stack) { while (stack) {
object method = methodForIp(t, ip); object method = methodForIp(t, ip);
if (method) { if (method) {
PROTECT(t, method); PROTECT(t, method);
if (targetMethod) {
visitArguments(t, v, stack, targetMethod);
targetMethod = 0;
}
t->arch->nextFrame(&stack, &base); t->arch->nextFrame(&stack, &base);
visitStackAndLocals visitStackAndLocals(t, v, stack, method, ip);
(t, v, stack, method, ip, skipArguments, argumentFootprint);
skipArguments = true;
argumentFootprint = methodParameterFootprint(t, method);
ip = t->arch->frameIp(stack); ip = t->arch->frameIp(stack);
} else if (trace) { } else if (trace) {
skipArguments = false;
argumentFootprint = 0;
stack = trace->stack; stack = trace->stack;
base = trace->base; base = trace->base;
ip = t->arch->frameIp(stack); ip = t->arch->frameIp(stack);
trace = trace->next; trace = trace->next;
if (trace) {
targetMethod = trace->targetMethod;
}
} else { } else {
break; break;
} }
@ -5380,6 +5386,7 @@ class MyProcessor: public Processor {
for (MyThread::CallTrace* trace = t->trace; trace; trace = trace->next) { for (MyThread::CallTrace* trace = t->trace; trace; trace = trace->next) {
v->visit(&(trace->nativeMethod)); v->visit(&(trace->nativeMethod));
v->visit(&(trace->targetMethod));
} }
for (Reference* r = t->reference; r; r = r->next) { for (Reference* r = t->reference; r; r = r->next) {

View File

@ -2318,8 +2318,7 @@ class CallEvent: public Event {
returnAddressSurrogate(0), returnAddressSurrogate(0),
framePointerSurrogate(0), framePointerSurrogate(0),
popIndex(0), popIndex(0),
padIndex(0), stackArgumentIndex(0),
padding(0),
flags(flags), flags(flags),
resultSize(resultSize), resultSize(resultSize),
stackArgumentFootprint(stackArgumentFootprint) stackArgumentFootprint(stackArgumentFootprint)
@ -2428,19 +2427,18 @@ class CallEvent: public Event {
-- footprint; -- footprint;
if (footprint == 0 and (flags & Compiler::TailJump) == 0) { if (footprint == 0 and (flags & Compiler::TailJump) == 0) {
unsigned logicalIndex = ::frameIndex stackArgumentIndex = s->index + c->localFootprint;
(c, s->index + c->localFootprint);
assert(c, logicalIndex >= frameIndex);
padding = logicalIndex - frameIndex;
padIndex = s->index + c->localFootprint;
} }
++ frameIndex; ++ frameIndex;
} }
if ((flags & Compiler::TailJump) == 0) { if ((flags & Compiler::TailJump) == 0) {
if (stackArgumentFootprint == 0) {
stackArgumentIndex = (stackBefore ? stackBefore->index + 1 : 0)
+ c->localFootprint;
}
popIndex popIndex
= c->alignedFrameSize = c->alignedFrameSize
- c->arch->frameFooterSize() - c->arch->frameFooterSize()
@ -2507,7 +2505,7 @@ class CallEvent: public Event {
if (traceHandler) { if (traceHandler) {
traceHandler->handleTrace(codePromise(c, c->assembler->offset()), traceHandler->handleTrace(codePromise(c, c->assembler->offset()),
padIndex, padding); stackArgumentIndex);
} }
if (flags & Compiler::TailJump) { if (flags & Compiler::TailJump) {
@ -2552,8 +2550,7 @@ class CallEvent: public Event {
Value* returnAddressSurrogate; Value* returnAddressSurrogate;
Value* framePointerSurrogate; Value* framePointerSurrogate;
unsigned popIndex; unsigned popIndex;
unsigned padIndex; unsigned stackArgumentIndex;
unsigned padding;
unsigned flags; unsigned flags;
unsigned resultSize; unsigned resultSize;
unsigned stackArgumentFootprint; unsigned stackArgumentFootprint;
@ -3777,7 +3774,7 @@ class BranchEvent: public Event {
jump = true; jump = true;
} }
if (jump) { if (jump and not unreachable(this)) {
apply(c, type, BytesPerWord, address->source, 0); apply(c, type, BytesPerWord, address->source, 0);
} }

View File

@ -17,6 +17,11 @@
namespace vm { namespace vm {
// Callback interface used by the compiler to record the machine address
// of each call site as it is emitted, together with the frame index of
// the first stack argument (used when building GC frame maps -- see
// CallEvent, which passes stackArgumentIndex, and TraceElement, which
// stores it and later bounds the frame-map scan with it).
class TraceHandler {
public:
// address: promise that resolves to the call's code address once the
// generated code is placed.  argumentIndex: frame index of the first
// stack argument at this call site.
virtual void handleTrace(Promise* address, unsigned argumentIndex) = 0;
};
class Compiler { class Compiler {
public: public:
class Client { class Client {