pre-allocate Thread::backupHeap for signal safety

It's not safe to use malloc from a signal handler, so we can't
allocate new memory when handling segfaults or Thread.getStackTrace
signals.  Instead, we allocate a fixed-size backup heap for each
thread ahead of time and use it if there's no space left in the normal
heap pool.  In the rare case that the backup heap isn't large enough,
we fall back to using a preallocated exception without a stack trace
as a last resort.
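
In outline: each Thread now carries a small fixed-size array that ensure()
arms when the normal thread heap is full, and allocate3() then carves objects
out of that array, so the signal path never touches the system allocator.
A minimal standalone sketch of the contract (simplified; the constant values
and field names mirror the diffs below, but the real Thread has many more
fields and the real ensure() appears in the third diff):

#include <stdint.h>
#include <string.h>

const unsigned ThreadHeapSizeInWords = (64 * 1024) / sizeof(uintptr_t);
const unsigned ThreadBackupHeapSizeInBytes = 2 * 1024;
const unsigned ThreadBackupHeapSizeInWords
= ThreadBackupHeapSizeInBytes / sizeof(uintptr_t);

struct Thread {
  unsigned heapIndex;       // words already used in the normal thread heap
  uintptr_t backupHeap[ThreadBackupHeapSizeInWords];  // reserved up front
  unsigned backupHeapIndex; // next free word in backupHeap
  bool useBackupHeap;       // true while backupHeap is the active heap
};

// Signal-safe: either arms the pre-allocated backup heap or reports failure;
// it never calls malloc.
inline bool
ensure(Thread* t, unsigned sizeInBytes)
{
  unsigned words = (sizeInBytes + sizeof(uintptr_t) - 1) / sizeof(uintptr_t);
  if (t->heapIndex + words <= ThreadHeapSizeInWords) {
    return true;   // normal heap still has room
  } else if (sizeInBytes <= ThreadBackupHeapSizeInBytes) {
    memset(t->backupHeap, 0, sizeInBytes);
    t->useBackupHeap = true;
    t->backupHeapIndex = 0;
    return true;   // allocations now come from backupHeap
  } else {
    return false;  // caller falls back to a preallocated exception
  }
}

Callers check the result: on true they build the exception and stack trace as
before; on false they install the preallocated Machine::nullPointerException
or Machine::arrayIndexOutOfBoundsException instance, which is why those two
objects are now created eagerly in Thread::init and visited as GC roots.
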
Joel Dice 2010-06-19 16:40:21 -06:00
parent 7ea6036842
commit 3018290238
3 changed files with 62 additions and 38 deletions


@@ -2587,11 +2587,15 @@ traceSize(Thread* t)
 void NO_RETURN
 throwArrayIndexOutOfBounds(MyThread* t)
 {
-  ensure(t, FixedSizeOfArrayIndexOutOfBoundsException + traceSize(t));
-  t->tracing = true;
-  t->exception = makeArrayIndexOutOfBoundsException(t, 0);
-  t->tracing = false;
+  if (ensure(t, FixedSizeOfArrayIndexOutOfBoundsException + traceSize(t))) {
+    t->tracing = true;
+    t->exception = makeArrayIndexOutOfBoundsException(t, 0);
+    t->tracing = false;
+  } else {
+    // not enough memory available for a new exception and stack trace
+    // -- use a preallocated instance instead
+    t->exception = t->m->arrayIndexOutOfBoundsException;
+  }
   unwind(t);
 }
@@ -2644,7 +2648,7 @@ makeNew64(Thread* t, object class_)
 void
 gcIfNecessary(MyThread* t)
 {
-  if (UNLIKELY(t->backupHeap)) {
+  if (UNLIKELY(t->useBackupHeap)) {
     collect(t, Heap::MinorCollection);
   }
 }
@@ -7093,7 +7097,7 @@ invoke(Thread* thread, object method, ArgumentList* arguments)
   }
   if (t->exception) {
-    if (t->backupHeap) {
+    if (UNLIKELY(t->useBackupHeap)) {
      collect(t, Heap::MinorCollection);
    }
    return 0;
@@ -7148,11 +7152,15 @@ class SegFaultHandler: public System::SignalHandler {
           static_cast<void**>(*stack) - t->arch->frameReturnAddressSize(),
           *base, t->continuation, t->trace);
-        ensure(t, FixedSizeOfNullPointerException + traceSize(t));
-        t->tracing = true;
-        t->exception = makeNullPointerException(t);
-        t->tracing = false;
+        if (ensure(t, FixedSizeOfNullPointerException + traceSize(t))) {
+          t->tracing = true;
+          t->exception = makeNullPointerException(t);
+          t->tracing = false;
+        } else {
+          // not enough memory available for a new NPE and stack trace
+          // -- use a preallocated instance instead
+          t->exception = t->m->nullPointerException;
+        }
         // printTrace(t, t->exception);
@@ -7587,11 +7595,11 @@ class MyProcessor: public Processor {
           c.stack = 0;
         }
-        ensure(t, traceSize(target));
-        t->tracing = true;
-        trace = makeTrace(t, target);
-        t->tracing = false;
+        if (ensure(t, traceSize(target))) {
+          t->tracing = true;
+          trace = makeTrace(t, target);
+          t->tracing = false;
+        }
       }
       MyThread* t;
@@ -7602,7 +7610,7 @@ class MyProcessor: public Processor {
     t->m->system->visit(t->systemThread, target->systemThread, &visitor);
-    if (t->backupHeap) {
+    if (UNLIKELY(t->useBackupHeap)) {
       PROTECT(t, visitor.trace);
       collect(t, Heap::MinorCollection);


@@ -570,12 +570,11 @@ postCollect(Thread* t)
   t->heapOffset = 0;
   t->heapIndex = 0;
-  if (t->backupHeap) {
-    t->m->heap->free
-      (t->backupHeap, t->backupHeapSizeInWords * BytesPerWord);
-    t->backupHeap = 0;
+  if (t->useBackupHeap) {
+    memset(t->backupHeap, 0, ThreadBackupHeapSizeInBytes);
+    t->useBackupHeap = false;
     t->backupHeapIndex = 0;
-    t->backupHeapSizeInWords = 0;
   }
   for (Thread* c = t->child; c; c = c->peer) {
@@ -1978,7 +1977,6 @@ boot(Thread* t)
   m->bootstrapClassMap = makeHashMap(t, 0, 0);
   m->stringMap = makeWeakHashMap(t, 0, 0);
   m->processor->boot(t, 0);
   { object bootCode = makeCode(t, 0, 0, 0, 0, 0, 1);
@@ -2123,6 +2121,8 @@ Machine::Machine(System* system, Heap* heap, Finder* finder,
   tenuredWeakReferences(0),
   shutdownHooks(0),
   objectsToFinalize(0),
+  nullPointerException(0),
+  arrayIndexOutOfBoundsException(0),
   unsafe(false),
   triedBuiltinOnLoad(false),
   heapPoolIndex(0)
@@ -2196,9 +2196,8 @@ Thread::Thread(Machine* m, object javaThread, Thread* parent):
   defaultHeap(static_cast<uintptr_t*>
              (m->heap->allocate(ThreadHeapSizeInBytes))),
   heap(defaultHeap),
-  backupHeap(0),
   backupHeapIndex(0),
-  backupHeapSizeInWords(0),
+  useBackupHeap(false),
   waiting(false),
   tracing(false)
 #ifdef VM_STRESS
@@ -2210,6 +2209,7 @@ void
 Thread::init()
 {
   memset(defaultHeap, 0, ThreadHeapSizeInBytes);
+  memset(backupHeap, 0, ThreadBackupHeapSizeInBytes);
   if (parent == 0) {
     assert(this, m->rootThread == 0);
@@ -2248,6 +2248,11 @@ Thread::init()
     m->jniMethodTable = makeVector(this, 0, 0);
+    m->nullPointerException = makeNullPointerException(this);
+    m->arrayIndexOutOfBoundsException
+      = makeArrayIndexOutOfBoundsException(this, 0);
     m->localThread->set(this);
   } else {
     peer = parent->child;
@@ -2539,15 +2544,15 @@ object
 allocate3(Thread* t, Allocator* allocator, Machine::AllocationType type,
           unsigned sizeInBytes, bool objectMask)
 {
-  if (t->backupHeap) {
+  if (UNLIKELY(t->useBackupHeap)) {
     expect(t, t->backupHeapIndex + ceiling(sizeInBytes, BytesPerWord)
-           <= t->backupHeapSizeInWords);
+           <= ThreadBackupHeapSizeInWords);
     object o = reinterpret_cast<object>(t->backupHeap + t->backupHeapIndex);
     t->backupHeapIndex += ceiling(sizeInBytes, BytesPerWord);
     cast<object>(o, 0) = 0;
     return o;
-  } else if (t->tracing) {
+  } else if (UNLIKELY(t->tracing)) {
     expect(t, t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
            <= ThreadHeapSizeInWords);
     return allocateSmall(t, sizeInBytes);
@@ -3676,6 +3681,8 @@ visitRoots(Machine* m, Heap::Visitor* v)
   v->visit(&(m->jniMethodTable));
   v->visit(&(m->shutdownHooks));
   v->visit(&(m->objectsToFinalize));
+  v->visit(&(m->nullPointerException));
+  v->visit(&(m->arrayIndexOutOfBoundsException));
   for (Thread* t = m->rootThread; t; t = t->peer) {
     ::visitRoots(t, v);


@@ -49,6 +49,10 @@ const uintptr_t FixedMark = 3;
 const unsigned ThreadHeapSizeInBytes = 64 * 1024;
 const unsigned ThreadHeapSizeInWords = ThreadHeapSizeInBytes / BytesPerWord;
+const unsigned ThreadBackupHeapSizeInBytes = 2 * 1024;
+const unsigned ThreadBackupHeapSizeInWords
+= ThreadBackupHeapSizeInBytes / BytesPerWord;
 const unsigned ThreadHeapPoolSize = 64;
 const unsigned FixedFootprintThresholdInBytes
@@ -1207,6 +1211,8 @@ class Machine {
   object tenuredWeakReferences;
   object shutdownHooks;
   object objectsToFinalize;
+  object nullPointerException;
+  object arrayIndexOutOfBoundsException;
   bool unsafe;
   bool triedBuiltinOnLoad;
   JavaVMVTable javaVMVTable;
@@ -1360,9 +1366,9 @@ class Thread {
   Runnable runnable;
   uintptr_t* defaultHeap;
   uintptr_t* heap;
-  uintptr_t* backupHeap;
+  uintptr_t backupHeap[ThreadBackupHeapSizeInWords];
   unsigned backupHeapIndex;
-  unsigned backupHeapSizeInWords;
+  bool useBackupHeap;
   bool waiting;
   bool tracing;
 #ifdef VM_STRESS
@@ -1550,20 +1556,23 @@ class FixedAllocator: public Allocator {
   unsigned capacity;
 };
-inline void
+inline bool
 ensure(Thread* t, unsigned sizeInBytes)
 {
   if (t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
       > ThreadHeapSizeInWords)
   {
-    expect(t, t->backupHeap == 0);
-    t->backupHeap = static_cast<uintptr_t*>
-      (t->m->heap->allocate(pad(sizeInBytes)));
-    t->backupHeapSizeInWords = ceiling(sizeInBytes, BytesPerWord);
+    if (sizeInBytes <= ThreadBackupHeapSizeInBytes) {
+      expect(t, not t->useBackupHeap);
+      memset(t->backupHeap, 0, sizeInBytes);
+      t->useBackupHeap = true;
+      t->backupHeapIndex = 0;
+      return true;
+    } else {
+      return false;
+    }
+  } else {
+    return true;
   }
 }