Increase the thread heap pool size and fixed-footprint threshold constants, based on performance profiling and experimentation.

This commit is contained in:
Joel Dice 2008-11-22 14:47:18 -07:00
parent 2e2925c321
commit 8a7852172e
2 changed files with 23 additions and 24 deletions

View File

@ -482,9 +482,9 @@ void
postCollect(Thread* t)
{
#ifdef VM_STRESS
t->m->heap->free(t->defaultHeap, Thread::HeapSizeInBytes);
t->m->heap->free(t->defaultHeap, ThreadHeapSizeInBytes);
t->defaultHeap = static_cast<uintptr_t*>
(t->m->heap->allocate(Thread::HeapSizeInBytes));
(t->m->heap->allocate(ThreadHeapSizeInBytes));
#endif
t->heap = t->defaultHeap;
@ -1703,7 +1703,7 @@ Machine::dispose()
}
for (unsigned i = 0; i < heapPoolIndex; ++i) {
heap->free(heapPool[i], Thread::HeapSizeInBytes);
heap->free(heapPool[i], ThreadHeapSizeInBytes);
}
heap->free(properties, sizeof(const char*) * propertyCount);
@ -1729,7 +1729,7 @@ Thread::Thread(Machine* m, object javaThread, Thread* parent):
protector(0),
runnable(this),
defaultHeap(static_cast<uintptr_t*>
(m->heap->allocate(HeapSizeInBytes))),
(m->heap->allocate(ThreadHeapSizeInBytes))),
heap(defaultHeap),
backupHeap(0),
backupHeapIndex(0),
@ -1870,7 +1870,7 @@ Thread::dispose()
systemThread->dispose();
}
m->heap->free(defaultHeap, Thread::HeapSizeInBytes);
m->heap->free(defaultHeap, ThreadHeapSizeInBytes);
m->processor->dispose(this);
}
@ -2028,7 +2028,7 @@ allocate2(Thread* t, unsigned sizeInBytes, bool objectMask)
{
return allocate3
(t, t->m->heap,
ceiling(sizeInBytes, BytesPerWord) > Thread::HeapSizeInWords ?
ceiling(sizeInBytes, BytesPerWord) > ThreadHeapSizeInWords ?
Machine::FixedAllocation : Machine::MovableAllocation,
sizeInBytes, objectMask);
}
@ -2047,7 +2047,7 @@ allocate3(Thread* t, Allocator* allocator, Machine::AllocationType type,
return o;
} else if (t->tracing) {
expect(t, t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
<= Thread::HeapSizeInWords);
<= ThreadHeapSizeInWords);
return allocateSmall(t, sizeInBytes);
}
@ -2060,18 +2060,16 @@ allocate3(Thread* t, Allocator* allocator, Machine::AllocationType type,
}
if (type == Machine::FixedAllocation) {
if (t->m->fixedFootprint + sizeInBytes
> Machine::FixedFootprintThresholdInBytes)
{
if (t->m->fixedFootprint + sizeInBytes > FixedFootprintThresholdInBytes) {
t->heap = 0;
}
} else if (t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
> Thread::HeapSizeInWords)
> ThreadHeapSizeInWords)
{
t->heap = 0;
if (t->m->heapPoolIndex < Machine::HeapPoolSize) {
if (t->m->heapPoolIndex < ThreadHeapPoolSize) {
t->heap = static_cast<uintptr_t*>
(t->m->heap->tryAllocate(Thread::HeapSizeInBytes));
(t->m->heap->tryAllocate(ThreadHeapSizeInBytes));
if (t->heap) {
t->m->heapPool[t->m->heapPoolIndex++] = t->heap;
t->heapOffset += t->heapIndex;
@ -2758,7 +2756,7 @@ collect(Thread* t, Heap::CollectionType type)
killZombies(t, m->rootThread);
for (unsigned i = 0; i < m->heapPoolIndex; ++i) {
m->heap->free(m->heapPool[i], Thread::HeapSizeInBytes);
m->heap->free(m->heapPool[i], ThreadHeapSizeInBytes);
}
m->heapPoolIndex = 0;

View File

@ -45,6 +45,14 @@ const uintptr_t HashTakenMark = 1;
const uintptr_t ExtendedMark = 2;
const uintptr_t FixedMark = 3;
const unsigned ThreadHeapSizeInBytes = 64 * 1024;
const unsigned ThreadHeapSizeInWords = ThreadHeapSizeInBytes / BytesPerWord;
const unsigned ThreadHeapPoolSize = 32;
const unsigned FixedFootprintThresholdInBytes
= ThreadHeapPoolSize * ThreadHeapSizeInBytes;
enum FieldCode {
VoidField,
ByteField,
@ -1128,10 +1136,6 @@ class Machine {
dispose();
}
static const unsigned HeapPoolSize = 8;
static const unsigned FixedFootprintThresholdInBytes = 256 * 1024;
void dispose();
JavaVMVTable* vtable;
@ -1168,7 +1172,7 @@ class Machine {
bool unsafe;
JavaVMVTable javaVMVTable;
JNIEnvVTable jniEnvVTable;
uintptr_t* heapPool[HeapPoolSize];
uintptr_t* heapPool[ThreadHeapPoolSize];
unsigned heapPoolIndex;
};
@ -1266,9 +1270,6 @@ class Thread {
Thread* t;
};
static const unsigned HeapSizeInBytes = 64 * 1024;
static const unsigned HeapSizeInWords = HeapSizeInBytes / BytesPerWord;
Thread(Machine* m, object javaThread, Thread* parent);
void init();
@ -1436,7 +1437,7 @@ inline void
ensure(Thread* t, unsigned sizeInBytes)
{
if (t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
> Thread::HeapSizeInWords)
> ThreadHeapSizeInWords)
{
expect(t, t->backupHeap == 0);
t->backupHeap = static_cast<uintptr_t*>
@ -1468,7 +1469,7 @@ allocate(Thread* t, unsigned sizeInBytes, bool objectMask)
stress(t);
if (UNLIKELY(t->heapIndex + ceiling(sizeInBytes, BytesPerWord)
> Thread::HeapSizeInWords
> ThreadHeapSizeInWords
or t->m->exclusive))
{
return allocate2(t, sizeInBytes, objectMask);