4#include "PageAllocation.h"
5#include "PageReservation.h"
7#include <private/qnumeric_p.h>
8#include <private/qv4alloca_p.h>
9#include <private/qv4engine_p.h>
10#include <private/qv4identifiertable_p.h>
11#include <private/qv4mapobject_p.h>
12#include <private/qv4mm_p.h>
13#include <private/qv4object_p.h>
14#include <private/qv4profiling_p.h>
15#include <private/qv4qobjectwrapper_p.h>
16#include <private/qv4setobject_p.h>
17#include <private/qv4stackframe_p.h>
19#include <QtQml/qqmlengine.h>
21#include <QtCore/qalgorithms.h>
22#include <QtCore/qelapsedtimer.h>
23#include <QtCore/qloggingcategory.h>
24#include <QtCore/qmap.h>
25#include <QtCore/qscopedvaluerollback.h>
33#if !defined(MM_STATS) && !defined(QT_NO_DEBUG)
38#define DEBUG qDebug() << "MM:"
40#define DEBUG if (1
) ; else qDebug() << "MM:"
44#include <valgrind/valgrind.h>
45#include <valgrind/memcheck.h>
48#ifdef V4_USE_HEAPTRACK
49#include <heaptrack_api.h>
53#include <sys/storage.h>
56#if USE(PTHREADS) && HAVE(PTHREAD_NP_H)
57#include <pthread_np.h>
61Q_STATIC_LOGGING_CATEGORY(lcGcAllocatorStats,
"qt.qml.gc.allocatorStats")
62Q_STATIC_LOGGING_CATEGORY(lcGcStateTransitions,
"qt.qml.gc.stateTransitions")
63Q_STATIC_LOGGING_CATEGORY(lcGcForcedRuns,
"qt.qml.gc.forcedRuns")
64Q_STATIC_LOGGING_CATEGORY(lcGcStepExecution,
"qt.qml.gc.stepExecution")
// NOTE(review): sampled/garbled excerpt of the GC MemorySegment type; interior
// lines are missing, so the notes below describe only what is visible here.
// Visible: the NumChunks enumerator, the constructor that reserves address
// space for the segment, and a move-constructor body.
80 NumChunks =
sizeof(quint64),
// Reserve one extra chunk so `base` can be aligned up to ChunkSize below.
89 size += Chunk::ChunkSize;
93 pageReservation = PageReservation::reserve(size, OSAllocator::JSGCHeapPages);
// Round the usable base address up to the next Chunk::ChunkSize boundary.
94 base =
reinterpret_cast<Chunk *>((
reinterpret_cast<quintptr>(pageReservation.base()) + Chunk::ChunkSize - 1) & ~(Chunk::ChunkSize - 1));
// Bytes lost to that alignment are subtracted from the usable byte count.
96 availableBytes = size - (
reinterpret_cast<quintptr>(base) -
reinterpret_cast<quintptr>(pageReservation.base()));
97 if (availableBytes < SegmentSize)
// Move construction: swap-steal the other segment's reservation and bookkeeping,
// leaving `other` holding this (default-empty) segment's state.
101 qSwap(pageReservation, other.pageReservation);
102 qSwap(base, other.base);
103 qSwap(allocatedMap, other.allocatedMap);
104 qSwap(availableBytes, other.availableBytes);
// Fragments of ~MemorySegment and the allocatedMap bit helpers. One bit of
// the quint64 `allocatedMap` tracks one chunk slot of the segment.
// Destructor: release the reserved address range back to the OS.
110 pageReservation.deallocate();
// setBit(index): mark chunk slot `index` as allocated
// (the OR-assignment line is not visible in this excerpt).
115 quint64 bit =
static_cast<quint64>(1) << index;
// clearBit(index): mark chunk slot `index` as free again.
120 quint64 bit =
static_cast<quint64>(1) << index;
121 allocatedMap &= ~bit;
// testBit(index): true when chunk slot `index` is currently allocated.
125 quint64 bit =
static_cast<quint64>(1) << index;
126 return (allocatedMap & bit);
// MemorySegment::free(chunk, size): clear the allocation bits covering the
// chunk run, then decommit its pages. (Sampled excerpt; some statements,
// including the loop body that clears bits, are missing.)
130 void free(Chunk *chunk, size_t size) {
131 DEBUG <<
"freeing chunk" << chunk;
// First chunk-map slot occupied by this allocation.
132 size_t index =
static_cast<size_t>(chunk - base);
// One past the last slot covered, clamped to the map size.
133 size_t end = qMin(
static_cast<size_t>(NumChunks), index + (size - 1)/Chunk::ChunkSize + 1);
134 while (index < end) {
135 Q_ASSERT(testBit(index));
// Round the size up to whole OS pages before decommitting.
140 size_t pageSize = WTF::pageSize();
141 size = (size + pageSize - 1) & ~(pageSize - 1);
// On non-Linux/Windows platforms the memory is zeroed explicitly first —
// presumably decommit is not guaranteed to zero there (TODO confirm).
142#if !defined(Q_OS_LINUX) && !defined(Q_OS_WIN)
147 memset(chunk, 0, size);
149 pageReservation.decommit(chunk, size);
// contains(c): whether the chunk pointer lies inside this segment's range.
153 return c >= base && c < base + nChunks;
// MemorySegment::allocate(size) fragments: a fast path commits the whole
// segment when it is completely empty and the request spans at least a
// segment; otherwise the bitmap is scanned for a run of free chunk slots.
165 if (!allocatedMap && size >= SegmentSize) {
167 Q_ASSERT(availableBytes >= size);
168 pageReservation.commit(base, size);
// Mark every slot taken in one go.
169 allocatedMap = ~
static_cast<quint64>(0);
// Number of whole chunks needed to cover `size` bytes (rounded up).
172 size_t requiredChunks = (size +
sizeof(Chunk) - 1)/
sizeof(Chunk);
174 Chunk *candidate =
nullptr;
// Linear scan for a contiguous run of free slots
// (the run-length bookkeeping lines are missing from this excerpt).
175 for (uint i = 0; i <
nChunks; ++i) {
178 candidate = base + i;
// A long enough run was found: commit the pages and mark the slots.
184 if (sequence == requiredChunks) {
185 pageReservation.commit(candidate, size);
186 for (uint i = 0; i < requiredChunks; ++i)
187 setBit(candidate - base + i);
188 DEBUG <<
"allocated chunk " << candidate << Qt::hex << size;
// ChunkAllocator fragments. requiredChunkSize(): account for the chunk
// header, round up to whole OS pages, and never return less than one chunk.
200 size += Chunk::HeaderSize;
201 size_t pageSize = WTF::pageSize();
202 size = (size + pageSize - 1) & ~(pageSize - 1);
203 if (size < Chunk::ChunkSize)
204 size = Chunk::ChunkSize;
209 void free(Chunk *chunk, size_t size = 0);
// allocate(): try each existing segment that still has a free slot
// (~allocatedMap != 0); if none fits, append a fresh segment sized for the
// request and allocate from it.
216 size = requiredChunkSize(size);
217 for (
auto &m : memorySegments) {
218 if (~m.allocatedMap) {
219 Chunk *c = m.allocate(size);
226 memorySegments.push_back(MemorySegment(size));
227 Chunk *c = memorySegments.back().allocate(size);
// free(): locate the owning segment and hand the chunk back to it.
234 size = requiredChunkSize(size);
235 for (
auto &m : memorySegments) {
236 if (m.contains(chunk)) {
// SDUMP compiles to a disabled qDebug stream: the dead `if (1) ; else`
// idiom keeps the streamed expression syntactically valid but never executed.
254#define SDUMP if (1
) ; else qDebug
// Global per-class freed-object counters (MM_STATS reporting); incremented
// by increaseFreedCountForClass(className).
259Q_GLOBAL_STATIC(
MMStatsHash, freedObjectStatsGlobal)
265 (*freedObjectStatsGlobal())[className]++;
// Chunk::sweep debug-output fragments (only emitted when SDUMP is enabled).
272 SDUMP() <<
"sweeping chunk" <<
this;
279 SDUMP() <<
" index=" <<
i;
310#ifdef V4_USE_HEAPTRACK
356#ifdef V4_USE_HEAPTRACK
376#if QT_POINTER_SIZE == 8
386#if QT_POINTER_SIZE == 8
456 while ((
m = *
last)) {
526#ifdef V4_USE_HEAPTRACK
592#ifdef V4_USE_HEAPTRACK
// freeHugeChunk fragments: report the dying object's class to the optional
// statistics callback, verify it was destroyed, then return the memory.
600 HeapItem *itemToFree = c.chunk->first();
601 Heap::Base *b = *itemToFree;
602 const VTable *v = b->internalClass->vtable;
603 if (Q_UNLIKELY(classCountPtr))
604 classCountPtr(v->className);
// Debug check: the heap object must have run its destroy hook by now.
608 b->_checkIsDestroyed();
// Either the owning memory segment or the chunk allocator takes the chunk
// back — the selecting condition is not visible in this excerpt.
612 c.segment->free(c.chunk, c.size);
615 chunkAllocator->free(c.chunk, c.size);
617#ifdef V4_USE_HEAPTRACK
618 heaptrack_report_free(c.chunk);
653using ExtraData = GCStateInfo::ExtraData;
654GCState markStart(GCStateMachine *that, ExtraData &)
657 that->mm->m_markStack = std::make_unique<MarkStack>(that->mm->engine);
658 that->mm->engine->isGCOngoing =
true;
659 return GCState::MarkGlobalObject;
662GCState markGlobalObject(GCStateMachine *that, ExtraData &)
664 that->mm->engine->markObjects(that->mm->m_markStack.get());
665 return GCState::MarkJSStack;
668GCState markJSStack(GCStateMachine *that, ExtraData &)
670 that->mm->collectFromJSStack(that->mm->markStack());
671 return GCState::InitMarkPersistentValues;
674GCState initMarkPersistentValues(GCStateMachine *that, ExtraData &stateData)
676 if (!that->mm->m_persistentValues)
677 return GCState::InitMarkWeakValues;
678 stateData = GCIteratorStorage { that->mm->m_persistentValues->begin() };
679 return GCState::MarkPersistentValues;
682static constexpr int markLoopIterationCount = 1024;
// Returns whether the mark stack had to be drained before continuing; the
// visible fast path skips draining while there is enough headroom below the
// soft limit. (The function's return statements are missing from this excerpt.)
684bool wasDrainNecessary(MarkStack *ms, QDeadlineTimer deadline)
686 if (ms->remainingBeforeSoftLimit() > markLoopIterationCount)
// Incremental state: mark up to markLoopIterationCount persistent values per
// time slice, yielding back to the state machine when the deadline expires.
693GCState markPersistentValues(GCStateMachine *that, ExtraData &stateData) {
694 auto markStack = that->mm->markStack();
// If a drain was needed and the deadline is already gone, resume here later.
695 if (wasDrainNecessary(markStack, that->deadline) && that->deadline.hasExpired())
696 return GCState::MarkPersistentValues;
// Iterator persisted across slices in the state machine's extra data.
697 PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
699 for (
int i = 0; i < markLoopIterationCount; ++i) {
// (End-of-storage check is missing from this excerpt) -> next phase.
701 return GCState::InitMarkWeakValues;
702 if (Managed *m = (*it).as<Managed>())
// Budget exhausted but values remain: stay in this state.
706 return GCState::MarkPersistentValues;
709GCState initMarkWeakValues(GCStateMachine *that, ExtraData &stateData)
711 stateData = GCIteratorStorage { that->mm->m_weakValues->begin() };
712 return GCState::MarkWeakValues;
// Incremental state: walk the weak values; a QObject wrapper is kept alive
// when its QObject — or the root of its parent chain — is flagged
// keepAliveDuringGarbageCollection. (Sampled excerpt; several lines missing.)
715GCState markWeakValues(GCStateMachine *that, ExtraData &stateData)
717 auto markStack = that->mm->markStack();
718 if (wasDrainNecessary(markStack, that->deadline) && that->deadline.hasExpired())
719 return GCState::MarkWeakValues;
720 PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
722 for (
int i = 0; i < markLoopIterationCount; ++i) {
724 return GCState::MarkDrain;
725 QObjectWrapper *qobjectWrapper = (*it).as<QObjectWrapper>();
729 QObject *qobject = qobjectWrapper->object();
732 bool keepAlive = QQmlData::keepAliveDuringGarbageCollection(qobject);
// If the object itself is not flagged, check the root of its parent chain.
735 if (QObject *parent = qobject->parent()) {
736 while (parent->parent())
737 parent = parent->parent();
738 keepAlive = QQmlData::keepAliveDuringGarbageCollection(parent);
// Keep-alive wrappers are marked so they survive this collection.
743 qobjectWrapper->mark(that->mm->markStack());
745 return GCState::MarkWeakValues;
748GCState markDrain(GCStateMachine *that, ExtraData &)
750 if (that->deadline.isForever()) {
751 that->mm->markStack()->drain();
752 return GCState::MarkReady;
754 auto drainState = that->mm->m_markStack->drain(that->deadline);
755 return drainState == MarkStack::DrainState::Complete
757 : GCState::MarkDrain;
760GCState markReady(GCStateMachine *that, ExtraData &)
762 auto isIncrementalRun = [](GCStateMachine* that){
763 return !that->mm->aggressiveGC && that->timeLimit.count() > 0;
766 if (that->mm->crossValidateIncrementalGC && isIncrementalRun(that))
767 return GCState::CrossValidateIncrementalMarkPhase;
768 return GCState::InitCallDestroyObjects;
// Debug/validation state: snapshot the black (mark) bitmaps produced by the
// incremental mark, re-run the whole mark phase non-incrementally, and report
// any object marked by the re-run but not by the incremental run.
// (Sampled excerpt; many structural lines — braces, lambda bodies — missing.)
771GCState crossValidateIncrementalMarkPhase(GCStateMachine *that, ExtraData &)
// Visitor that yields the Chunk* out of either a plain Chunk* or a HugeChunk.
774 Chunk* operator()(Chunk* chunk) {
return chunk; }
775 Chunk* operator()(
const HugeItemAllocator::HugeChunk& chunk) {
return chunk.chunk; }
// Copy each chunk's black bitmap into `storage`, then clear the live bits so
// the validation re-mark starts from a blank slate.
778 auto takeBlackBitmap = [&getChunk](
auto& allocator, std::vector<quintptr>& storage){
779 for (
auto chunk : allocator.chunks) {
780 for (
auto& bitmap : getChunk(chunk)->blackBitmap) {
781 storage.push_back(bitmap);
783 getChunk(chunk)->resetBlackBits();
// Re-run the mark phase to completion by stepping the state machine until
// it reaches MarkReady again.
787 auto runMarkPhase = [](GCStateMachine* that) {
789 that->mm->m_markStack.reset();
791 while (that->state != GCStateMachine::MarkReady) {
792 GCStateInfo& stateInfo = that->stateInfoMap[
int(that->state)];
793 that->state = stateInfo.execute(that, that->stateData);
// Compare the stored (incremental) bitmap against the freshly re-marked one.
797 auto checkBlackBitmap = [&that, &getChunk](
auto& allocator,
const std::vector<quintptr>& storedBitmap) {
798 auto reportError = [&allocator, &getChunk, &that](
std::size_t chunk_index,
std::size_t bitmap_index, uint bit_index){
// Recover the Heap::Base* for the mismatching bit for diagnostics.
800 auto object =
reinterpret_cast<Heap::Base*>(getChunk(allocator.chunks[chunk_index])->realBase() + (bit_index + (bitmap_index*Chunk::Bits)));
801 qDebug() <<
"Cross Validation Error on chunk" << chunk_index
802 <<
"on bitmap piece" << bitmap_index <<
"and bit" << bit_index
803 << ((object->internalClass) ?
"With type" :
"")
804 << ((object->internalClass) ?
805 Managed::typeToString(Managed::Type(object->internalClass->vtable->type)) : QString());
807 #ifdef QT_BUILD_INTERNAL
808 that->bitmapErrors.emplace_back(chunk_index, bitmap_index, bit_index);
812 auto original = storedBitmap.begin();
813 for (
std::size_t chunk_index = 0; original != storedBitmap.end() && chunk_index < allocator.chunks.size(); ++chunk_index) {
814 for (
std::size_t bitmap_index = 0; bitmap_index < Chunk::EntriesInBitmap; ++bitmap_index) {
// Bits set by the re-mark but clear in the incremental snapshot are errors.
815 if (
auto differences = (~(*original)) & getChunk(allocator.chunks[chunk_index])->blackBitmap[bitmap_index]) {
816 while (differences != 0) {
817 uint bit_index = qCountTrailingZeroBits(differences);
818 reportError(chunk_index, bitmap_index, bit_index);
// Clear the reported bit and continue with the remaining differences.
819 differences ^= quintptr{1} << bit_index;
827 #ifdef QT_BUILD_INTERNAL
828 that->bitmapErrors.clear();
// Snapshot (and reset) the bitmaps of all three allocators...
831 std::vector<quintptr> blockBitmap{};
832 blockBitmap.reserve(Chunk::EntriesInBitmap * that->mm->blockAllocator.chunks.size());
833 takeBlackBitmap(that->mm->blockAllocator, blockBitmap);
835 std::vector<quintptr> hugeItemBitmap{};
836 hugeItemBitmap.reserve(Chunk::EntriesInBitmap * that->mm->hugeItemAllocator.chunks.size());
837 takeBlackBitmap(that->mm->hugeItemAllocator, hugeItemBitmap);
839 std::vector<quintptr> internalClassBitmap{};
840 internalClassBitmap.reserve(Chunk::EntriesInBitmap * that->mm->icAllocator.chunks.size());
841 takeBlackBitmap(that->mm->icAllocator, internalClassBitmap);
// ...then (after the re-mark, whose invocation is not visible here) validate.
845 checkBlackBitmap(that->mm->blockAllocator, blockBitmap);
846 checkBlackBitmap(that->mm->hugeItemAllocator, hugeItemBitmap);
847 checkBlackBitmap(that->mm->icAllocator, internalClassBitmap);
849 return GCState::InitCallDestroyObjects;
853
854
855void redrain(GCStateMachine *that)
857 that->mm->collectFromJSStack(that->mm->markStack());
858 that->mm->m_markStack->drain();
// Transition into the destroy phase. (Lines 862-864 of the original are
// missing from this excerpt; presumably the redrain() helper above is invoked
// there to refresh mark state — TODO confirm against the full source.)
861GCState initCallDestroyObjects(GCStateMachine *that, ExtraData &stateData)
// No weak values registered: skip straight to weak-map cleanup.
865 if (!that->mm->m_weakValues)
866 return GCState::FreeWeakMaps;
// Otherwise stash an iterator for the CallDestroyObjects state to consume.
867 stateData = GCIteratorStorage { that->mm->m_weakValues->begin() };
868 return GCState::CallDestroyObjects;
// Incremental state: run destroy hooks for dead weak-referenced objects,
// a bounded number per time slice. (Sampled excerpt; some lines missing.)
870GCState callDestroyObject(GCStateMachine *that, ExtraData &stateData)
872 PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
// Destroy hooks can run arbitrary code; block re-entrant GC while they run
// and restore the previous blocked-state on every exit path.
874 auto oldState = std::exchange(that->mm->gcBlocked, QV4::MemoryManager::Blockness::InCriticalSection);
875 auto cleanup = qScopeGuard([&]() {
876 that->mm->gcBlocked = oldState;
879 for (
int i = 0; i < markLoopIterationCount; ++i) {
// (End-of-storage check missing from this excerpt) -> weak-map cleanup.
881 return GCState::FreeWeakMaps;
882 Managed *m = (*it).managed();
// Skip empty slots and objects that survived marking.
884 if (!m || m->markBit())
888 if (QObjectWrapper *qobjectWrapper = m->as<QObjectWrapper>())
// `false`: not the engine-shutdown "last call" variant — TODO confirm
// parameter meaning against QObjectWrapper::destroyObject.
889 qobjectWrapper->destroyObject(
false);
// Budget exhausted: stay in this state for another slice.
891 return GCState::CallDestroyObjects;
// Walk the linked list of weak maps; surviving (marked) maps get their
// unmarked keys pruned and are relinked via `lastMap`. (The branch that
// skips/unlinks unmarked maps is missing from this excerpt.)
894void freeWeakMaps(MemoryManager *mm)
896 for (
auto [map, lastMap] =
std::tuple {mm->weakMaps, &mm->weakMaps }; map; map = map->nextWeakMap) {
897 if (!map->isMarked())
899 map->removeUnmarkedKeys();
// Advance the relink cursor past this surviving map.
901 lastMap = &map->nextWeakMap;
905GCState freeWeakMaps(GCStateMachine *that, ExtraData &)
907 freeWeakMaps(that->mm);
908 return GCState::FreeWeakSets;
// Walk the linked list of weak sets; surviving (marked) sets get their
// unmarked keys pruned and are relinked via `lastSet`. (The branch that
// skips/unlinks unmarked sets is missing from this excerpt.)
911void freeWeakSets(MemoryManager *mm)
913 for (
auto [set, lastSet] =
std::tuple {mm->weakSets, &mm->weakSets}; set; set = set->nextWeakSet) {
915 if (!set->isMarked())
917 set->removeUnmarkedKeys();
// Advance the relink cursor past this surviving set.
919 lastSet = &set->nextWeakSet;
923GCState freeWeakSets(GCStateMachine *that, ExtraData &)
925 freeWeakSets(that->mm);
926 return GCState::HandleQObjectWrappers;
929GCState handleQObjectWrappers(GCStateMachine *that, ExtraData &)
931 that->mm->cleanupDeletedQObjectWrappersInSweep();
932 return GCState::DoSweep;
// Final sweep state: sweep the identifier table and allocators, clear all
// mark (black) bits for the next cycle, record statistics, and unblock the
// GC. (Sampled excerpt; some lines, including `mm`'s definition, missing.)
935GCState doSweep(GCStateMachine *that, ExtraData &)
939 mm->engine->identifierTable->sweep();
940 mm->blockAllocator.sweep();
942 mm->icAllocator.sweep();
// Every allocator's black bitmap must be reset before the next cycle.
945 mm->blockAllocator.resetBlackBits();
946 mm->hugeItemAllocator.resetBlackBits();
947 mm->icAllocator.resetBlackBits();
// Remember post-sweep occupancy for growth heuristics.
949 mm->usedSlotsAfterLastFullSweep = mm->blockAllocator.usedSlotsAfterLastSweep + mm->icAllocator.usedSlotsAfterLastSweep;
// Collection finished: allow new GC runs and tear down the mark stack.
950 mm->gcBlocked = MemoryManager::Unblocked;
951 mm->m_markStack.reset();
952 mm->engine->isGCOngoing =
false;
954 mm->updateUnmanagedHeapSizeGCLimit();
// Invalid marks the state machine as done until the next cycle is started.
956 return GCState::Invalid;
976#ifdef V4_USE_VALGRIND
1224
1225
1226
1227
// dumpBins fragments: log per-bin free-slot statistics of a BlockAllocator
// under the qt.qml.gc.allocatorStats category and return the total free
// memory in bytes. (Sampled excerpt; loop bodies are partially missing.)
1274 const QLoggingCategory &stats = lcGcAllocatorStats();
1275 size_t totalSlotMem = 0;
1277 qDebug(stats) <<
"Slot map for" << title <<
"allocator:";
1278 for (uint i = 0; i < BlockAllocator::NumBins; ++i) {
// Walk the free list of this bin, accumulating available slots.
1280 HeapItem *h = b->freeBins[i];
1283 totalSlotMem += h->freeData.availableSlots;
1284 h = h->freeData.next;
1287 qDebug(stats) <<
" number of entries in slot" << i <<
":" << nEntries;
// The last bin collects all "large" free runs; dump each entry (SDUMP only).
1289 SDUMP() <<
" large slot map";
1290 HeapItem *h = b->freeBins[BlockAllocator::NumBins - 1];
1292 SDUMP() <<
" " << Qt::hex << (quintptr(h)/32) << h->freeData.availableSlots;
1293 h = h->freeData.next;
1297 qDebug(stats) <<
" total mem in bins" << totalSlotMem*Chunk::SlotSize;
1298 return totalSlotMem*Chunk::SlotSize;
1302
1303
1304
1305
1306
1307
1308
1309
1314 <<
"Tried to force the GC to complete a run but failed due to being in a critical section.";
1414 qDebug(
stats) <<
"!!!!!!!!!!!!!!!!!!!!! LOST MEM:" <<
lost <<
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!";
1506#ifdef V4_USE_VALGRIND
1519 qDebug(
stats) <<
"Qml GC memory allocation statistics:";
1523 qDebug(
stats) <<
"Requests for different item sizes:";
1573 auto registerTimingWithResetOnOverflow = [](
1574 GCStateMachine::StepTiming& storage, quint64 timing, GCState state
1576 auto wouldOverflow = [](quint64 lhs, quint64 rhs) {
1577 return rhs > 0 && lhs > std::numeric_limits<quint64>::max() - rhs;
1580 if (wouldOverflow(storage.rolling_sum, timing) || wouldOverflow(storage.count, 1)) {
1581 qDebug(lcGcStepExecution) <<
"Resetting timings storage for"
1582 << QMetaEnum::fromType<GCState>().key(state) <<
"due to overflow.";
1583 storage.rolling_sum = timing;
1586 storage.rolling_sum += timing;
1591 GCStateMachine::StepTiming& storage = that->executionTiming[that->state];
1592 registerTimingWithResetOnOverflow(storage, timing, that->state);
1594 qDebug(lcGcStepExecution) <<
"Performed" << QMetaEnum::fromType<GCState>().key(that->state)
1595 <<
"in" << timing <<
"microseconds";
1596 qDebug(lcGcStepExecution) <<
"This step was performed" << storage.count <<
" time(s), executing in"
1597 << (storage.rolling_sum / storage.count) <<
"microseconds on average.";
1601 if (!that->collectTimings)
1602 return stateInfo.execute(that, that->stateData);
1604 QElapsedTimer timer;
1606 GCState next = stateInfo.execute(that, that->stateData);
1607 logStepTiming(that, timer.nsecsElapsed()/1000);
1618
1619
1620
1621
1622
1623
1624
1625
1660#include "moc_qv4mm_p.cpp"
static void logStepTiming(GCStateMachine *that, quint64 timing)
static uint markStackSize
static void increaseFreedCountForClass(const char *className)
static size_t dumpBins(BlockAllocator *b, const char *title)
QHash< const char *, int > MMStatsHash
static GCState executeWithLoggingIfEnabled(GCStateMachine *that, GCStateInfo &stateInfo)
static void freeHugeChunk(ChunkAllocator *chunkAllocator, const HugeItemAllocator::HugeChunk &c, ClassDestroyStatsCallback classCountPtr)
Q_STATIC_LOGGING_CATEGORY(lcAccessibilityCore, "qt.accessibility.core")
void free(Chunk *chunk, size_t size=0)
size_t requiredChunkSize(size_t size)
Chunk * allocate(size_t size=0)
std::vector< MemorySegment > memorySegments
void free(Chunk *chunk, size_t size)
Chunk * allocate(size_t size)
MemorySegment(size_t size)
PageReservation pageReservation
void setBit(size_t index)
bool testBit(size_t index) const
void clearBit(size_t index)
bool contains(Chunk *c) const
MemorySegment(MemorySegment &&other)