5#include "PageAllocation.h"
6#include "PageReservation.h"
8#include <private/qnumeric_p.h>
9#include <private/qv4alloca_p.h>
10#include <private/qv4engine_p.h>
11#include <private/qv4identifiertable_p.h>
12#include <private/qv4mapobject_p.h>
13#include <private/qv4mm_p.h>
14#include <private/qv4object_p.h>
15#include <private/qv4profiling_p.h>
16#include <private/qv4qobjectwrapper_p.h>
17#include <private/qv4setobject_p.h>
18#include <private/qv4stackframe_p.h>
20#include <QtQml/qqmlengine.h>
22#include <QtCore/qalgorithms.h>
23#include <QtCore/qelapsedtimer.h>
24#include <QtCore/qloggingcategory.h>
25#include <QtCore/qmap.h>
26#include <QtCore/qscopedvaluerollback.h>
#if !defined(MM_STATS) && !defined(QT_NO_DEBUG)
#  define MM_STATS
#endif

#if MM_DEBUG
#define DEBUG qDebug() << "MM:"
#else
#define DEBUG if (1) ; else qDebug() << "MM:"
#endif
#ifdef V4_USE_VALGRIND
#include <valgrind/valgrind.h>
#include <valgrind/memcheck.h>
#endif

#ifdef V4_USE_HEAPTRACK
#include <heaptrack_api.h>
#endif

#if OS(QNX)
#include <sys/storage.h>   // __tls()
#endif

#if USE(PTHREADS) && HAVE(PTHREAD_NP_H)
#include <pthread_np.h>
#endif
Q_STATIC_LOGGING_CATEGORY(lcGcAllocatorStats, "qt.qml.gc.allocatorStats")
Q_STATIC_LOGGING_CATEGORY(lcGcStateTransitions, "qt.qml.gc.stateTransitions")
Q_STATIC_LOGGING_CATEGORY(lcGcForcedRuns, "qt.qml.gc.forcedRuns")
Q_STATIC_LOGGING_CATEGORY(lcGcStepExecution, "qt.qml.gc.stepExecution")
    enum {
        NumChunks = 8*sizeof(quint64), // one bit per chunk in the quint64 allocatedMap
        SegmentSize = NumChunks*Chunk::ChunkSize,
    };
    MemorySegment(size_t size)
    {
        size += Chunk::ChunkSize; // over-reserve so the base can be aligned to a chunk boundary
        if (size < SegmentSize)
            size = SegmentSize;

        pageReservation = PageReservation::reserve(size, OSAllocator::JSGCHeapPages);
        base = reinterpret_cast<Chunk *>((reinterpret_cast<quintptr>(pageReservation.base()) + Chunk::ChunkSize - 1)
                                         & ~(Chunk::ChunkSize - 1));
        nChunks = NumChunks;
        availableBytes = size - (reinterpret_cast<quintptr>(base) - reinterpret_cast<quintptr>(pageReservation.base()));
        if (availableBytes < SegmentSize)
            --nChunks; // aligning the base consumed part of the reservation
    }
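    // Alignment sketch: with Chunk::ChunkSize == 64 KiB (0x10000), a raw
    // reservation base of 0x7f0000012345 rounds up to 0x7f0000020000:
    //   (0x7f0000012345 + 0xffff) & ~quintptr(0xffff) == 0x7f0000020000
    // Up to ChunkSize - 1 bytes at the front of the reservation become
    // unusable, which is exactly what the over-reservation above pays for.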
    MemorySegment(MemorySegment &&other) {
        qSwap(pageReservation, other.pageReservation);
        qSwap(base, other.base);
        qSwap(allocatedMap, other.allocatedMap);
        qSwap(availableBytes, other.availableBytes);
        qSwap(nChunks, other.nChunks);
    }

    ~MemorySegment() {
        if (base)
            pageReservation.deallocate();
    }
    void setBit(size_t index) {
        Q_ASSERT(index < nChunks);
        quint64 bit = static_cast<quint64>(1) << index;
        allocatedMap |= bit;
    }
    void clearBit(size_t index) {
        Q_ASSERT(index < nChunks);
        quint64 bit = static_cast<quint64>(1) << index;
        allocatedMap &= ~bit;
    }
    bool testBit(size_t index) const {
        Q_ASSERT(index < nChunks);
        quint64 bit = static_cast<quint64>(1) << index;
        return (allocatedMap & bit);
    }
    void free(Chunk *chunk, size_t size) {
        DEBUG << "freeing chunk" << chunk;
        size_t index = static_cast<size_t>(chunk - base);
        size_t end = qMin(static_cast<size_t>(NumChunks), index + (size - 1)/Chunk::ChunkSize + 1);
        while (index < end) {
            Q_ASSERT(testBit(index));
            clearBit(index);
            ++index;
        }
        size_t pageSize = WTF::pageSize();
        size = (size + pageSize - 1) & ~(pageSize - 1);
#if !defined(Q_OS_LINUX) && !defined(Q_OS_WIN)
        // Linux and Windows guarantee that decommitted pages read back as zero
        // when committed again; other platforms don't, so zero the memory
        // before decommitting to keep the invariant that freshly allocated
        // chunks are zero-initialized.
        memset(chunk, 0, size);
#endif
        pageReservation.decommit(chunk, size);
    }
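    // Rounding sketch: (size + pageSize - 1) & ~(pageSize - 1) rounds size up
    // to the next page multiple, e.g. with pageSize == 4096:
    //   (5000 + 4095) & ~quintptr(4095) == 8192
    // decommit() then returns whole pages to the OS while the address range
    // itself stays reserved for later re-commits.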
    bool contains(Chunk *c) const {
        return c >= base && c < base + nChunks;
    }
Chunk *MemorySegment::allocate(size_t size)
{
    if (!allocatedMap && size >= SegmentSize) {
        // the segment is empty and the request is huge: hand out all of it
        Q_ASSERT(availableBytes >= size);
        pageReservation.commit(base, size);
        allocatedMap = ~static_cast<quint64>(0);
        return base;
    }
    size_t requiredChunks = (size + sizeof(Chunk) - 1)/sizeof(Chunk);
    uint sequence = 0;
    Chunk *candidate = nullptr;
    for (uint i = 0; i < nChunks; ++i) {
        if (!testBit(i)) {
            if (!candidate)
                candidate = base + i;
            ++sequence;
        } else {
            candidate = nullptr;
            sequence = 0;
        }
        if (sequence == requiredChunks) {
            pageReservation.commit(candidate, size);
            for (uint i = 0; i < requiredChunks; ++i)
                setBit(candidate - base + i);
            DEBUG << "allocated chunk " << candidate << Qt::hex << size;
            return candidate;
        }
    }
    return nullptr;
}
    size_t requiredChunkSize(size_t size) {
        size += Chunk::HeaderSize; // leave room for the chunk header (mark bitmaps etc.)
        size_t pageSize = WTF::pageSize();
        size = (size + pageSize - 1) & ~(pageSize - 1); // round up to whole pages
        if (size < Chunk::ChunkSize)
            size = Chunk::ChunkSize;
        return size;
    }

    Chunk *allocate(size_t size = 0);
    void free(Chunk *chunk, size_t size = 0);
Chunk *ChunkAllocator::allocate(size_t size)
{
    size = requiredChunkSize(size);
    for (auto &m : memorySegments) {
        if (~m.allocatedMap) {
            Chunk *c = m.allocate(size);
            if (c)
                return c;
        }
    }

    // no free chunk in any existing segment: open a new one
    memorySegments.push_back(MemorySegment(size));
    Chunk *c = memorySegments.back().allocate(size);
    Q_ASSERT(c);
    return c;
}
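// Note: ~m.allocatedMap is non-zero exactly when at least one occupancy bit
// is clear, so completely full segments are skipped without scanning them.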
void ChunkAllocator::free(Chunk *chunk, size_t size)
{
    size = requiredChunkSize(size);
    for (auto &m : memorySegments) {
        if (m.contains(chunk)) {
            m.free(chunk, size);
            return;
        }
    }
    Q_ASSERT(false);
}
#define SDUMP if (1) ; else qDebug
    SDUMP() << "sweeping chunk" << this;
        SDUMP() << "   index=" << i;
#ifdef V4_USE_HEAPTRACK

#ifdef V4_USE_HEAPTRACK

#if QT_POINTER_SIZE == 8

#if QT_POINTER_SIZE == 8

    while ((m = *last)) {

#ifdef V4_USE_HEAPTRACK

#ifdef V4_USE_HEAPTRACK
    HeapItem *itemToFree = c.chunk->first();
    Heap::Base *b = *itemToFree;
    const VTable *v = b->internalClass->vtable;
    if (v->destroy) {
        v->destroy(b);
        b->_checkIsDestroyed();
    }
    if (c.segment) {
        // the huge chunk lives in its own memory segment
        c.segment->free(c.chunk, c.size);
    } else {
        chunkAllocator->free(c.chunk, c.size);
    }
#ifdef V4_USE_HEAPTRACK
    heaptrack_report_free(c.chunk);
#endif
using ExtraData = GCStateInfo::ExtraData;

GCState markStart(GCStateMachine *that, ExtraData &)
{
    that->mm->m_markStack = std::make_unique<MarkStack>(that->mm->engine);
    that->mm->engine->isGCOngoing = true;
    return GCState::MarkGlobalObject;
}

GCState markGlobalObject(GCStateMachine *that, ExtraData &)
{
    that->mm->engine->markObjects(that->mm->m_markStack.get());
    return GCState::MarkJSStack;
}

GCState markJSStack(GCStateMachine *that, ExtraData &)
{
    that->mm->collectFromJSStack(that->mm->markStack());
    return GCState::InitMarkPersistentValues;
}

GCState initMarkPersistentValues(GCStateMachine *that, ExtraData &stateData)
{
    if (!that->mm->m_persistentValues)
        return GCState::InitMarkWeakValues;
    stateData = GCIteratorStorage { that->mm->m_persistentValues->begin() };
    return GCState::MarkPersistentValues;
}
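// Each handler returns the next state. Between steps the machine can yield
// back to the application and resume later; per-state progress (here the
// persistent-value iterator) is carried across steps in ExtraData, which is
// what makes the mark phase incremental.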
enum {
    MarkLoopIterationCount = 1024,
    MarkLoopIterationCountForDrain = 10240,
};

bool wasDrainNecessary(MarkStack *ms, QDeadlineTimer deadline)
{
    if (ms->remainingBeforeSoftLimit() > MarkLoopIterationCount)
        return false;
    // the mark stack is close to its soft limit: drain it now
    ms->drain(deadline);
    return true;
}
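// Policy: a batch may push up to MarkLoopIterationCount new entries, so the
// stack is drained whenever fewer than that many slots remain before the soft
// limit; if draining ate the deadline, the caller re-enters the same state on
// the next GC step.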
GCState markPersistentValues(GCStateMachine *that, ExtraData &stateData) {
    auto markStack = that->mm->markStack();
    if (wasDrainNecessary(markStack, that->deadline) && that->deadline.hasExpired())
        return GCState::MarkPersistentValues;
    PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
    // avoid hitting the deadline timer on every entry: process entries in batches
    for (int i = 0; i < MarkLoopIterationCount; ++i) {
        if (!it.p)
            return GCState::InitMarkWeakValues;
        if (Managed *m = (*it).as<Managed>())
            m->mark(markStack);
        ++it;
    }
    return GCState::MarkPersistentValues;
}
GCState initMarkWeakValues(GCStateMachine *that, ExtraData &stateData)
{
    stateData = GCIteratorStorage { that->mm->m_weakValues->begin() };
    return GCState::MarkWeakValues;
}
GCState markWeakValues(GCStateMachine *that, ExtraData &stateData)
{
    auto markStack = that->mm->markStack();
    if (wasDrainNecessary(markStack, that->deadline) && that->deadline.hasExpired())
        return GCState::MarkWeakValues;
    PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
    for (int i = 0; i < MarkLoopIterationCount; ++i) {
        if (!it.p)
            return GCState::MarkDrain;
        QObjectWrapper *qobjectWrapper = (*it).as<QObjectWrapper>();
        ++it;
        if (!qobjectWrapper)
            continue;
        QObject *qobject = qobjectWrapper->object();
        if (!qobject)
            continue;
        bool keepAlive = QQmlData::keepAliveDuringGarbageCollection(qobject);
        if (!keepAlive) {
            if (QObject *parent = qobject->parent()) {
                while (parent->parent())
                    parent = parent->parent();
                keepAlive = QQmlData::keepAliveDuringGarbageCollection(parent);
            }
        }
        if (keepAlive)
            qobjectWrapper->mark(that->mm->markStack());
    }
    return GCState::MarkWeakValues;
}
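// Keep-alive rule: a QObject wrapper is marked during the weak-value scan if
// its object, or the root of its QObject parent chain, is flagged via
// QQmlData::keepAliveDuringGarbageCollection() (e.g. objects whose ownership
// was handed to C++).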
GCState markDrain(GCStateMachine *that, ExtraData &)
{
    if (that->deadline.isForever()) {
        that->mm->markStack()->drain();
        return GCState::MarkReady;
    }
    auto drainState = that->mm->m_markStack->drain(that->deadline);
    return drainState == MarkStack::DrainState::Complete
            ? GCState::MarkReady
            : GCState::MarkDrain;
}
GCState markReady(GCStateMachine *that, ExtraData &)
{
    auto isIncrementalRun = [](GCStateMachine* that){
        // aggressive GC always runs to completion; a time limit marks an incremental run
        return !that->mm->aggressiveGC && that->timeLimit.count() > 0;
    };

    if (that->mm->crossValidateIncrementalGC && isIncrementalRun(that))
        return GCState::CrossValidateIncrementalMarkPhase;
    return GCState::InitCallDestroyObjects;
}
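// Cross-validation: the black bitmaps produced by the incremental mark are
// saved and cleared, the mark phase is re-run in one non-incremental pass,
// and the two results are compared. Any bit set by the full run but clear in
// the incremental result is an object the incremental GC failed to mark.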
GCState crossValidateIncrementalMarkPhase(GCStateMachine *that, ExtraData &)
{
    // huge items carry their chunk inside a HugeChunk wrapper; the other
    // allocators store Chunk pointers directly
    struct {
        Chunk* operator()(Chunk* chunk) { return chunk; }
        Chunk* operator()(const HugeItemAllocator::HugeChunk& chunk) { return chunk.chunk; }
    } getChunk;

    auto takeBlackBitmap = [&getChunk](auto& allocator, std::vector<quintptr>& storage){
        for (auto chunk : allocator.chunks) {
            for (auto& bitmap : getChunk(chunk)->blackBitmap)
                storage.push_back(bitmap);
            getChunk(chunk)->resetBlackBits();
        }
    };

    auto runMarkPhase = [](GCStateMachine* that) {
        that->mm->m_markStack.reset();
        that->state = GCState::MarkStart;
        while (that->state != GCStateMachine::MarkReady) {
            GCStateInfo& stateInfo = that->stateInfoMap[int(that->state)];
            that->state = stateInfo.execute(that, that->stateData);
        }
    };

    auto checkBlackBitmap = [&that, &getChunk](auto& allocator, const std::vector<quintptr>& storedBitmap) {
        auto reportError = [&allocator, &getChunk, &that](std::size_t chunk_index, std::size_t bitmap_index, uint bit_index){
#ifdef QT_BUILD_INTERNAL
            if (auto errors = that->bitmapErrors) {
                errors->emplace_back(chunk_index, bitmap_index, bit_index);
                return;
            }
#endif
            auto object = reinterpret_cast<Heap::Base*>(
                    getChunk(allocator.chunks[chunk_index])->realBase() + (bit_index + (bitmap_index*Chunk::Bits)));
            qDebug() << "Cross Validation Error on chunk" << chunk_index
                     << "on bitmap piece" << bitmap_index << "and bit" << bit_index
                     << ((object->internalClass) ? "With type" : "")
                     << ((object->internalClass)
                         ? Managed::typeToString(Managed::Type(object->internalClass->vtable->type)) : QString());
        };

        auto original = storedBitmap.begin();
        for (std::size_t chunk_index = 0; original != storedBitmap.end() && chunk_index < allocator.chunks.size(); ++chunk_index) {
            for (std::size_t bitmap_index = 0; bitmap_index < Chunk::EntriesInBitmap; ++bitmap_index, ++original) {
                // bits set by the full run but missing from the incremental run
                if (auto differences = (~(*original)) & getChunk(allocator.chunks[chunk_index])->blackBitmap[bitmap_index]) {
                    while (differences != 0) {
                        // extract the set bits one by one, lowest first
                        uint bit_index = qCountTrailingZeroBits(differences);
                        reportError(chunk_index, bitmap_index, bit_index);
                        differences ^= quintptr{1} << bit_index;
                    }
                }
            }
        }
    };

#ifdef QT_BUILD_INTERNAL
    if (auto *errors = that->bitmapErrors)
        errors->clear();
#endif

    std::vector<quintptr> blockBitmap{};
    blockBitmap.reserve(Chunk::EntriesInBitmap * that->mm->blockAllocator.chunks.size());
    takeBlackBitmap(that->mm->blockAllocator, blockBitmap);

    std::vector<quintptr> hugeItemBitmap{};
    hugeItemBitmap.reserve(Chunk::EntriesInBitmap * that->mm->hugeItemAllocator.chunks.size());
    takeBlackBitmap(that->mm->hugeItemAllocator, hugeItemBitmap);

    std::vector<quintptr> internalClassBitmap{};
    internalClassBitmap.reserve(Chunk::EntriesInBitmap * that->mm->icAllocator.chunks.size());
    takeBlackBitmap(that->mm->icAllocator, internalClassBitmap);

    runMarkPhase(that);

    checkBlackBitmap(that->mm->blockAllocator, blockBitmap);
    checkBlackBitmap(that->mm->hugeItemAllocator, hugeItemBitmap);
    checkBlackBitmap(that->mm->icAllocator, internalClassBitmap);

    return GCState::InitCallDestroyObjects;
}
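// In QT_BUILD_INTERNAL builds mismatches are recorded in bitmapErrors so
// autotests can assert on them instead of parsing the debug output.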
void redrain(GCStateMachine *that)
{
    that->mm->collectFromJSStack(that->mm->markStack());
    that->mm->m_markStack->drain();
}
GCState initCallDestroyObjects(GCStateMachine *that, ExtraData &stateData)
{
    // there is no deletion barrier, so values put on the JS stack after the
    // last drain must be rescanned before objects are destroyed
    redrain(that);
    if (!that->mm->m_weakValues)
        return GCState::FreeWeakMaps; // nothing to destroy
    stateData = GCIteratorStorage { that->mm->m_weakValues->begin() };
    return GCState::CallDestroyObjects;
}
GCState callDestroyObject(GCStateMachine *that, ExtraData &stateData)
{
    PersistentValueStorage::Iterator& it = get<GCIteratorStorage>(stateData).it;
    // destroyObject() can run user code, which must not re-enter the collector
    auto oldState = std::exchange(that->mm->gcBlocked, QV4::MemoryManager::Blockness::InCriticalSection);
    auto cleanup = qScopeGuard([&]() {
        that->mm->gcBlocked = oldState;
    });
    for (int i = 0; i < MarkLoopIterationCount; ++i) {
        if (!it.p)
            return GCState::FreeWeakMaps;
        Managed *m = (*it).managed();
        ++it;
        if (!m || m->markBit())
            continue;
        // QObject wrappers need destroyObject() so the wrapped object can be
        // dealt with before the heap is swept
        if (QObjectWrapper *qobjectWrapper = m->as<QObjectWrapper>())
            qobjectWrapper->destroyObject(/*lastCall =*/false);
    }
    return GCState::CallDestroyObjects;
}
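// Blockness::InCriticalSection keeps a collection from being (re)started
// while user code runs inside destroyObject(); the qScopeGuard restores the
// previous blockedness on every exit path from this state.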
void freeWeakMaps(MemoryManager *mm)
{
    for (auto [map, lastMap] = std::tuple {mm->weakMaps, &mm->weakMaps }; map; map = map->nextWeakMap) {
        if (!map->isMarked())
            continue;
        map->removeUnmarkedKeys();
        *lastMap = map;
        lastMap = &map->nextWeakMap;
    }
}
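// Marked maps are kept and purged of entries whose keys died; unmarked maps
// are simply never written through *lastMap, which unlinks them from the
// intrusive weakMaps list.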
GCState freeWeakMaps(GCStateMachine *that, ExtraData &)
{
    freeWeakMaps(that->mm);
    return GCState::FreeWeakSets;
}
void freeWeakSets(MemoryManager *mm)
{
    for (auto [set, lastSet] = std::tuple {mm->weakSets, &mm->weakSets}; set; set = set->nextWeakSet) {
        if (!set->isMarked())
            continue;
        set->removeUnmarkedKeys();
        *lastSet = set;
        lastSet = &set->nextWeakSet;
    }
}
GCState freeWeakSets(GCStateMachine *that, ExtraData &)
{
    freeWeakSets(that->mm);
    return GCState::HandleQObjectWrappers;
}
GCState handleQObjectWrappers(GCStateMachine *that, ExtraData &)
{
    that->mm->cleanupDeletedQObjectWrappersInSweep();
    return GCState::DoSweep;
}
GCState doSweep(GCStateMachine *that, ExtraData &)
{
    auto mm = that->mm;

    mm->engine->identifierTable->sweep();
    mm->blockAllocator.sweep();
    mm->hugeItemAllocator.sweep();
    mm->icAllocator.sweep();

    // reset all black bits
    mm->blockAllocator.resetBlackBits();
    mm->hugeItemAllocator.resetBlackBits();
    mm->icAllocator.resetBlackBits();

    mm->usedSlotsAfterLastFullSweep = mm->blockAllocator.usedSlotsAfterLastSweep + mm->icAllocator.usedSlotsAfterLastSweep;
    mm->gcBlocked = MemoryManager::Unblocked;
    mm->m_markStack.reset();
    mm->engine->isGCOngoing = false;

    mm->updateUnmanagedHeapSizeGCLimit();

    return GCState::Invalid;
}
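// GCState::Invalid ends the cycle: the machine idles until the next
// collection is requested, which starts over at GCState::MarkStart.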
#ifdef V4_USE_VALGRIND
static size_t dumpBins(BlockAllocator *b, const char *title)
{
    const QLoggingCategory &stats = lcGcAllocatorStats();
    size_t totalSlotMem = 0;
    if (title)
        qDebug(stats) << "Slot map for" << title << "allocator:";
    for (uint i = 0; i < BlockAllocator::NumBins; ++i) {
        uint nEntries = 0;
        HeapItem *h = b->freeBins[i];
        while (h) {
            ++nEntries;
            totalSlotMem += h->freeData.availableSlots;
            h = h->freeData.next;
        }
        if (title)
            qDebug(stats) << "    number of entries in slot" << i << ":" << nEntries;
    }
    SDUMP() << "    large slot map";
    HeapItem *h = b->freeBins[BlockAllocator::NumBins - 1];
    while (h) {
        SDUMP() << "        " << Qt::hex << (quintptr(h)/32) << h->freeData.availableSlots;
        h = h->freeData.next;
    }

    if (title)
        qDebug(stats) << "  total mem in bins" << totalSlotMem*Chunk::SlotSize;
    return totalSlotMem*Chunk::SlotSize;
}
        qDebug(lcGcForcedRuns) << "Tried to force the GC to complete a run but failed due to being in a critical section.";
        qDebug(stats) << "!!!!!!!!!!!!!!!!!!!!! LOST MEM:" << lost << "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!";
#ifdef V4_USE_VALGRIND
    qDebug(stats) << "Qml GC memory allocation statistics:";
    qDebug(stats) << "Requests for different item sizes:";
static void logStepTiming(GCStateMachine *that, quint64 timing)
{
    auto registerTimingWithResetOnOverflow = [](
            GCStateMachine::StepTiming& storage, quint64 timing, GCState state) {
        // lhs + rhs would wrap before it could be tested, so check
        // lhs > max - rhs instead, which is equivalent and cannot overflow
        auto wouldOverflow = [](quint64 lhs, quint64 rhs) {
            return rhs > 0 && lhs > std::numeric_limits<quint64>::max() - rhs;
        };

        if (wouldOverflow(storage.rolling_sum, timing) || wouldOverflow(storage.count, 1)) {
            qDebug(lcGcStepExecution) << "Resetting timings storage for"
                                      << QMetaEnum::fromType<GCState>().key(state) << "due to overflow.";
            storage.rolling_sum = timing;
            storage.count = 1;
        } else {
            storage.rolling_sum += timing;
            storage.count += 1;
        }
    };
    GCStateMachine::StepTiming& storage = that->executionTiming[that->state];
    registerTimingWithResetOnOverflow(storage, timing, that->state);

    qDebug(lcGcStepExecution) << "Performed" << QMetaEnum::fromType<GCState>().key(that->state)
                              << "in" << timing << "microseconds";
    qDebug(lcGcStepExecution) << "This step was performed" << storage.count << " time(s), executing in"
                              << (storage.rolling_sum / storage.count) << "microseconds on average.";
}
static GCState executeWithLoggingIfEnabled(GCStateMachine *that, GCStateInfo &stateInfo)
{
    if (!that->collectTimings)
        return stateInfo.execute(that, that->stateData);

    QElapsedTimer timer;
    timer.start();
    GCState next = stateInfo.execute(that, that->stateData);
    logStepTiming(that, timer.nsecsElapsed()/1000);
    return next;
}
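// nsecsElapsed()/1000 converts QElapsedTimer's nanoseconds into the
// microseconds that logStepTiming() reports; integer division truncates, so
// sub-microsecond steps are logged as 0.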
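// Qt convention: including the moc output compiles the meta-object data for
// this file's Q_GADGET/Q_ENUM types (used above for the GCState meta-enum
// lookups) in the same translation unit.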
#include "moc_qv4mm_p.cpp"