Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qv4mm_p.h
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:critical reason:low-level-memory-management
4
5#ifndef QV4GC_H
6#define QV4GC_H
7
8//
9// W A R N I N G
10// -------------
11//
12// This file is not part of the Qt API. It exists purely as an
13// implementation detail. This header file may change from version to
14// version without notice, or even be removed.
15//
16// We mean it.
17//
18
19#include <private/qv4global_p.h>
20#include <private/qv4value_p.h>
21#include <private/qv4scopedvalue_p.h>
22#include <private/qv4object_p.h>
23#include <private/qv4mmdefs_p.h>
24#include <QList>
25
26#define MM_DEBUG 0
27
29
30namespace QV4 {
31
// Iterate a container that may grow while being iterated, without asking for
// its size on every step. Returns the first index where p doesn't hold, or
// the (final) end of the container.
template<typename Container, typename UnaryPred>
typename Container::size_type reiterate(
        Container &container, typename Container::size_type first, UnaryPred &&p)
{
    auto last = container.size();
    while (first < last) {
        for (; first < last; ++first) {
            if (!p(first))
                return first;
        }

        Q_ASSERT(first == last);

        // The container may have grown while we iterated. Re-fetch the size:
        // if it is unchanged we fall out of the outer loop (first == last);
        // otherwise we take another pass over the freshly appended tail.
        last = container.size();
    }

    // Return last (== size) rather than first, so callers may pass an
    // out-of-range first without doing a size check beforehand.
    return last;
}
57
// Index-based variant of partition that tolerates the container growing at
// the end while predicates are evaluated. Returns the size of the front
// section (all entries where p holds).
template<typename Container, class UnaryPred>
typename Container::size_type partition(Container &container, UnaryPred &&p)
{
    // Everything before the first failing entry already forms the front section.
    auto boundary = reiterate(container, 0, p);

    // Walk the remaining (possibly growing) tail. Each entry satisfying p is
    // swapped down to the boundary, extending the front section by one slot,
    // so the back section ends up holding only entries where p fails.
    reiterate(container, boundary + 1, [&p, &container, &boundary](const auto idx) {
        if (p(idx)) {
            // container.at(...) must be re-evaluated right here: evaluating p
            // may have grown the container and invalidated any reference to
            // an entry taken earlier.
            std::swap(container.at(idx), container.at(boundary));
            ++boundary;
        }
        return true;
    });

    return boundary;
}
81
// Polymorphic base for per-state GC payload data; the virtual destructor
// allows owners to delete concrete subclasses through a GCData pointer.
// (Defaulted destructor replaces the empty user-provided body `{};`.)
struct GCData { virtual ~GCData() = default; };
83
87
89 Q_GADGET_EXPORT(Q_QML_EXPORT)
90
91public:
92 enum GCState {
103 // The following needs to be after InitCallDestroyObjects,
104 // even if it normally would run before it, to ensure that in
105 // a normal incremental run the stack is redrained before this
106 // is run as we make use of that knowledge in a test.
115 };
116 Q_ENUM(GCState)
117
122
123 struct GCStateInfo {
125 GCState (*execute)(GCStateMachine *, ExtraData &) = nullptr; // Function to execute for this state, returns true if ready to transition
126 bool breakAfter{false};
127 };
128
135 MemoryManager *mm = nullptr;
136 ExtraData stateData; // extra date for specific states
137 bool collectTimings = false;
138 #ifdef QT_BUILD_INTERNAL
139 // This is used only to simplify testing.
141 std::vector<BitmapError> *bitmapErrors = nullptr;
142 #endif
143
145
146 inline void step() {
147 if (!inProgress()) {
148 reset();
149 }
150 transition();
151 }
152
    // A collection cycle is in progress unless the machine sits in the
    // Invalid (idle) state.
    inline bool inProgress() {
        return state != GCState::Invalid;
    }
156
    // Arm the machine at the start of a collection cycle (marking phase first).
    inline void reset() {
        state = GCState::MarkStart;
    }
160
161 Q_QML_EXPORT void transition();
162
    // NOTE(review): presumably the hook for a state exceeding its time
    // budget (cf. timeLimit/deadline members) — currently a deliberate no-op.
    inline void handleTimeout(GCState state) {
        Q_UNUSED(state);
    }
166};
167
170
171struct ChunkAllocator;
172struct MemorySegment;
173
    // Stores the shared chunk allocator and engine used for all allocations.
    BlockAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
        : chunkAllocator(chunkAllocator), engine(engine)
    {
        // Start with every size-class free list empty (null HeapItem pointers).
        memset(freeBins, 0, sizeof(freeBins));
    }
180
181 enum { NumBins = 8 };
182
183 static inline size_t binForSlots(size_t nSlots) {
184 return nSlots >= NumBins ? NumBins - 1 : nSlots;
185 }
186
187 HeapItem *allocate(size_t size, bool forceAllocation = false);
188
190 return Chunk::AvailableSlots*chunks.size();
191 }
192
194 return chunks.size()*Chunk::DataSize;
195 }
196 size_t usedMem() const {
197 uint used = 0;
198 for (auto c : chunks)
199 used += c->nUsedSlots()*Chunk::SlotSize;
200 return used;
201 }
202
203 void sweep();
204 void freeAll();
205 void resetBlackBits();
206
207 // bump allocations
208 HeapItem *nextFree = nullptr;
213 ExecutionEngine *engine;
215 uint *allocationStats = nullptr;
216};
217
    // Allocator for oversized items (larger than a chunk's data area — see
    // MemoryManager::allocate); stores the shared chunk allocator and engine.
    HugeItemAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
        : chunkAllocator(chunkAllocator), engine(engine)
    {}
222
223 HeapItem *allocate(size_t size);
224 void sweep();
225 void freeAll();
226 void resetBlackBits();
227
228 size_t usedMem() const {
229 size_t used = 0;
230 for (const auto &c : chunks)
231 used += c.size;
232 return used;
233 }
234
236 ExecutionEngine *engine;
242
244};
245
246
247class Q_QML_EXPORT MemoryManager
248{
250
251public:
253 ~MemoryManager();
254
255 template <typename ToBeMarked>
256 friend struct GCCriticalSection;
257
258 // TODO: this is only for 64bit (and x86 with SSE/AVX), so extend it for other architectures to be slightly more efficient (meaning, align on 8-byte boundaries).
259 // Note: all occurrences of "16" in alloc/dealloc are also due to the alignment.
    // Round size up to the next multiple of Chunk::SlotSize (a power of two).
    constexpr static inline std::size_t align(std::size_t size)
    { return (size + Chunk::SlotSize - 1) & ~(Chunk::SlotSize - 1); }
262
263 /* NOTE: allocManaged comes in various overloads. If size is not passed explicitly
264 sizeof(ManagedType::Data) is used for size. However, there are quite a few cases
265 where we allocate more than sizeof(ManagedType::Data); that's generally the case
266 when the Object has a ValueArray member.
267 If no internal class pointer is provided, ManagedType::defaultInternalClass(engine)
268 will be used as the internal class.
269 */
270
271 template<typename ManagedType>
272 inline typename ManagedType::Data *allocManaged(std::size_t size, Heap::InternalClass *ic)
273 {
274 static_assert(std::is_trivially_copyable_v<typename ManagedType::Data>);
275 static_assert(std::is_trivially_default_constructible_v<typename ManagedType::Data>);
276 size = align(size);
277 typename ManagedType::Data *d = static_cast<typename ManagedType::Data *>(allocData(size));
278 d->internalClass.set(engine, ic);
279 Q_ASSERT(d->internalClass && d->internalClass->vtable);
280 Q_ASSERT(ic->vtable == ManagedType::staticVTable());
281 return d;
282 }
283
284 template<typename ManagedType>
285 inline typename ManagedType::Data *allocManaged(Heap::InternalClass *ic)
286 {
287 return allocManaged<ManagedType>(sizeof(typename ManagedType::Data), ic);
288 }
289
290 template<typename ManagedType>
291 inline typename ManagedType::Data *allocManaged(std::size_t size, InternalClass *ic)
292 {
293 return allocManaged<ManagedType>(size, ic->d());
294 }
295
296 template<typename ManagedType>
297 inline typename ManagedType::Data *allocManaged(InternalClass *ic)
298 {
299 return allocManaged<ManagedType>(sizeof(typename ManagedType::Data), ic);
300 }
301
302 template<typename ManagedType>
303 inline typename ManagedType::Data *allocManaged(std::size_t size)
304 {
305 Scope scope(engine);
306 Scoped<InternalClass> ic(scope, ManagedType::defaultInternalClass(engine));
307 return allocManaged<ManagedType>(size, ic);
308 }
309
310 template<typename ManagedType>
311 inline typename ManagedType::Data *allocManaged()
312 {
313 auto constexpr size = sizeof(typename ManagedType::Data);
314 Scope scope(engine);
315 Scoped<InternalClass> ic(scope, ManagedType::defaultInternalClass(engine));
316 return allocManaged<ManagedType>(size, ic);
317 }
318
319 template <typename ObjectType>
320 typename ObjectType::Data *allocateObject(Heap::InternalClass *ic)
321 {
322 Heap::Object *o = allocObjectWithMemberData(ObjectType::staticVTable(), ic->size);
323 o->internalClass.set(engine, ic);
324 Q_ASSERT(o->internalClass.get() && o->vtable());
325 Q_ASSERT(o->vtable() == ObjectType::staticVTable());
326 return static_cast<typename ObjectType::Data *>(o);
327 }
328
329 template <typename ObjectType>
330 typename ObjectType::Data *allocateObject(InternalClass *ic)
331 {
332 return allocateObject<ObjectType>(ic->d());
333 }
334
    // Allocate an object using ObjectType's default internal class, adjusted
    // to carry the type's static vtable and default prototype.
    template <typename ObjectType>
    typename ObjectType::Data *allocateObject()
    {
        Scope scope(engine);
        Scoped<InternalClass> ic(scope, ObjectType::defaultInternalClass(engine));
        // NOTE(review): each result is routed back through the Scoped —
        // presumably because changeVTable/changePrototype can allocate and
        // trigger GC; keep this order and rooting intact.
        ic = ic->changeVTable(ObjectType::staticVTable());
        ic = ic->changePrototype(ObjectType::defaultPrototype(engine)->d());
        return allocateObject<ObjectType>(ic);
    }
344
345 template <typename ManagedType, typename Arg1>
346 typename ManagedType::Data *allocWithStringData(std::size_t unmanagedSize, Arg1 &&arg1)
347 {
348 typename ManagedType::Data *o = reinterpret_cast<typename ManagedType::Data *>(allocString(unmanagedSize));
349 o->internalClass.set(engine, ManagedType::defaultInternalClass(engine));
350 Q_ASSERT(o->internalClass && o->internalClass->vtable);
351 o->init(std::forward<Arg1>(arg1));
352 return o;
353 }
354
355 template <typename ObjectType, typename... Args>
356 typename ObjectType::Data *allocObject(Heap::InternalClass *ic, Args&&... args)
357 {
358 typename ObjectType::Data *d = allocateObject<ObjectType>(ic);
359 d->init(std::forward<Args>(args)...);
360 return d;
361 }
362
363 template <typename ObjectType, typename... Args>
364 typename ObjectType::Data *allocObject(InternalClass *ic, Args&&... args)
365 {
366 typename ObjectType::Data *d = allocateObject<ObjectType>(ic);
367 d->init(std::forward<Args>(args)...);
368 return d;
369 }
370
371 template <typename ObjectType, typename... Args>
372 typename ObjectType::Data *allocate(Args&&... args)
373 {
374 Scope scope(engine);
375 Scoped<ObjectType> t(scope, allocateObject<ObjectType>());
376 t->d_unchecked()->init(std::forward<Args>(args)...);
377 return t->d();
378 }
379
380 template <typename ManagedType, typename... Args>
381 typename ManagedType::Data *alloc(Args&&... args)
382 {
383 Scope scope(engine);
384 Scoped<ManagedType> t(scope, allocManaged<ManagedType>());
385 t->d_unchecked()->init(std::forward<Args>(args)...);
386 return t->d();
387 }
388
389 void runGC();
391 void runFullGC();
392
393 void dumpStats() const;
394
396 size_t getAllocatedMem() const;
397 size_t getLargeItemsMem() const;
398
    // called when a JS object grows itself. Specifically: Heap::String::append
    // and InternalClassDataPrivate<PropertyAttributes>.
    // delta is signed (qptrdiff): negative values record released memory.
    void changeUnmanagedHeapSizeUsage(qptrdiff delta) { unmanagedHeapSize += delta; }
402
403 // called at the end of a gc cycle
405
406 template<typename ManagedType>
407 typename ManagedType::Data *allocIC()
408 {
409 Heap::Base *b = *allocate(&icAllocator, align(sizeof(typename ManagedType::Data)));
410 return static_cast<typename ManagedType::Data *>(b);
411 }
412
413 void registerWeakMap(Heap::MapObject *map);
414 void registerWeakSet(Heap::SetObject *set);
415
416 void onEventLoop();
417
418 //GC related methods
419 void setGCTimeLimit(int timeMs);
    // Non-owning accessor for the mark stack; null while no incremental GC
    // cycle is active (cf. the m_markStack != nullptr checks).
    MarkStack* markStack() { return m_markStack.get(); }
421
424
425protected:
426 /// expects size to be aligned
428 Heap::Base *allocData(std::size_t size);
429 Heap::Object *allocObjectWithMemberData(const QV4::VTable *vtable, uint nMembers);
430
431private:
432 enum {
433 MinUnmanagedHeapSizeGCLimit = 128 * 1024
434 };
435
436public:
438 void sweep(bool lastSweep = false);
441 {
442 const bool incrementalGCIsAlreadyRunning = m_markStack != nullptr;
443 const bool aboveUnmanagedHeapLimit = incrementalGCIsAlreadyRunning
444 ? unmanagedHeapSize > 3 * unmanagedHeapSizeGCLimit / 2
445 : unmanagedHeapSize > unmanagedHeapSizeGCLimit;
446 return aboveUnmanagedHeapLimit;
447 }
448private:
449 bool shouldRunGC() const;
450
451 HeapItem *allocate(BlockAllocator *allocator, std::size_t size)
452 {
453 const bool incrementalGCIsAlreadyRunning = m_markStack != nullptr;
454
455 bool didGCRun = false;
456 if (aggressiveGC) {
458 didGCRun = true;
459 }
460
462 if (!didGCRun)
463 incrementalGCIsAlreadyRunning ? (void) tryForceGCCompletion() : runGC();
464 didGCRun = true;
465 }
466
467 if (size > Chunk::DataSize)
468 return hugeItemAllocator.allocate(size);
469
470 if (HeapItem *m = allocator->allocate(size))
471 return m;
472
473 if (!didGCRun && shouldRunGC())
474 runGC();
475
476 return allocator->allocate(size, true);
477 }
478
479public:
484
485 QV4::ExecutionEngine *engine;
490 PersistentValueStorage *m_persistentValues;
491 PersistentValueStorage *m_weakValues;
495
498
499 // For recording objects from compilation units during GC
501
502 std::size_t unmanagedHeapSize = 0; // the amount of bytes of heap that is not managed by the memory manager, but which is held onto by managed items.
505
508 bool aggressiveGC = false;
510
511#if defined(MM_STATS) || !defined(QT_NO_DEBUG)
514#endif
515
522
524 {
531
532 void start(MemoryManager *mm);
533 void step(MemoryManager *mm);
534 void end(MemoryManager *mm);
535 };
536
539};
540
541/*!
542 \internal
543 GCCriticalSection prevents the gc from running, until it is destructed.
544 In its dtor, it runs a check whether we've reached the unmanaged heap limit,
545 and triggers a gc run if necessary.
546 Lastly, it can optionally mark an object passed to it before running the gc.
547 */
548template <typename ToBeMarked = void>
551
561 {
564 if constexpr (!std::is_same_v<ToBeMarked, void>)
565 if (m_toBeMarked)
567 /* because we blocked the gc, we might be using too much memory on the unmanaged heap
568 and did not run the normal fixup logic. So recheck again, and trigger a gc run
569 if necessary*/
571 return;
572 if (!m_engine->isGCOngoing) {
574 } else {
577 }
578 }
579
580private:
582 MemoryManager::Blockness m_oldState;
583 ToBeMarked *m_toBeMarked;
584};
585
586}
587
588QT_END_NAMESPACE
589
590#endif // QV4GC_H
ManagedType::Data * allocManaged(InternalClass *ic)
Definition qv4mm_p.h:297
std::unique_ptr< CollectorStatistics > collectorStatistics
Definition qv4mm_p.h:538
bool isAboveUnmanagedHeapLimit()
Definition qv4mm_p.h:440
friend struct GCCriticalSection
Definition qv4mm_p.h:256
Heap::SetObject * weakSets
Definition qv4mm_p.h:494
QV4::ExecutionEngine * engine
Definition qv4mm_p.h:485
bool crossValidateIncrementalGC
Definition qv4mm_p.h:509
ObjectType::Data * allocateObject(Heap::InternalClass *ic)
Definition qv4mm_p.h:320
size_t getRegularItemsMem() const
Definition qv4mm.cpp:1378
Heap::Object * allocObjectWithMemberData(const QV4::VTable *vtable, uint nMembers)
Definition qv4mm.cpp:1080
size_t getLargeItemsMem() const
Definition qv4mm.cpp:1388
std::vector< QObject * > findObjectsForCompilationUnits(std::vector< QQmlRefPointer< CompiledData::CompilationUnit > > &&units)
Definition qv4mm.cpp:1608
ManagedType::Data * allocManaged()
Definition qv4mm_p.h:311
bool tryForceGCCompletion()
Definition qv4mm.cpp:1312
ManagedType::Data * allocManaged(std::size_t size, Heap::InternalClass *ic)
Definition qv4mm_p.h:272
ManagedType::Data * allocManaged(std::size_t size, InternalClass *ic)
Definition qv4mm_p.h:291
size_t lastAllocRequestedSlots
Definition qv4mm_p.h:513
void changeUnmanagedHeapSizeUsage(qptrdiff delta)
Definition qv4mm_p.h:401
QList< Value * > m_pendingFreedObjectWrapperValue
Definition qv4mm_p.h:492
std::unique_ptr< GCStateMachine > gcStateMachine
Definition qv4mm_p.h:496
ObjectType::Data * allocate(Args &&... args)
Definition qv4mm_p.h:372
Blockness gcBlocked
Definition qv4mm_p.h:507
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:486
PersistentValueStorage * m_persistentValues
Definition qv4mm_p.h:490
ManagedType::Data * allocWithStringData(std::size_t unmanagedSize, Arg1 &&arg1)
Definition qv4mm_p.h:346
void cleanupDeletedQObjectWrappersInSweep()
Definition qv4mm.cpp:1230
PersistentValueStorage * m_weakValues
Definition qv4mm_p.h:491
void sweep(bool lastSweep=false)
Definition qv4mm.cpp:1189
std::size_t unmanagedHeapSize
Definition qv4mm_p.h:502
ManagedType::Data * allocManaged(std::size_t size)
Definition qv4mm_p.h:303
std::unique_ptr< Statistics > statistics
Definition qv4mm_p.h:537
void registerWeakMap(Heap::MapObject *map)
Definition qv4mm.cpp:1416
ManagedType::Data * allocManaged(Heap::InternalClass *ic)
Definition qv4mm_p.h:285
void dumpStats() const
Definition qv4mm.cpp:1459
std::size_t usedSlotsAfterLastFullSweep
Definition qv4mm_p.h:504
BlockAllocator blockAllocator
Definition qv4mm_p.h:487
HugeItemAllocator hugeItemAllocator
Definition qv4mm_p.h:489
std::unique_ptr< MarkStack > m_markStack
Definition qv4mm_p.h:497
Heap::Base * allocData(std::size_t size)
Definition qv4mm.cpp:1065
size_t getAllocatedMem() const
Definition qv4mm.cpp:1383
ManagedType::Data * allocIC()
Definition qv4mm_p.h:407
void setGCTimeLimit(int timeMs)
Definition qv4mm.cpp:1184
ObjectType::Data * allocateObject()
Definition qv4mm_p.h:336
void registerWeakSet(Heap::SetObject *set)
Definition qv4mm.cpp:1422
ObjectType::Data * allocObject(InternalClass *ic, Args &&... args)
Definition qv4mm_p.h:364
Heap::MapObject * weakMaps
Definition qv4mm_p.h:493
void updateUnmanagedHeapSizeGCLimit()
Definition qv4mm.cpp:1393
ManagedType::Data * alloc(Args &&... args)
Definition qv4mm_p.h:381
ObjectType::Data * allocObject(Heap::InternalClass *ic, Args &&... args)
Definition qv4mm_p.h:356
ObjectsForCompilationUnit * m_recordedObjects
Definition qv4mm_p.h:500
std::size_t unmanagedHeapSizeGCLimit
Definition qv4mm_p.h:503
static constexpr std::size_t align(std::size_t size)
Definition qv4mm_p.h:260
ObjectType::Data * allocateObject(InternalClass *ic)
Definition qv4mm_p.h:330
MarkStack * markStack()
Definition qv4mm_p.h:420
BlockAllocator icAllocator
Definition qv4mm_p.h:488
Combined button and popup list for selecting options.
static void updateProtoUsage(Heap::Object *o, Heap::InternalClass *ic)
static Heap::InternalClass * cleanInternalClass(Heap::InternalClass *orig)
static PropertyAttributes attributesFromFlags(int flags)
static void addDummyEntry(InternalClass *newClass, PropertyHash::Entry e)
Definition qjsvalue.h:24
GCStateMachine::GCStateInfo GCStateInfo
Definition qv4mm_p.h:169
Container::size_type partition(Container &container, UnaryPred &&p)
Definition qv4mm_p.h:60
Container::size_type reiterate(Container &container, typename Container::size_type first, UnaryPred &&p)
Definition qv4mm_p.h:35
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:212
size_t usedMem() const
Definition qv4mm_p.h:196
HeapItem * freeBins[NumBins]
Definition qv4mm_p.h:211
uint * allocationStats
Definition qv4mm_p.h:215
size_t usedSlotsAfterLastSweep
Definition qv4mm_p.h:210
std::vector< Chunk * > chunks
Definition qv4mm_p.h:214
BlockAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
Definition qv4mm_p.h:175
void resetBlackBits()
Definition qv4mm.cpp:559
HeapItem * allocate(size_t size, bool forceAllocation=false)
Definition qv4mm.cpp:415
size_t totalSlots() const
Definition qv4mm_p.h:189
static size_t binForSlots(size_t nSlots)
Definition qv4mm_p.h:183
HeapItem * nextFree
Definition qv4mm_p.h:208
size_t allocatedMem() const
Definition qv4mm_p.h:193
ExecutionEngine * engine
Definition qv4mm_p.h:213
virtual ~GCData()
Definition qv4mm_p.h:82
PersistentValueStorage::Iterator it
Definition qv4mm_p.h:85
GCState(* execute)(GCStateMachine *, ExtraData &)
Definition qv4mm_p.h:125
Q_QML_EXPORT void transition()
Definition qv4mm.cpp:1573
std::chrono::microseconds timeLimit
Definition qv4mm_p.h:131
MemoryManager * mm
Definition qv4mm_p.h:135
void handleTimeout(GCState state)
Definition qv4mm_p.h:163
ExtraData stateData
Definition qv4mm_p.h:136
std::array< StepTiming, GCState::Count > executionTiming
Definition qv4mm_p.h:134
std::array< GCStateInfo, GCState::Count > stateInfoMap
Definition qv4mm_p.h:133
QDeadlineTimer deadline
Definition qv4mm_p.h:132
void init(InternalClass *other)
void init(ExecutionEngine *engine)
InternalClassTransition Transition
InternalClassTransition & lookupOrInsertTransition(const InternalClassTransition &t)
static void markObjects(Heap::Base *ic, MarkStack *stack)
static void removeMember(QV4::Object *object, PropertyKey identifier)
PropertyHash::Entry * findEntry(const PropertyKey id)
ExecutionEngine * engine
Definition qv4mm_p.h:236
std::vector< HugeChunk > chunks
Definition qv4mm_p.h:243
HugeItemAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
Definition qv4mm_p.h:219
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:235
size_t usedMem() const
Definition qv4mm_p.h:228
HeapItem * allocate(size_t size)
Definition qv4mm.cpp:565
void start(MemoryManager *mm)
Definition qv4mm.cpp:1621
uint allocations[BlockAllocator::NumBins]
Definition qv4mm_p.h:520
PropertyHash::Entry * entries
void addEntry(const Entry &entry, int classSize)
PropertyHash(const PropertyHash &other)
PropertyHashData * d
void detach(bool grow, int classSize)
bool isStringOrSymbol() const
bool isArrayIndex() const
uint asArrayIndex() const
static PropertyKey invalid()
Scope(ExecutionEngine *e)
SharedInternalClassDataPrivate(const SharedInternalClassDataPrivate &other, uint pos, PropertyKey value)
SharedInternalClassDataPrivate(const SharedInternalClassDataPrivate &other)