Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qv4mm_p.h
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:critical reason:low-level-memory-management
4
5#ifndef QV4GC_H
6#define QV4GC_H
7
8//
9// W A R N I N G
10// -------------
11//
12// This file is not part of the Qt API. It exists purely as an
13// implementation detail. This header file may change from version to
14// version without notice, or even be removed.
15//
16// We mean it.
17//
18
19#include <private/qv4global_p.h>
20#include <private/qv4value_p.h>
21#include <private/qv4scopedvalue_p.h>
22#include <private/qv4object_p.h>
23#include <private/qv4mmdefs_p.h>
24#include <QList>
25
26#define MM_DEBUG 0
27
29
30namespace QV4 {
31
// Iterate a potentially growing container without querying the size on each
// iteration. Returns the first index where p doesn't hold, or the end of the
// container. The cached size is only refreshed once the cached end is reached,
// so the predicate is free to append entries while we iterate.
template<typename Container, typename UnaryPred>
typename Container::size_type reiterate(
        Container &container, typename Container::size_type first, UnaryPred &&p)
{
    auto cachedEnd = container.size();
    while (first < cachedEnd) {
        if (!p(first))
            return first; // p failed here; report this index.

        if (++first == cachedEnd) {
            // Cached range exhausted. Re-fetch the size: if the container has
            // grown in the meantime, keep going over the new tail; otherwise
            // the loop condition fails and we're done.
            cachedEnd = container.size();
        }
    }

    // Returning the (re-fetched) end also gracefully handles an out-of-range
    // 'first' on entry, saving the caller a size check ahead of this call.
    return cachedEnd;
}
57
// index based version of partition to handle potential growth at the end.
// Returns the partition point: the number of leading entries for which p holds.
// Unlike std::partition, this stays valid if evaluating p appends entries to
// the container (which is why it works with indices, never iterators).
template<typename Container, class UnaryPred>
typename Container::size_type partition(Container &container, UnaryPred &&p)
{
    // Figure out the first entry where p doesn't hold.
    auto first = reiterate(container, 0, p);

    // Iterate the remaining entries and swap any entry where p holds
    // to the (moving) end of the front section of the container.
    // Any time we do that, the front section grows by 1. Therefore, the back
    // section can only contain entries where p doesn't hold in the end.
    reiterate(container, first + 1, [&p, &container, &first](const auto i) {
        if (p(i)) {
            // It's important to re-resolve container.at(i) for the std::swap.
            // The container may have grown as result of determining p, thereby
            // invalidating any reference to an entry taken before.
            std::swap(container.at(i), container.at(first++));
        }
        // Always continue: every entry (including any newly appended tail)
        // must be visited; reiterate only stops early on a false return.
        return true;
    });

    return first;
}
81
// Polymorphic base for GC payload data; the virtual destructor allows
// subclasses to be deleted through a GCData pointer. Use the idiomatic
// defaulted destructor instead of an empty body with a stray semicolon.
struct GCData { virtual ~GCData() = default; };
83
87
89 Q_GADGET_EXPORT(Q_QML_EXPORT)
90
91public:
92 enum GCState {
103 // The following needs to be after InitCallDestroyObjects,
104 // even if it normally would run before it, to ensure that in
105 // a normal incremental run the stack is redrained before this
106 // is run as we make use of that knowledge in a test.
115 };
116 Q_ENUM(GCState)
117
122
123 struct GCStateInfo {
125 GCState (*execute)(GCStateMachine *, ExtraData &) = nullptr; // Function to execute for this state, returns true if ready to transition
126 bool breakAfter{false};
127 };
128
135 MemoryManager *mm = nullptr;
136 ExtraData stateData; // extra data for specific states
137 bool collectTimings = false;
138 #ifdef QT_BUILD_INTERNAL
139 // This is used only to simplify testing.
141 std::vector<BitmapError> *bitmapErrors = nullptr;
142 #endif
143
145
146 inline void step() {
147 if (!inProgress()) {
148 reset();
149 }
150 transition();
151 }
152
153 inline bool inProgress() {
154 return state != GCState::Invalid;
155 }
156
    // Arm the state machine for a new GC cycle, starting with the mark phase.
    inline void reset() {
        state = GCState::MarkStart;
    }
160
161 Q_QML_EXPORT void transition();
162
    // Hook invoked for a state whose time budget ran out; intentionally a
    // no-op — the parameter exists only to keep the call-site interface.
    inline void handleTimeout(GCState state) {
        Q_UNUSED(state);
    }
166};
167
170
171struct ChunkAllocator;
172struct MemorySegment;
173
    // Stores the backing chunk allocator and owning engine; starts with
    // every free-list bin empty (no chunks are allocated up front).
    BlockAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
        : chunkAllocator(chunkAllocator), engine(engine)
    {
        memset(freeBins, 0, sizeof(freeBins));
    }
180
181 enum { NumBins = 8 };
182
183 static inline size_t binForSlots(size_t nSlots) {
184 return nSlots >= NumBins ? NumBins - 1 : nSlots;
185 }
186
187 HeapItem *allocate(size_t size, bool forceAllocation = false);
188
190 return Chunk::AvailableSlots*chunks.size();
191 }
192
194 return chunks.size()*Chunk::DataSize;
195 }
196 size_t usedMem() const {
197 uint used = 0;
198 for (auto c : chunks)
199 used += c->nUsedSlots()*Chunk::SlotSize;
200 return used;
201 }
202
203 void sweep();
204 void freeAll();
205 void resetBlackBits();
206
207 // bump allocations
208 HeapItem *nextFree = nullptr;
213 ExecutionEngine *engine;
215 uint *allocationStats = nullptr;
216};
217
    // Stores the backing chunk allocator and owning engine; performs no
    // allocation itself.
    HugeItemAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
        : chunkAllocator(chunkAllocator), engine(engine)
    {}
222
223 HeapItem *allocate(size_t size);
224 void sweep();
225 void freeAll();
226 void resetBlackBits();
227
228 size_t usedMem() const {
229 size_t used = 0;
230 for (const auto &c : chunks)
231 used += c.size;
232 return used;
233 }
234
236 ExecutionEngine *engine;
242
244};
245
246
247class Q_QML_EXPORT MemoryManager
248{
250
251public:
253 ~MemoryManager();
254
255 template <typename ToBeMarked>
256 friend struct GCCriticalSection;
257
258 // TODO: this is only for 64bit (and x86 with SSE/AVX), so extend it for other architectures to be slightly more efficient (meaning, align on 8-byte boundaries).
259 // Note: all occurrences of "16" in alloc/dealloc are also due to the alignment.
    // Round size up to the next multiple of Chunk::SlotSize (the mask trick
    // assumes SlotSize is a power of two).
    constexpr static inline std::size_t align(std::size_t size)
    { return (size + Chunk::SlotSize - 1) & ~(Chunk::SlotSize - 1); }
262
263 /* NOTE: allocManaged comes in various overloads. If size is not passed explicitly
264 sizeof(ManagedType::Data) is used for size. However, there are quite a few cases
265 where we allocate more than sizeof(ManagedType::Data); that's generally the case
266 when the Object has a ValueArray member.
267 If no internal class pointer is provided, ManagedType::defaultInternalClass(engine)
268 will be used as the internal class.
269 */
270
271 template<typename ManagedType>
272 inline typename ManagedType::Data *allocManaged(std::size_t size, Heap::InternalClass *ic)
273 {
274 Q_STATIC_ASSERT(std::is_trivial_v<typename ManagedType::Data>);
275 size = align(size);
276 typename ManagedType::Data *d = static_cast<typename ManagedType::Data *>(allocData(size));
277 d->internalClass.set(engine, ic);
278 Q_ASSERT(d->internalClass && d->internalClass->vtable);
279 Q_ASSERT(ic->vtable == ManagedType::staticVTable());
280 return d;
281 }
282
    // Convenience overload: allocates exactly sizeof(ManagedType::Data) bytes.
    template<typename ManagedType>
    inline typename ManagedType::Data *allocManaged(Heap::InternalClass *ic)
    {
        return allocManaged<ManagedType>(sizeof(typename ManagedType::Data), ic);
    }
288
    // Overload taking a wrapped InternalClass: forwards its heap pointer.
    template<typename ManagedType>
    inline typename ManagedType::Data *allocManaged(std::size_t size, InternalClass *ic)
    {
        return allocManaged<ManagedType>(size, ic->d());
    }
294
    // Convenience overload: default Data size with a wrapped InternalClass.
    template<typename ManagedType>
    inline typename ManagedType::Data *allocManaged(InternalClass *ic)
    {
        return allocManaged<ManagedType>(sizeof(typename ManagedType::Data), ic);
    }
300
    // Allocates 'size' bytes using ManagedType's default internal class.
    // The Scope/Scoped pair keeps the internal class rooted for the
    // duration of the allocation.
    template<typename ManagedType>
    inline typename ManagedType::Data *allocManaged(std::size_t size)
    {
        Scope scope(engine);
        Scoped<InternalClass> ic(scope, ManagedType::defaultInternalClass(engine));
        return allocManaged<ManagedType>(size, ic);
    }
308
309 template<typename ManagedType>
310 inline typename ManagedType::Data *allocManaged()
311 {
312 auto constexpr size = sizeof(typename ManagedType::Data);
313 Scope scope(engine);
314 Scoped<InternalClass> ic(scope, ManagedType::defaultInternalClass(engine));
315 return allocManaged<ManagedType>(size, ic);
316 }
317
    // Allocates an object with member data for ic->size members and installs
    // the given heap internal class on it. init() is NOT called here.
    template <typename ObjectType>
    typename ObjectType::Data *allocateObject(Heap::InternalClass *ic)
    {
        Heap::Object *o = allocObjectWithMemberData(ObjectType::staticVTable(), ic->size);
        o->internalClass.set(engine, ic);
        Q_ASSERT(o->internalClass.get() && o->vtable());
        Q_ASSERT(o->vtable() == ObjectType::staticVTable());
        return static_cast<typename ObjectType::Data *>(o);
    }
327
    // Overload taking a wrapped InternalClass: forwards its heap pointer.
    template <typename ObjectType>
    typename ObjectType::Data *allocateObject(InternalClass *ic)
    {
        return allocateObject<ObjectType>(ic->d());
    }
333
    // Allocates an object from ObjectType's default internal class, after
    // ensuring the class carries ObjectType's vtable and default prototype.
    // The Scoped<> keeps the (possibly replaced) class rooted throughout.
    template <typename ObjectType>
    typename ObjectType::Data *allocateObject()
    {
        Scope scope(engine);
        Scoped<InternalClass> ic(scope, ObjectType::defaultInternalClass(engine));
        ic = ic->changeVTable(ObjectType::staticVTable());
        ic = ic->changePrototype(ObjectType::defaultPrototype(engine)->d());
        return allocateObject<ObjectType>(ic);
    }
343
    // Allocates a string-like Data via allocString() — unmanagedSize covers
    // the out-of-band payload (presumably the character storage; see
    // changeUnmanagedHeapSizeUsage) — and forwards arg1 to its init().
    template <typename ManagedType, typename Arg1>
    typename ManagedType::Data *allocWithStringData(std::size_t unmanagedSize, Arg1 &&arg1)
    {
        typename ManagedType::Data *o = reinterpret_cast<typename ManagedType::Data *>(allocString(unmanagedSize));
        o->internalClass.set(engine, ManagedType::defaultInternalClass(engine));
        Q_ASSERT(o->internalClass && o->internalClass->vtable);
        o->init(std::forward<Arg1>(arg1));
        return o;
    }
353
    // Allocates an object with the given heap internal class and calls
    // init() with the forwarded arguments.
    template <typename ObjectType, typename... Args>
    typename ObjectType::Data *allocObject(Heap::InternalClass *ic, Args&&... args)
    {
        typename ObjectType::Data *d = allocateObject<ObjectType>(ic);
        d->init(std::forward<Args>(args)...);
        return d;
    }
361
    // Same as above, but takes a wrapped InternalClass.
    template <typename ObjectType, typename... Args>
    typename ObjectType::Data *allocObject(InternalClass *ic, Args&&... args)
    {
        typename ObjectType::Data *d = allocateObject<ObjectType>(ic);
        d->init(std::forward<Args>(args)...);
        return d;
    }
369
    // Allocates and initializes an object of ObjectType with its default
    // internal class. The Scoped<> keeps the fresh object rooted while
    // init() runs (init may allocate in turn).
    template <typename ObjectType, typename... Args>
    typename ObjectType::Data *allocate(Args&&... args)
    {
        Scope scope(engine);
        Scoped<ObjectType> t(scope, allocateObject<ObjectType>());
        t->d_unchecked()->init(std::forward<Args>(args)...);
        return t->d();
    }
378
    // Allocates and initializes a managed (non-Object) type with its default
    // internal class; the Scoped<> keeps it rooted while init() runs.
    template <typename ManagedType, typename... Args>
    typename ManagedType::Data *alloc(Args&&... args)
    {
        Scope scope(engine);
        Scoped<ManagedType> t(scope, allocManaged<ManagedType>());
        t->d_unchecked()->init(std::forward<Args>(args)...);
        return t->d();
    }
387
388 void runGC();
390 void runFullGC();
391
392 void dumpStats() const;
393
395 size_t getAllocatedMem() const;
396 size_t getLargeItemsMem() const;
397
    // called when a JS object grows itself. Specifically: Heap::String::append
    // and InternalClassDataPrivate<PropertyAttributes>.
    // delta is signed: negative values shrink the accounted unmanaged size.
    void changeUnmanagedHeapSizeUsage(qptrdiff delta) { unmanagedHeapSize += delta; }
401
402 // called at the end of a gc cycle
404
    // Allocates an internal-class Data object from the dedicated internal
    // class allocator (icAllocator) rather than the general block allocator.
    template<typename ManagedType>
    typename ManagedType::Data *allocIC()
    {
        Heap::Base *b = *allocate(&icAllocator, align(sizeof(typename ManagedType::Data)));
        return static_cast<typename ManagedType::Data *>(b);
    }
411
412 void registerWeakMap(Heap::MapObject *map);
413 void registerWeakSet(Heap::SetObject *set);
414
415 void onEventLoop();
416
417 //GC related methods
418 void setGCTimeLimit(int timeMs);
    // Non-owning accessor; null unless an incremental GC is in progress.
    MarkStack* markStack() { return m_markStack.get(); }
420
423
424protected:
425 /// expects size to be aligned
427 Heap::Base *allocData(std::size_t size);
428 Heap::Object *allocObjectWithMemberData(const QV4::VTable *vtable, uint nMembers);
429
430private:
431 enum {
432 MinUnmanagedHeapSizeGCLimit = 128 * 1024
433 };
434
435public:
437 void sweep(bool lastSweep = false);
440 {
441 const bool incrementalGCIsAlreadyRunning = m_markStack != nullptr;
442 const bool aboveUnmanagedHeapLimit = incrementalGCIsAlreadyRunning
443 ? unmanagedHeapSize > 3 * unmanagedHeapSizeGCLimit / 2
444 : unmanagedHeapSize > unmanagedHeapSizeGCLimit;
445 return aboveUnmanagedHeapLimit;
446 }
447private:
448 bool shouldRunGC() const;
449
450 HeapItem *allocate(BlockAllocator *allocator, std::size_t size)
451 {
452 const bool incrementalGCIsAlreadyRunning = m_markStack != nullptr;
453
454 bool didGCRun = false;
455 if (aggressiveGC) {
457 didGCRun = true;
458 }
459
461 if (!didGCRun)
462 incrementalGCIsAlreadyRunning ? (void) tryForceGCCompletion() : runGC();
463 didGCRun = true;
464 }
465
466 if (size > Chunk::DataSize)
467 return hugeItemAllocator.allocate(size);
468
469 if (HeapItem *m = allocator->allocate(size))
470 return m;
471
472 if (!didGCRun && shouldRunGC())
473 runGC();
474
475 return allocator->allocate(size, true);
476 }
477
478public:
483
484 QV4::ExecutionEngine *engine;
489 PersistentValueStorage *m_persistentValues;
490 PersistentValueStorage *m_weakValues;
494
497
498 // For recording objects from compilation units during GC
500
501 std::size_t unmanagedHeapSize = 0; // the amount of bytes of heap that is not managed by the memory manager, but which is held onto by managed items.
504
507 bool aggressiveGC = false;
509 bool gcStats = false;
510 bool gcCollectorStats = false;
511
512#if defined(MM_STATS) || !defined(QT_NO_DEBUG)
515#endif
516
517 struct {
522 } statistics;
523};
524
525/*!
526 \internal
527 GCCriticalSection prevents the gc from running, until it is destructed.
528 In its dtor, it runs a check whether we've reached the unmanaged heap limit,
529 and triggers a gc run if necessary.
530 Lastly, it can optionally mark an object passed to it before running the gc.
531 */
532template <typename ToBeMarked = void>
535
545 {
548 if constexpr (!std::is_same_v<ToBeMarked, void>)
549 if (m_toBeMarked)
551 /* because we blocked the gc, we might be using too much memory on the unmanaged heap
552 and did not run the normal fixup logic. So recheck again, and trigger a gc run
553 if necessary*/
555 return;
556 if (!m_engine->isGCOngoing) {
558 } else {
561 }
562 }
563
564private:
566 MemoryManager::Blockness m_oldState;
567 ToBeMarked *m_toBeMarked;
568};
569
570}
571
572QT_END_NAMESPACE
573
574#endif // QV4GC_H
ManagedType::Data * allocManaged(InternalClass *ic)
Definition qv4mm_p.h:296
bool isAboveUnmanagedHeapLimit()
Definition qv4mm_p.h:439
friend struct GCCriticalSection
Definition qv4mm_p.h:256
Heap::SetObject * weakSets
Definition qv4mm_p.h:493
QV4::ExecutionEngine * engine
Definition qv4mm_p.h:484
bool crossValidateIncrementalGC
Definition qv4mm_p.h:508
ObjectType::Data * allocateObject(Heap::InternalClass *ic)
Definition qv4mm_p.h:319
size_t getRegularItemsMem() const
Definition qv4mm.cpp:1410
Heap::Object * allocObjectWithMemberData(const QV4::VTable *vtable, uint nMembers)
Definition qv4mm.cpp:1077
size_t maxUsedAfterGC
Definition qv4mm_p.h:520
size_t getLargeItemsMem() const
Definition qv4mm.cpp:1420
std::vector< QObject * > findObjectsForCompilationUnits(std::vector< QQmlRefPointer< CompiledData::CompilationUnit > > &&units)
Definition qv4mm.cpp:1634
ManagedType::Data * allocManaged()
Definition qv4mm_p.h:310
bool tryForceGCCompletion()
Definition qv4mm.cpp:1302
ManagedType::Data * allocManaged(std::size_t size, Heap::InternalClass *ic)
Definition qv4mm_p.h:272
ManagedType::Data * allocManaged(std::size_t size, InternalClass *ic)
Definition qv4mm_p.h:290
size_t lastAllocRequestedSlots
Definition qv4mm_p.h:514
void changeUnmanagedHeapSizeUsage(qptrdiff delta)
Definition qv4mm_p.h:400
QList< Value * > m_pendingFreedObjectWrapperValue
Definition qv4mm_p.h:491
std::unique_ptr< GCStateMachine > gcStateMachine
Definition qv4mm_p.h:495
ObjectType::Data * allocate(Args &&... args)
Definition qv4mm_p.h:371
Blockness gcBlocked
Definition qv4mm_p.h:506
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:485
PersistentValueStorage * m_persistentValues
Definition qv4mm_p.h:489
ManagedType::Data * allocWithStringData(std::size_t unmanagedSize, Arg1 &&arg1)
Definition qv4mm_p.h:345
void cleanupDeletedQObjectWrappersInSweep()
Definition qv4mm.cpp:1220
PersistentValueStorage * m_weakValues
Definition qv4mm_p.h:490
void sweep(bool lastSweep=false)
Definition qv4mm.cpp:1179
std::size_t unmanagedHeapSize
Definition qv4mm_p.h:501
ManagedType::Data * allocManaged(std::size_t size)
Definition qv4mm_p.h:302
void registerWeakMap(Heap::MapObject *map)
Definition qv4mm.cpp:1448
ManagedType::Data * allocManaged(Heap::InternalClass *ic)
Definition qv4mm_p.h:284
void dumpStats() const
Definition qv4mm.cpp:1491
std::size_t usedSlotsAfterLastFullSweep
Definition qv4mm_p.h:503
BlockAllocator blockAllocator
Definition qv4mm_p.h:486
uint allocations[BlockAllocator::NumBins]
Definition qv4mm_p.h:521
HugeItemAllocator hugeItemAllocator
Definition qv4mm_p.h:488
std::unique_ptr< MarkStack > m_markStack
Definition qv4mm_p.h:496
Heap::Base * allocData(std::size_t size)
Definition qv4mm.cpp:1062
size_t getAllocatedMem() const
Definition qv4mm.cpp:1415
ManagedType::Data * allocIC()
Definition qv4mm_p.h:406
void setGCTimeLimit(int timeMs)
Definition qv4mm.cpp:1174
ObjectType::Data * allocateObject()
Definition qv4mm_p.h:335
size_t maxUsedBeforeGC
Definition qv4mm_p.h:519
void registerWeakSet(Heap::SetObject *set)
Definition qv4mm.cpp:1454
ObjectType::Data * allocObject(InternalClass *ic, Args &&... args)
Definition qv4mm_p.h:363
Heap::MapObject * weakMaps
Definition qv4mm_p.h:492
void updateUnmanagedHeapSizeGCLimit()
Definition qv4mm.cpp:1425
ManagedType::Data * alloc(Args &&... args)
Definition qv4mm_p.h:380
ObjectType::Data * allocObject(Heap::InternalClass *ic, Args &&... args)
Definition qv4mm_p.h:355
ObjectsForCompilationUnit * m_recordedObjects
Definition qv4mm_p.h:499
std::size_t unmanagedHeapSizeGCLimit
Definition qv4mm_p.h:502
static constexpr std::size_t align(std::size_t size)
Definition qv4mm_p.h:260
ObjectType::Data * allocateObject(InternalClass *ic)
Definition qv4mm_p.h:329
MarkStack * markStack()
Definition qv4mm_p.h:419
BlockAllocator icAllocator
Definition qv4mm_p.h:487
size_t maxAllocatedMem
Definition qv4mm_p.h:518
Combined button and popup list for selecting options.
static void updateProtoUsage(Heap::Object *o, Heap::InternalClass *ic)
static Heap::InternalClass * cleanInternalClass(Heap::InternalClass *orig)
static PropertyAttributes attributesFromFlags(int flags)
static void addDummyEntry(InternalClass *newClass, PropertyHash::Entry e)
Definition qjsvalue.h:24
GCStateMachine::GCStateInfo GCStateInfo
Definition qv4mm_p.h:169
Container::size_type partition(Container &container, UnaryPred &&p)
Definition qv4mm_p.h:60
Q_STATIC_ASSERT(sizeof(CppStackFrame)==sizeof(JSTypesStackFrame))
Container::size_type reiterate(Container &container, typename Container::size_type first, UnaryPred &&p)
Definition qv4mm_p.h:35
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:212
size_t usedMem() const
Definition qv4mm_p.h:196
HeapItem * freeBins[NumBins]
Definition qv4mm_p.h:211
uint * allocationStats
Definition qv4mm_p.h:215
size_t usedSlotsAfterLastSweep
Definition qv4mm_p.h:210
std::vector< Chunk * > chunks
Definition qv4mm_p.h:214
BlockAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
Definition qv4mm_p.h:175
void resetBlackBits()
Definition qv4mm.cpp:559
HeapItem * allocate(size_t size, bool forceAllocation=false)
Definition qv4mm.cpp:415
size_t totalSlots() const
Definition qv4mm_p.h:189
static size_t binForSlots(size_t nSlots)
Definition qv4mm_p.h:183
HeapItem * nextFree
Definition qv4mm_p.h:208
size_t allocatedMem() const
Definition qv4mm_p.h:193
ExecutionEngine * engine
Definition qv4mm_p.h:213
virtual ~GCData()
Definition qv4mm_p.h:82
PersistentValueStorage::Iterator it
Definition qv4mm_p.h:85
GCState(* execute)(GCStateMachine *, ExtraData &)
Definition qv4mm_p.h:125
Q_QML_EXPORT void transition()
Definition qv4mm.cpp:1589
std::chrono::microseconds timeLimit
Definition qv4mm_p.h:131
MemoryManager * mm
Definition qv4mm_p.h:135
void handleTimeout(GCState state)
Definition qv4mm_p.h:163
ExtraData stateData
Definition qv4mm_p.h:136
std::array< StepTiming, GCState::Count > executionTiming
Definition qv4mm_p.h:134
std::array< GCStateInfo, GCState::Count > stateInfoMap
Definition qv4mm_p.h:133
QDeadlineTimer deadline
Definition qv4mm_p.h:132
void init(InternalClass *other)
void init(ExecutionEngine *engine)
InternalClassTransition Transition
InternalClassTransition & lookupOrInsertTransition(const InternalClassTransition &t)
static void markObjects(Heap::Base *ic, MarkStack *stack)
static void removeMember(QV4::Object *object, PropertyKey identifier)
PropertyHash::Entry * findEntry(const PropertyKey id)
ExecutionEngine * engine
Definition qv4mm_p.h:236
std::vector< HugeChunk > chunks
Definition qv4mm_p.h:243
HugeItemAllocator(ChunkAllocator *chunkAllocator, ExecutionEngine *engine)
Definition qv4mm_p.h:219
ChunkAllocator * chunkAllocator
Definition qv4mm_p.h:235
size_t usedMem() const
Definition qv4mm_p.h:228
HeapItem * allocate(size_t size)
Definition qv4mm.cpp:565
PropertyHash::Entry * entries
void addEntry(const Entry &entry, int classSize)
PropertyHash(const PropertyHash &other)
PropertyHashData * d
void detach(bool grow, int classSize)
bool isStringOrSymbol() const
bool isArrayIndex() const
uint asArrayIndex() const
static PropertyKey invalid()
Scope(ExecutionEngine *e)
SharedInternalClassDataPrivate(const SharedInternalClassDataPrivate &other, uint pos, PropertyKey value)
SharedInternalClassDataPrivate(const SharedInternalClassDataPrivate &other)