Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qv4persistent.cpp
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:significant
4
6#include <private/qv4mm_p.h>
7#include "qv4object_p.h"
9#include "PageAllocation.h"
10
11using namespace QV4;
12
namespace {

struct Page;

// Bookkeeping data placed at the start of every storage page.
struct Header {
    WTF::PageAllocation alloc;   // the OS allocation backing this Page; used to deallocate it
    ExecutionEngine *engine;
    Page **prev;                 // points at the previous page's `next` (or at storage->firstPage)
    Page *next;
    int refCount;                // references held by live values, iterators and the freePageHint
    int freeList;                // index of the first free slot in values[], -1 if the page is full
};

// Number of Value slots that fit into one OS page after the Header.
static const int kEntriesPerPage = int((WTF::pageSize() - sizeof(Header)) / sizeof(Value));

struct Page {
    Header header;
    Value values[1]; // Really kEntriesPerPage, but keep the compiler happy
};

// Recover the owning Page from a Value pointer by masking off the low
// address bits — pages are page-size aligned (asserted in allocatePage),
// and pageSize() is assumed to be a power of two.
Page *getPage(const Value *val) {
    return reinterpret_cast<Page *>(reinterpret_cast<quintptr>(val) & ~((quintptr)(WTF::pageSize() - 1)));
}

// Prepend p to the storage's doubly linked list of pages.
QML_NEARLY_ALWAYS_INLINE void insertInFront(PersistentValueStorage *storage, Page *p)
{
    p->header.next = reinterpret_cast<Page *>(storage->firstPage);
    p->header.prev = reinterpret_cast<Page **>(&storage->firstPage);
    if (p->header.next)
        p->header.next->header.prev = &p->header.next;
    storage->firstPage = p;
}

// Remove p from its page list; p's own prev/next pointers are left untouched.
QML_NEARLY_ALWAYS_INLINE void unlink(Page *p)
{
    if (p->header.prev)
        *p->header.prev = p->header.next;
    if (p->header.next)
        p->header.next->header.prev = p->header.prev;
}

// Allocate a fresh page, link it into the storage, and thread every slot
// onto the free list: each free slot encodes the index of the next free
// slot, with -1 terminating the chain.
Page *allocatePage(PersistentValueStorage *storage)
{
    PageAllocation page = WTF::PageAllocation::allocate(WTF::pageSize());
    Page *p = reinterpret_cast<Page *>(page.base());

    // getPage() relies on page-size alignment of this allocation.
    Q_ASSERT(!((quintptr)p & (WTF::pageSize() - 1)));

    p->header.engine = storage->engine;
    p->header.alloc = page;
    p->header.refCount = 0;
    p->header.freeList = 0;
    insertInFront(storage, p);
    for (int i = 0; i < kEntriesPerPage - 1; ++i) {
        p->values[i] = Encode(i + 1);
    }
    p->values[kEntriesPerPage - 1] = Encode(-1);

    return p;
}


}
75
76
77PersistentValueStorage::Iterator::Iterator(void *p, int idx)
78 : p(p), index(idx)
79{
80 Page *page = static_cast<Page *>(p);
81 if (page)
82 ++page->header.refCount;
83}
84
85PersistentValueStorage::Iterator::Iterator(const PersistentValueStorage::Iterator &o)
86 : p(o.p), index(o.index)
87{
88 Page *page = static_cast<Page *>(p);
89 if (page)
90 ++page->header.refCount;
91}
92
93PersistentValueStorage::Iterator &PersistentValueStorage::Iterator::operator=(const PersistentValueStorage::Iterator &o)
94{
95 Page *page = static_cast<Page *>(p);
96 if (page && !--page->header.refCount)
97 freePage(p);
98 p = o.p;
99 index = o.index;
100 page = static_cast<Page *>(p);
101 if (page)
102 ++page->header.refCount;
103
104 return *this;
105}
106
107PersistentValueStorage::Iterator::~Iterator()
108{
109 Page *page = static_cast<Page *>(p);
110 if (page && !--page->header.refCount)
111 freePage(page);
112}
113
// Advance to the next non-empty slot, walking across pages as needed.
// Page references are handed over along the way: the current page's
// refcount is dropped (freeing it if this iterator was its last user)
// and the next page's refcount is taken. When all pages are exhausted,
// p becomes null and index is reset to 0 — the end-iterator state.
PersistentValueStorage::Iterator &PersistentValueStorage::Iterator::operator++() {
    while (p) {
        while (index < kEntriesPerPage - 1) {
            ++index;
            if (!static_cast<Page *>(p)->values[index].isEmpty())
                return *this;
        }
        // Restart at -1 so the pre-increment above begins at slot 0
        // of the next page.
        index = -1;
        Page *next = static_cast<Page *>(p)->header.next;
        if (!--static_cast<Page *>(p)->header.refCount)
            freePage(p);
        p = next;
        if (next)
            ++next->header.refCount;
    }
    index = 0;
    return *this;
}
132
133Value &PersistentValueStorage::Iterator::operator *()
134{
135 return static_cast<Page *>(p)->values[index];
136}
137
// A storage starts out without any pages; they are allocated on demand
// by allocate().
PersistentValueStorage::PersistentValueStorage(ExecutionEngine *engine)
    : engine(engine),
      firstPage(nullptr)
{
}
143
PersistentValueStorage::~PersistentValueStorage()
{
    clearFreePageHint();
    Page *p = static_cast<Page *>(firstPage);
    while (p) {
        // Overwrite every still-occupied slot with undefined so the GC
        // no longer traces them; the slots themselves stay allocated.
        for (int i = 0; i < kEntriesPerPage; ++i) {
            if (!p->values[i].isEmpty())
                p->values[i] = Encode::undefined();
        }
        Page *n = p->header.next;
        // Detach the page from the engine and from the list. The page is
        // deliberately NOT deallocated here: outstanding persistent values
        // still point into it and release it via the refcount when freed.
        p->header.engine = nullptr;
        p->header.prev = nullptr;
        p->header.next = nullptr;
        Q_ASSERT(p->header.refCount);
        p = n;
    }
}
161
// Hand out one Value slot, preferring the cached freePageHint, then the
// first page in the list with free slots, and finally a fresh page.
Value *PersistentValueStorage::allocate()
{
    Page *p = static_cast<Page *>(freePageHint);
    if (p && p->header.freeList == -1)
        p = static_cast<Page *>(firstPage);
    while (p) {
        if (p->header.freeList != -1)
            break;
        p = p->header.next;
    }
    if (!p)
        p = allocatePage(this);

    // Pop the head of the page's free list; each free slot stores the
    // index of the next free slot (-1 terminates the chain).
    Value *v = p->values + p->header.freeList;
    p->header.freeList = v->int_32();

    // If the page still has room, cache it as the hint for the next
    // allocation. The hint holds its own reference on the page.
    if (p->header.freeList != -1 && p != freePageHint) {
        if (auto oldHint = static_cast<Page *>(freePageHint)) {
            oldHint->header.refCount--;
            // no need to free - if the old page were unused,
            // we would have used it to serve the allocation
            Q_ASSERT(oldHint->header.refCount);
        }
        freePageHint = p;
        p->header.refCount++;
    }

    // One reference per live value handed out.
    ++p->header.refCount;

    v->setRawValue(Encode::undefined());

    return v;
}
195
196void PersistentValueStorage::freeUnchecked(Value *v)
197{
198 Q_ASSERT(v);
199 Page *p = getPage(v);
200
201 *v = Encode(p->header.freeList);
202 p->header.freeList = v - p->values;
203 if (!--p->header.refCount)
204 freePage(p);
205}
206
207void PersistentValueStorage::mark(MarkStack *markStack)
208{
209 Page *p = static_cast<Page *>(firstPage);
210 while (p) {
211 for (int i = 0; i < kEntriesPerPage; ++i) {
212 if (Managed *m = p->values[i].as<Managed>())
213 m->mark(markStack);
214 }
215
216 p = p->header.next;
217 }
218}
219
220void PersistentValueStorage::clearFreePageHint()
221{
222 if (!freePageHint)
223 return;
224 auto page = static_cast<Page *>(freePageHint);
225 if (!--page->header.refCount)
226 freePage(page);
227 freePageHint = nullptr;
228
229}
230
231ExecutionEngine *PersistentValueStorage::getEngine(const Value *v)
232{
233 return getPage(v)->header.engine;
234}
235
236void PersistentValueStorage::freePage(void *page)
237{
238 Page *p = static_cast<Page *>(page);
239 unlink(p);
240 p->header.alloc.deallocate();
241}
242
243
244PersistentValue::PersistentValue(const PersistentValue &other)
245 : val(nullptr)
246{
247 if (other.val)
248 set(other.engine(), *other.val);
249}
250
// Allocates a slot and stores \a value (with write barrier, via set()).
PersistentValue::PersistentValue(ExecutionEngine *engine, const Value &value)
{
    set(engine, value);
}
255
// Allocates a slot and stores the returned value (with write barrier).
PersistentValue::PersistentValue(ExecutionEngine *engine, ReturnedValue value)
{
    set(engine, value);
}
260
261PersistentValue::PersistentValue(ExecutionEngine *engine, Object *object)
262 : val(nullptr)
263{
264 if (!object)
265 return;
266 set(engine, *object);
267}
268
PersistentValue &PersistentValue::operator=(const PersistentValue &other)
{
    if (!val) {
        if (!other.val)
            return *this;
        // Lazily grab a slot from the other value's engine.
        val = other.engine()->memoryManager->m_persistentValues->allocate();
    }
    if (!other.val) {
        // Keep our slot but clear it.
        *val = Encode::undefined();
        return *this;
    }

    Q_ASSERT(engine() == other.engine());

    // NOTE(review): plain slot copy without the write barrier that set()
    // applies — presumably safe because the source slot is already traced
    // as a root; confirm against the GC's barrier requirements.
    *val = *other.val;
    return *this;
}
286
PersistentValue &PersistentValue::operator=(const WeakValue &other)
{
    if (!val && !other.valueRef())
        return *this;
    if (!other.valueRef()) {
        // Weak side is empty: clear our slot (val is non-null here).
        *val = Encode::undefined();
        return *this;
    }

    Q_ASSERT(!engine() || engine() == other.engine());

    // set() allocates our slot if needed and applies the write barrier.
    set(other.engine(), *other.valueRef());
    return *this;
}
301
PersistentValue &PersistentValue::operator=(Object *object)
{
    if (!object) {
        // Release the slot.
        // NOTE(review): val is not reset to nullptr here — confirm that
        // PersistentValueStorage::free() leaves it safe for subsequent
        // assignment/destruction of this object.
        PersistentValueStorage::free(val);
        return *this;
    }
    set(object->engine(), *object);
    return *this;
}
311
312void PersistentValue::set(ExecutionEngine *engine, const Value &value)
313{
314 if (!val)
315 val = engine->memoryManager->m_persistentValues->allocate();
316 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *stack){
317 if (QV4::WriteBarrier::isInsertionBarrier && value.isManaged())
318 value.heapObject()->mark(stack);
319 });
320 *val = value;
321}
322
323void PersistentValue::set(ExecutionEngine *engine, ReturnedValue value)
324{
325 if (!val)
326 val = engine->memoryManager->m_persistentValues->allocate();
327 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *stack){
328 if constexpr (!QV4::WriteBarrier::isInsertionBarrier)
329 return;
330 auto val = Value::fromReturnedValue(value);
331 if (val.isManaged())
332 val.heapObject()->mark(stack);
333 });
334 *val = value;
335}
336
337void PersistentValue::set(ExecutionEngine *engine, Heap::Base *obj)
338{
339 if (!val)
340 val = engine->memoryManager->m_persistentValues->allocate();
341 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *stack){
342 if constexpr (QV4::WriteBarrier::isInsertionBarrier)
343 obj->mark(stack);
344 });
345
346 *val = obj;
347}
348
349WeakValue::WeakValue(const WeakValue &other)
350 : val(nullptr)
351{
352 if (other.val) {
353 allocVal(other.engine());
354 *val = *other.val;
355 }
356}
357
// Allocates a slot in the engine's weak-value storage and stores \a value.
WeakValue::WeakValue(ExecutionEngine *engine, const Value &value)
{
    allocVal(engine);
    *val = value;
}
363
WeakValue &WeakValue::operator=(const WeakValue &other)
{
    if (!val) {
        if (!other.val)
            return *this;
        // Lazily grab a slot from the other value's engine.
        allocVal(other.engine());
    }
    if (!other.val) {
        // Keep our slot but clear it.
        *val = Encode::undefined();
        return *this;
    }

    Q_ASSERT(engine() == other.engine());

    *val = *other.val;
    return *this;
}
381
// Releases the slot; see free() for the QObjectWrapper special case.
WeakValue::~WeakValue()
{
    free();
}
386
/*
    WeakValue::set should normally not mark objects; after all, a weak value
    is not supposed to keep an object alive.
    However, if we are past GCState::HandleQObjectWrappers, nothing will
    reset weak values referencing unmarked values, but those values will
    still be swept.
    That leads to stale pointers, and potentially to crashes. To avoid this,
    we mark the objects here (they might still get collected in the next gc
    run).
    This is especially important due to the way we handle QObjectWrappers.
 */
398void WeakValue::set(ExecutionEngine *engine, const Value &value)
399{
400 if (!val)
401 allocVal(engine);
402 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *ms) {
403 if (engine->memoryManager->gcStateMachine->state <= GCState::HandleQObjectWrappers)
404 return;
405 if (auto *h = value.heapObject())
406 h->mark(ms);
407 });
408 *val = value;
409}
410
411void WeakValue::set(ExecutionEngine *engine, ReturnedValue value)
412{
413 if (!val)
414 allocVal(engine);
415 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *ms) {
416 if (engine->memoryManager->gcStateMachine->state <= GCState::HandleQObjectWrappers)
417 return;
418 if (auto *h = QV4::Value::fromReturnedValue(value).heapObject())
419 h->mark(ms);
420 });
421
422 *val = value;
423}
424
425void WeakValue::set(ExecutionEngine *engine, Heap::Base *obj)
426{
427 if (!val)
428 allocVal(engine);
429 QV4::WriteBarrier::markCustom(engine, [&](QV4::MarkStack *ms) {
430 if (engine->memoryManager->gcStateMachine->state <= GCState::HandleQObjectWrappers)
431 return;
432 obj->mark(ms);
433 });
434 *val = obj;
435}
436
// Grab a fresh slot from the engine's weak-value storage.
void WeakValue::allocVal(ExecutionEngine *engine)
{
    val = engine->memoryManager->m_weakValues->allocate();
}
441
442void WeakValue::markOnce(MarkStack *markStack)
443{
444 if (!val)
445 return;
446 val->mark(markStack);
447}
448
// Release this weak value's slot. QObjectWrapper slots get special
// treatment: they are queued on the memory manager rather than freed
// immediately.
void WeakValue::free()
{
    if (!val)
        return;

    ExecutionEngine *e = engine();
    if (e && val->as<QObjectWrapper>()) {
        // Some QV4::QObjectWrapper Value will be freed in WeakValue::~WeakValue() before MemoryManager::sweep() is being called,
        // in this case we will never have a chance to call destroyObject() on those QV4::QObjectWrapper objects.
        // Here we don't free these Value immediately, instead we keep track of them to free them later in MemoryManager::sweep()
        e->memoryManager->m_pendingFreedObjectWrapperValue.push_back(val);
    } else {
        PersistentValueStorage::free(val);
    }

    val = nullptr;
}