// Convenience typedefs re-exported from the per-slot element type so the
// free list's accessors can return references in the element's own terms.
117 typedef typename ElementType::ConstReferenceType ConstReferenceType;
118 typedef typename ElementType::ReferenceType ReferenceType;
121 static inline int blockfor(
int &x)
123 x = x & ConstantsType::IndexMask;
124 for (
int i = 0; i < ConstantsType::BlockCount; ++i) {
125 int size = ConstantsType::Sizes[i];
130 Q_UNREACHABLE_RETURN(-1);
134 static inline ElementType *allocate(
int offset,
int size)
137 ElementType *v =
new ElementType[size];
138 for (
int i = 0; i < size; ++i)
139 v[i].next.store(offset + i + 1, std::memory_order_relaxed);
144 static inline int incrementserial(
int o,
int n)
146 return int((uint(n) & ConstantsType::IndexMask) | ((uint(o) + ConstantsType::SerialCounter) & ConstantsType::SerialMask));
// The element blocks; entries are allocated lazily (see allocate()) and
// published to other threads with release-ordered atomic operations.
150 q20::atomic<ElementType *> _v[ConstantsType::BlockCount];
// Tagged head of the free list: the low IndexMask bits are the index of the
// next free entry, the high bits a serial counter (see incrementserial()).
152 q20::atomic<
int> _next;
// Read-only access to the element stored at index \a x (defined out of line).
162 inline ConstReferenceType
at(
int x)
const;
166
167
168
// next(): pop one index off the lock-free free list. Fragment only — the
// enclosing function header and retry-loop opener are outside this view.
// Load the tagged head: low IndexMask bits = index, high bits = serial.
205 int id = _next.load(std::memory_order_acquire);
// Reduce the head index to (block, offset-within-block).
207 int at = id & ConstantsType::IndexMask;
208 const int block = blockfor(at);
209 ElementType *v = _v[block].load(std::memory_order_acquire);
// Block not yet allocated (presumably guarded by an 'if (!v)' on a line not
// visible here): create it lazily. The offset argument is the global index
// of the block's first entry.
212 ElementType*
const alloced = allocate((id & ConstantsType::IndexMask) - at,
213 ConstantsType::Sizes[block]);
// Publish the new block; the release store pairs with the acquire loads
// above. If another thread won the race, the failure path (not visible in
// this fragment) presumably reloads v and disposes of 'alloced' — confirm.
214 if (_v[block].compare_exchange_strong(v, alloced, std::memory_order_release, std::memory_order_acquire)) {
219 Q_ASSERT(v !=
nullptr);
// Candidate new head: the popped entry's 'next' index, with the old serial
// bits of 'id' preserved unchanged.
223 newid = v[at].next.load(std::memory_order_relaxed) | (id & ~ConstantsType::IndexMask);
224 }
// Retry the whole pop if another thread moved the head meanwhile;
// compare_exchange_strong refreshes 'id' with the current value on failure.
while (!_next.compare_exchange_strong(id, newid, std::memory_order_release, std::memory_order_acquire));
// release(id): push entry \a id back onto the lock-free free list. Fragment
// only — the function header and do { ... } opener are outside this view.
// Locate the entry's block and its offset within that block.
235 int at = id & ConstantsType::IndexMask;
236 const int block = blockfor(at);
237 ElementType *v = _v[block].load(std::memory_order_acquire);
// Link the released entry in front of the current head: its 'next' gets the
// head's index bits.
241 x = _next.load(std::memory_order_acquire);
// Relaxed is enough — the store is published by the CAS's release below.
242 v[at].next.store(x & ConstantsType::IndexMask, std::memory_order_relaxed);
// New head = this entry's index, with the serial counter advanced
// (presumably ABA protection — see incrementserial()).
244 newid = incrementserial(x, id);
245 }
// Weak CAS in a loop: spurious failure just retries; 'x' is refreshed with
// the current head value whenever the exchange fails.
while (!_next.compare_exchange_weak(x, newid, std::memory_order_release));