/* -*- mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- */

/*
 * Main authors:
 *   Guido Tack <guido.tack@monash.edu>
 */

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include <minizinc/ast.hh>
#include <minizinc/config.hh>
#include <minizinc/gc.hh>
#include <minizinc/hash.hh>
#include <minizinc/model.hh>
#include <minizinc/timer.hh>

#include <cstring>
#include <iostream>
#include <map>
#include <vector>

namespace MiniZinc {

GC*& GC::gc() {
#if defined(HAS_DECLSPEC_THREAD)
  __declspec(thread) static GC* gc = nullptr;
#elif defined(HAS_ATTR_THREAD)
  static __thread GC* gc = nullptr;
#else
#error Need thread-local storage
#endif
  return gc;
}
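
// Each thread sees its own GC instance through the thread-local pointer
// above; the instance is created lazily by GC::lock() and GC::setTimeout().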

bool GC::locked() {
  assert(gc());
  return gc()->_lockCount > 0;
}

GCLock::GCLock() { GC::lock(); }
GCLock::~GCLock() { GC::unlock(); }
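
// Illustrative RAII usage of GCLock (a sketch, not code from this file):
//
//   {
//     GCLock lock;  // GC::lock() may first run a collection
//     // ... allocate AST nodes here; GC::alloc() asserts that the lock is
//     // held, and no collection can happen inside this scope ...
//   }               // GC::unlock(); a later lock() may collect again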

class FreeListNode : public ASTNode {
public:
  FreeListNode* next;
  size_t size;
  FreeListNode(size_t s, FreeListNode* n) : ASTNode(ASTNode::NID_FL), next(n), size(s) {
    _gcMark = 1;
  }
  FreeListNode(size_t s) : ASTNode(ASTNode::NID_FL), next(nullptr), size(s) {}
};

class HeapPage {
public:
  HeapPage* next;
  size_t size;
  size_t used;
  char data[1];
  HeapPage(HeapPage* n, size_t s) : next(n), size(s), used(0) {}
};

/// Memory managed by the garbage collector
class GC::Heap {
  friend class GC;
#if defined(MINIZINC_GC_STATS)
  static const char* _nodeid[Item::II_END + 1];
  std::map<int, GCStat> gc_stats;
#endif
protected:
  static const size_t _min_gcThreshold;

  HeapPage* _page;
  GCMarker* _rootset;
  KeepAlive* _roots;
  WeakRef* _weakRefs;
  ASTNodeWeakMap* _nodeWeakMaps;
  static const int _max_fl = 5;
  FreeListNode* _fl[_max_fl + 1];
  static const size_t _fl_size[_max_fl + 1];
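  // Worked example for freelistSlot() below, assuming 8-byte pointers and,
  // purely for illustration, sizeof(Item) == 16: a block of 16 + 3 * 8 = 40
  // bytes maps to (40 - 16) / 8 - 1 = slot 2, i.e. slot i serves blocks whose
  // payload is exactly i + 1 pointer-sized words.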
  static int freelistSlot(size_t _size) {
    size_t size = _size;
    assert(size <= _fl_size[_max_fl]);
    assert(size >= _fl_size[0]);
    size -= sizeof(Item);
    assert(size % sizeof(void*) == 0);
    size /= sizeof(void*);
    assert(size >= 1);
    int slot = static_cast<int>(size) - 1;
    return slot;
  }

  /// Total amount of memory allocated
  size_t _allocedMem;
  /// Total amount of memory currently free
  size_t _freeMem;
  /// Memory threshold for next garbage collection
  size_t _gcThreshold;
  /// High water mark of all allocated memory
  size_t _maxAllocedMem;

  /// A trail item
  struct TItem {
    Expression** l;
    Expression* v;
    bool mark;
    TItem(Expression** l0, Expression* v0) : l(l0), v(v0), mark(false) {}
  };
  /// Trail
  std::vector<TItem> _trail;

  Heap()
      : _page(nullptr),
        _rootset(nullptr),
        _roots(nullptr),
        _weakRefs(nullptr),
        _nodeWeakMaps(nullptr),
        _allocedMem(0),
        _freeMem(0),
        _gcThreshold(_min_gcThreshold),
        _maxAllocedMem(0) {
    for (int i = _max_fl + 1; (i--) != 0;) {
      _fl[i] = nullptr;
    }
  }

  /// Default size of pages to allocate
  static const size_t pageSize = 1 << 20;

  HeapPage* allocPage(size_t s, bool exact = false) {
    if (!exact) {
      s = std::max(s, pageSize);
    }
    auto* newPage = static_cast<HeapPage*>(::malloc(sizeof(HeapPage) + s - 1));
    if (newPage == nullptr) {
      throw InternalError("out of memory");
    }
#ifndef NDEBUG
    memset(newPage, 255, sizeof(HeapPage) + s - 1);
#endif
    _allocedMem += s;
    _maxAllocedMem = std::max(_maxAllocedMem, _allocedMem);
    _freeMem += s;
    if (exact && (_page != nullptr)) {
      new (newPage) HeapPage(_page->next, s);
      _page->next = newPage;
    } else {
      if (_page != nullptr) {
        size_t ns = _page->size - _page->used;
        assert(ns <= _fl_size[_max_fl]);
        if (ns >= _fl_size[0]) {
          // Remainder of page can be added to free lists
          auto* fln = reinterpret_cast<FreeListNode*>(_page->data + _page->used);
          _page->used += ns;
          new (fln) FreeListNode(ns, _fl[freelistSlot(ns)]);
          _fl[freelistSlot(ns)] = fln;
        } else {
          // Waste a little memory (less than smallest free list slot)
          _freeMem -= ns;
          assert(_allocedMem >= _freeMem);
        }
      }
      new (newPage) HeapPage(_page, s);
      _page = newPage;
    }
    return newPage;
  }

  void* alloc(size_t size, bool exact = false) {
    assert(size <= 80 || exact);
    /// Align to word boundary
    size += ((8 - (size & 7)) & 7);
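    // (Worked example: for size == 13, (8 - (13 & 7)) & 7 == 3, so the
    // request is rounded up to 16; multiples of 8 are left unchanged.)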
    HeapPage* p = _page;
    if (exact || _page == nullptr || _page->used + size >= _page->size) {
      p = allocPage(size, exact);
    }
    char* ret = p->data + p->used;
    p->used += size;
    _freeMem -= size;
    if (p->size - p->used < _fl_size[0]) {
      _freeMem -= (p->size - p->used);
      _allocedMem -= (p->size - p->used);
      p->size = p->used;
    }
    assert(_allocedMem >= _freeMem);
    return ret;
  }

  /// Allocate one object of type T (no initialisation)
  template <typename T>
  T* alloc() {
    return static_cast<T*>(alloc(sizeof(T)));
  }

  /// Allocate \a n objects of type T (no initialisation)
  template <typename T>
  T* alloc(int n) {
    return static_cast<T*>(alloc(n * sizeof(T)));
  }

  void* fl(size_t size) {
    int slot = freelistSlot(size);
    assert(slot <= _max_fl);
    if (_fl[slot] != nullptr) {
      FreeListNode* p = _fl[slot];
      _fl[slot] = p->next;
      _freeMem -= size;
      return p;
    }
    return alloc(size);
  }

  void trigger() {
#ifdef MINIZINC_GC_STATS
    std::cerr << "GC\n\talloced " << (_allocedMem / 1024) << "\n\tfree " << (_freeMem / 1024)
              << "\n\tdiff " << ((_allocedMem - _freeMem) / 1024) << "\n\tthreshold "
              << (_gcThreshold / 1024) << "\n";
#endif
    size_t old_free = _freeMem;
    mark();
    sweep();
    // GC strategy:
    // Increase the threshold if either
    // a) the sweep was not able to put much on the free list (compared to before the GC), or
    // b) the free memory (after GC) is less than 50% of the allocated memory.
    // Otherwise (i.e., the sweep has grown the free list, and it now covers more
    // than 50% of the overall allocated memory), keep the threshold at the allocated memory.
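    // (Illustrative numbers: with 100 MB allocated and only 20 MB free after
    // the sweep, the threshold becomes 150 MB; with 60 MB free, and a free
    // list that grew by more than about 10%, it stays at 100 MB.)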
    if ((old_free != 0 && static_cast<double>(old_free) / static_cast<double>(_freeMem) > 0.9) ||
        static_cast<double>(_freeMem) / _allocedMem < 0.5) {
      _gcThreshold = std::max(_min_gcThreshold, static_cast<size_t>(_allocedMem * 1.5));
    } else {
      _gcThreshold = std::max(_min_gcThreshold, _allocedMem);
    }
#ifdef MINIZINC_GC_STATS
    std::cerr << "done\n\talloced " << (_allocedMem / 1024) << "\n\tfree " << (_freeMem / 1024)
              << "\n\tdiff " << ((_allocedMem - _freeMem) / 1024) << "\n\tthreshold "
              << (_gcThreshold / 1024) << "\n";
#endif
  }
  void rungc() {
    if (_allocedMem > _gcThreshold) {
      trigger();
    }
  }
  void mark();
  void sweep();

  static size_t nodesize(ASTNode* n) {
    static const size_t _nodesize[Item::II_END + 1] = {
        0,                      // NID_FL
        0,                      // NID_CHUNK
        0,                      // NID_VEC
        0,                      // NID_STR
        sizeof(IntLit),         // E_INTLIT
        sizeof(FloatLit),       // E_FLOATLIT
        sizeof(SetLit),         // E_SETLIT
        sizeof(BoolLit),        // E_BOOLLIT
        sizeof(StringLit),      // E_STRINGLIT
        sizeof(Id),             // E_ID
        sizeof(AnonVar),        // E_ANON
        sizeof(ArrayLit),       // E_ARRAYLIT
        sizeof(ArrayAccess),    // E_ARRAYACCESS
        sizeof(Comprehension),  // E_COMP
        sizeof(ITE),            // E_ITE
        sizeof(BinOp),          // E_BINOP
        sizeof(UnOp),           // E_UNOP
        sizeof(Call),           // E_CALL
        sizeof(VarDecl),        // E_VARDECL
        sizeof(Let),            // E_LET
        sizeof(TypeInst),       // E_TI
        sizeof(TIId),           // E_TIID
        sizeof(IncludeI),       // II_INC
        sizeof(VarDeclI),       // II_VD
        sizeof(AssignI),        // II_ASN
        sizeof(ConstraintI),    // II_CON
        sizeof(SolveI),         // II_SOL
        sizeof(OutputI),        // II_OUT
        sizeof(FunctionI)       // II_FUN
    };
    size_t ns;
    switch (static_cast<int>(n->_id)) {
      case ASTNode::NID_FL:
        ns = static_cast<FreeListNode*>(n)->size;
        break;
      case ASTNode::NID_CHUNK:
      case ASTNode::NID_STR:
        ns = static_cast<ASTChunk*>(n)->memsize();
        break;
      case ASTNode::NID_VEC:
        ns = static_cast<ASTVec*>(n)->memsize();
        break;
      case Call::eid:
        ns = n->_flag1 ? _nodesize[BinOp::eid] : _nodesize[Call::eid];
        break;
      default:
        assert(n->_id <= Item::II_END);
        ns = _nodesize[n->_id];
        break;
    }
    ns += ((8 - (ns & 7)) & 7);
    return ns;
  }
};

const size_t GC::Heap::_min_gcThreshold = 10LL * 1024LL;

#ifdef MINIZINC_GC_STATS
const char* GC::Heap::_nodeid[] = {
    "FreeList ",       // NID_FL
    "Chunk ",          // NID_CHUNK
    "Vec ",            // NID_VEC
    "Str ",            // NID_STR
    "IntLit ",         // E_INTLIT
    "FloatLit ",       // E_FLOATLIT
    "SetLit ",         // E_SETLIT
    "BoolLit ",        // E_BOOLLIT
    "StringLit ",      // E_STRINGLIT
    "Id ",             // E_ID
    "AnonVar ",        // E_ANON
    "ArrayLit ",       // E_ARRAYLIT
    "ArrayAccess ",    // E_ARRAYACCESS
    "Comprehension ",  // E_COMP
    "ITE ",            // E_ITE
    "BinOp ",          // E_BINOP
    "UnOp ",           // E_UNOP
    "Call ",           // E_CALL
    "VarDecl ",        // E_VARDECL
    "Let ",            // E_LET
    "TypeInst ",       // E_TI
    "TIId ",           // E_TIID
    "IncludeI ",       // II_INC
    "VarDeclI ",       // II_VD
    "AssignI ",        // II_ASN
    "ConstraintI ",    // II_CON
    "SolveI ",         // II_SOL
    "OutputI ",        // II_OUT
    "FunctionI "       // II_FUN
};
#endif

void GC::setTimeout(unsigned long long int t) {
  if (gc() == nullptr) {
    gc() = new GC();
  }
  gc()->_timeout = t;
  gc()->_timeoutTimer.reset();
}

void GC::lock() {
  if (gc() == nullptr) {
    gc() = new GC();
  }
  // If a timeout has been specified, first check the counter before checking
  // the timer (the counter is much cheaper and introduces less overhead)
  if (gc()->_timeout > 0 && gc()->_timeoutCount++ > 500) {
    gc()->_timeoutCount = 0;
    if (gc()->_timeoutTimer.ms() > gc()->_timeout) {
      gc()->_timeout = 0;
      gc()->_timeoutCount = 0;
      throw Timeout();
    }
  }
  if (gc()->_lockCount == 0) {
    gc()->_heap->rungc();
  }
  gc()->_lockCount++;
}
void GC::unlock() {
  assert(locked());
  gc()->_lockCount--;
}

void GC::trigger() {
  if (!locked()) {
    gc()->_heap->trigger();
  }
}

const size_t GC::Heap::pageSize;

const size_t GC::Heap::_fl_size[GC::Heap::_max_fl + 1] = {
    sizeof(Item) + 1 * sizeof(void*), sizeof(Item) + 2 * sizeof(void*),
    sizeof(Item) + 3 * sizeof(void*), sizeof(Item) + 4 * sizeof(void*),
    sizeof(Item) + 5 * sizeof(void*), sizeof(Item) + 6 * sizeof(void*),
};

GC::GC() : _heap(new Heap()), _lockCount(0), _timeout(0), _timeoutCount(0) {}

void GC::add(GCMarker* m) {
  GC* gc = GC::gc();
  if (gc->_heap->_rootset != nullptr) {
    m->_rootsNext = gc->_heap->_rootset;
    m->_rootsPrev = m->_rootsNext->_rootsPrev;
    m->_rootsPrev->_rootsNext = m;
    m->_rootsNext->_rootsPrev = m;
  } else {
    gc->_heap->_rootset = m->_rootsNext = m->_rootsPrev = m;
  }
}

void GC::remove(GCMarker* m) {
  GC* gc = GC::gc();
  if (m->_rootsNext == m) {
    gc->_heap->_rootset = nullptr;
  } else {
    m->_rootsNext->_rootsPrev = m->_rootsPrev;
    m->_rootsPrev->_rootsNext = m->_rootsNext;
    if (m == gc->_heap->_rootset) {
      gc->_heap->_rootset = m->_rootsPrev;
    }
  }
}

void* GC::alloc(size_t size) {
  assert(locked());
  void* ret;
  if (size < GC::Heap::_fl_size[0] || size > GC::Heap::_fl_size[GC::Heap::_max_fl]) {
    ret = _heap->alloc(size, true);
  } else {
    ret = _heap->fl(size);
  }
  new (ret) FreeListNode(size);
  return ret;
}
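
// Note: the placement-new of a FreeListNode above stamps the fresh memory
// with a valid node header (id and size) right away, presumably so that
// Heap::sweep() can still walk the whole page even before the caller's own
// constructor has overwritten this header.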

void GC::Heap::mark() {
#if defined(MINIZINC_GC_STATS)
  std::cerr << "================= mark =================: ";
  gc_stats.clear();
#endif

  for (KeepAlive* e = _roots; e != nullptr; e = e->next()) {
    if (((*e)() != nullptr) && (*e)()->_gcMark == 0U) {
      Expression::mark((*e)());
#if defined(MINIZINC_GC_STATS)
      gc_stats[(*e)()->_id].keepalive++;
#endif
    }
  }
#if defined(MINIZINC_GC_STATS)
  std::cerr << "+";
#endif

  GCMarker* m = _rootset;
  if (m == nullptr) {
    return;
  }
  do {
    m->mark();
    m = m->_rootsNext;
  } while (m != _rootset);

  for (auto i = static_cast<unsigned int>(_trail.size()); (i--) != 0U;) {
    Expression::mark(_trail[i].v);
  }

  bool fixPrev = false;
  WeakRef* prevWr = nullptr;
  for (WeakRef* wr = _weakRefs; wr != nullptr; wr = wr->next()) {
    if (fixPrev) {
      fixPrev = false;
      removeWeakRef(prevWr);
      prevWr->_n = nullptr;
      prevWr->_p = nullptr;
    }
    if (((*wr)() != nullptr) && (*wr)()->_gcMark == 0U) {
      wr->_e = nullptr;
      wr->_valid = false;
      fixPrev = true;
      prevWr = wr;
    }
  }
  if (fixPrev) {
    removeWeakRef(prevWr);
    prevWr->_n = nullptr;
    prevWr->_p = nullptr;
  }

  for (ASTNodeWeakMap* wr = _nodeWeakMaps; wr != nullptr; wr = wr->next()) {
    std::vector<ASTNode*> toRemove;
    for (auto n : wr->_m) {
      if (n.first->_gcMark == 0U || n.second->_gcMark == 0U) {
        toRemove.push_back(n.first);
      }
    }
    for (auto* n : toRemove) {
      wr->_m.erase(n);
    }
  }

#if defined(MINIZINC_GC_STATS)
  std::cerr << "+";
  std::cerr << "\n";
#endif
}

void GC::Heap::sweep() {
#if defined(MINIZINC_GC_STATS)
  std::cerr << "=============== GC sweep =============\n";
#endif
  HeapPage* p = _page;
  HeapPage* prev = nullptr;
  while (p != nullptr) {
    size_t off = 0;
    bool wholepage = true;
    struct NodeInfo {
      ASTNode* n;
      size_t ns;
      NodeInfo(ASTNode* n0, size_t ns0) : n(n0), ns(ns0) {}
    };
    std::vector<NodeInfo> freeNodes;
    freeNodes.reserve(100);
    while (off < p->used) {
      auto* n = reinterpret_cast<ASTNode*>(p->data + off);
      size_t ns = nodesize(n);
      assert(ns != 0);
#if defined(MINIZINC_GC_STATS)
      GCStat& stats = gc_stats[n->_id];
      stats.first++;
      stats.total += ns;
#endif
      if (n->_gcMark == 0U) {
        switch (static_cast<int>(n->_id)) {
          case Item::II_FUN:
            static_cast<FunctionI*>(n)->ann().~Annotation();
            break;
          case Item::II_SOL:
            static_cast<SolveI*>(n)->ann().~Annotation();
            break;
          case ASTNode::NID_STR:
            static_cast<ASTStringData*>(n)->destroy();
            break;
          case Expression::E_VARDECL:
            // Reset WeakRef inside VarDecl
            static_cast<VarDecl*>(n)->flat(nullptr);
            // fall through
          default:
            if (n->_id >= static_cast<unsigned int>(ASTNode::NID_END) + 1 &&
                n->_id <= static_cast<unsigned int>(Expression::EID_END)) {
              static_cast<Expression*>(n)->ann().~Annotation();
            }
        }
        if (ns >= _fl_size[0] && ns <= _fl_size[_max_fl]) {
          freeNodes.emplace_back(n, ns);
        } else {
          assert(off == 0);
          assert(p->used == p->size);
        }
      } else {
#if defined(MINIZINC_GC_STATS)
        stats.second++;
#endif
        wholepage = false;
        if (n->_id != static_cast<unsigned int>(ASTNode::NID_FL)) {
          n->_gcMark = 0;
        }
      }
      off += ns;
    }
    if (wholepage) {
#ifndef NDEBUG
      memset(p->data, 42, p->size);
#endif
      if (prev != nullptr) {
        prev->next = p->next;
      } else {
        assert(p == _page);
        _page = p->next;
      }
      HeapPage* pf = p;
      p = p->next;
      _allocedMem -= pf->size;
      if (pf->size - pf->used >= _fl_size[0]) {
        _freeMem -= (pf->size - pf->used);
      }
      assert(_allocedMem >= _freeMem);
      ::free(pf);
    } else {
      for (auto ni : freeNodes) {
        auto* fln = static_cast<FreeListNode*>(ni.n);
        new (fln) FreeListNode(ni.ns, _fl[freelistSlot(ni.ns)]);
        _fl[freelistSlot(ni.ns)] = fln;
        _freeMem += ni.ns;
#if defined(MINIZINC_GC_STATS)
        gc_stats[fln->_id].second++;
#endif
      }
      assert(_allocedMem >= _freeMem);
      prev = p;
      p = p->next;
    }
  }
#if defined(MINIZINC_GC_STATS)
  for (auto stat : gc_stats) {
    std::cerr << _nodeid[stat.first] << ":\t" << stat.second.first << " / " << stat.second.second
              << " / " << stat.second.keepalive << " / " << stat.second.inmodel << " / ";
    if (stat.second.total > 1024) {
      if (stat.second.total > 1024 * 1024) {
        std::cerr << (stat.second.total / 1024 / 1024) << "M";
      } else {
        std::cerr << (stat.second.total / 1024) << "K";
      }
    } else {
      std::cerr << (stat.second.total);
    }
    std::cerr << std::endl;
  }
#endif
}

ASTVec::ASTVec(size_t size) : ASTNode(NID_VEC), _size(size) {}
void* ASTVec::alloc(size_t size) {
  size_t s = sizeof(ASTVec) + (size <= 2 ? 0 : size - 2) * sizeof(void*);
  s += ((8 - (s & 7)) & 7);
  return GC::gc()->alloc(s);
}

ASTChunk::ASTChunk(size_t size, unsigned int id) : ASTNode(id), _size(size) {}
void* ASTChunk::alloc(size_t size) {
  size_t s = sizeof(ASTChunk) + (size <= 4 ? 0 : size - 4) * sizeof(char);
  s += ((8 - (s & 7)) & 7);
  return GC::gc()->alloc(s);
}

void GC::mark() {
  GC* gc = GC::gc();
  gc->_heap->_trail.emplace_back(nullptr, nullptr);
  gc->_heap->_trail.back().mark = true;
}
void GC::trail(Expression** l, Expression* v) {
  GC* gc = GC::gc();
  gc->_heap->_trail.emplace_back(l, v);
}
void GC::untrail() {
  GC* gc = GC::gc();
  while (!gc->_heap->_trail.back().mark) {
    *gc->_heap->_trail.back().l = gc->_heap->_trail.back().v;
    gc->_heap->_trail.pop_back();
  }
  assert(!gc->_heap->_trail.empty());
  assert(gc->_heap->_trail.back().mark);
  gc->_heap->_trail.pop_back();
}
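
// Illustrative trail usage (a sketch, not code from this file); 'slot' and
// 'replacement' are hypothetical Expression* locations and values:
//
//   GC::mark();              // push a choice point onto the trail
//   GC::trail(&slot, slot);  // record the old value before overwriting it
//   slot = replacement;      // destructive update
//   // ...
//   GC::untrail();           // undo all updates back to the last mark
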
size_t GC::maxMem() {
  GC* gc = GC::gc();
  return gc->_heap->_maxAllocedMem;
}

void* ASTNode::operator new(size_t size) { return GC::gc()->alloc(size); }

void GC::addKeepAlive(KeepAlive* e) {
  assert(e->_p == nullptr);
  assert(e->_n == nullptr);
  e->_n = GC::gc()->_heap->_roots;
  if (GC::gc()->_heap->_roots != nullptr) {
    GC::gc()->_heap->_roots->_p = e;
  }
  GC::gc()->_heap->_roots = e;
}
void GC::removeKeepAlive(KeepAlive* e) {
  if (e->_p != nullptr) {
    e->_p->_n = e->_n;
  } else {
    assert(GC::gc()->_heap->_roots == e);
    GC::gc()->_heap->_roots = e->_n;
  }
  if (e->_n != nullptr) {
    e->_n->_p = e->_p;
  }
}

KeepAlive::KeepAlive(Expression* e) : _e(e), _p(nullptr), _n(nullptr) {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->addKeepAlive(this);
  }
}
KeepAlive::~KeepAlive() {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->removeKeepAlive(this);
  }
}
KeepAlive::KeepAlive(const KeepAlive& e) : _e(e._e), _p(nullptr), _n(nullptr) {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->addKeepAlive(this);
  }
}
KeepAlive& KeepAlive::operator=(const KeepAlive& e) {
  if (this != &e) {
    if ((_e != nullptr) && !_e->isUnboxedVal()) {
      if (e._e == nullptr || e._e->isUnboxedVal()) {
        GC::gc()->removeKeepAlive(this);
        _p = _n = nullptr;
      }
    } else {
      if (e._e != nullptr && !e._e->isUnboxedVal()) {
        GC::gc()->addKeepAlive(this);
      }
    }
    _e = e._e;
  }
  return *this;
}
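
// Illustrative KeepAlive usage (a sketch, not code from this file): a
// KeepAlive registers its expression as an extra GC root, so the node
// survives collections even while it is not reachable from any model.
//
//   KeepAlive ka(e);  // 'e' is a hypothetical Expression*
//   // ... allocations here may trigger a collection; ka() remains valid ...
//   Expression* stillAlive = ka();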

void GC::addWeakRef(WeakRef* e) {
  assert(e->_p == nullptr);
  assert(e->_n == nullptr);
  e->_n = GC::gc()->_heap->_weakRefs;
  if (GC::gc()->_heap->_weakRefs != nullptr) {
    GC::gc()->_heap->_weakRefs->_p = e;
  }
  GC::gc()->_heap->_weakRefs = e;
}
void GC::removeWeakRef(MiniZinc::WeakRef* e) {
  if (e->_p != nullptr) {
    e->_p->_n = e->_n;
  } else {
    assert(GC::gc()->_heap->_weakRefs == e);
    GC::gc()->_heap->_weakRefs = e->_n;
  }
  if (e->_n != nullptr) {
    e->_n->_p = e->_p;
  }
}
void GC::addNodeWeakMap(ASTNodeWeakMap* m) {
  assert(m->_p == nullptr);
  assert(m->_n == nullptr);
  m->_n = GC::gc()->_heap->_nodeWeakMaps;
  if (GC::gc()->_heap->_nodeWeakMaps != nullptr) {
    GC::gc()->_heap->_nodeWeakMaps->_p = m;
  }
  GC::gc()->_heap->_nodeWeakMaps = m;
}
void GC::removeNodeWeakMap(ASTNodeWeakMap* m) {
  if (m->_p != nullptr) {
    m->_p->_n = m->_n;
  } else {
    assert(GC::gc()->_heap->_nodeWeakMaps == m);
    GC::gc()->_heap->_nodeWeakMaps = m->_n;
  }
  if (m->_n != nullptr) {
    m->_n->_p = m->_p;
  }
}

WeakRef::WeakRef(Expression* e) : _e(e), _p(nullptr), _n(nullptr), _valid(true) {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->addWeakRef(this);
  }
}
WeakRef::~WeakRef() {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->removeWeakRef(this);
  }
}
WeakRef::WeakRef(const WeakRef& e) : _e(e()), _p(nullptr), _n(nullptr), _valid(true) {
  if ((_e != nullptr) && !_e->isUnboxedVal()) {
    GC::gc()->addWeakRef(this);
  }
}
WeakRef& WeakRef::operator=(const WeakRef& e) {
  if (this != &e) {
    // Test whether this WeakRef is currently active in the GC
    bool isActive = ((_e != nullptr) && !_e->isUnboxedVal());
    if (isActive) {
      // Yes, this is an active WeakRef.
      // If it should be inactive after the assignment, remove it.
      if (e() == nullptr || e()->isUnboxedVal()) {
        GC::gc()->removeWeakRef(this);
        _n = _p = nullptr;
      }
    }
    _e = e();
    _valid = true;

    // If this WeakRef was not active but now should be, add it
    if (!isActive && _e != nullptr && !_e->isUnboxedVal()) {
      GC::gc()->addWeakRef(this);
    }
  }
  return *this;
}
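
// Illustrative WeakRef behaviour (a sketch, not code from this file): unlike
// KeepAlive, a WeakRef does not keep its expression alive. If a collection
// reclaims the referent, Heap::mark() clears the reference:
//
//   WeakRef wr(e);  // 'e' is a hypothetical Expression*
//   // ... a collection runs while 'e' is otherwise unreachable ...
//   if (wr() == nullptr) { /* the referent was collected */ }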

ASTNodeWeakMap::ASTNodeWeakMap() : _p(nullptr), _n(nullptr) { GC::gc()->addNodeWeakMap(this); }

ASTNodeWeakMap::~ASTNodeWeakMap() { GC::gc()->removeNodeWeakMap(this); }

void ASTNodeWeakMap::insert(ASTNode* n0, ASTNode* n1) { _m.insert(std::make_pair(n0, n1)); }

ASTNode* ASTNodeWeakMap::find(ASTNode* n) {
  auto it = _m.find(n);
  if (it == _m.end()) {
    return nullptr;
  }
  return it->second;
}

} // namespace MiniZinc