/* -*- mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- */

/*
 *  Main authors:
 *     Guido Tack <guido.tack@monash.edu>
 */

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include <minizinc/ast.hh>
#include <minizinc/config.hh>
#include <minizinc/gc.hh>
#include <minizinc/hash.hh>
#include <minizinc/model.hh>
#include <minizinc/support/dtrace.h>
#include <minizinc/timer.hh>

#include <cstring>
#include <vector>

//#define MINIZINC_GC_STATS

#if defined(MINIZINC_GC_STATS)
#include <map>
#endif

namespace MiniZinc {

GC*& GC::gc(void) {
#if defined(HAS_DECLSPEC_THREAD)
  __declspec(thread) static GC* gc = NULL;
#elif defined(HAS_ATTR_THREAD)
  static __thread GC* gc = NULL;
#else
#error Need thread-local storage
#endif
  return gc;
}

bool GC::locked(void) {
  assert(gc());
  return gc()->_lock_count > 0;
}

GCLock::GCLock(void) { GC::lock(); }
GCLock::~GCLock(void) { GC::unlock(); }

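/// A slot of previously freed memory, kept on one of the size-segregated free
/// lists. It reuses the ASTNode header so that sweep() can still parse pages
/// that contain freed slots; nodes placed on a free list keep their mark bit
/// set so they are never treated as garbage again.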
class FreeListNode : public ASTNode {
public:
  FreeListNode* next;
  size_t size;
  FreeListNode(size_t s, FreeListNode* n) : ASTNode(ASTNode::NID_FL), next(n), size(s) {
    _gc_mark = 1;
  }
  FreeListNode(size_t s) : ASTNode(ASTNode::NID_FL), next(NULL), size(s) {}
};

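/// A page of garbage-collected memory. Nodes are bump-allocated from \a data
/// (a variable-length buffer); \a used counts the bytes already handed out.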
class HeapPage {
public:
  HeapPage* next;
  size_t size;
  size_t used;
  char data[1];
  HeapPage(HeapPage* n, size_t s) : next(n), size(s), used(0) {}
};

/// Memory managed by the garbage collector
class GC::Heap {
  friend class GC;
#if defined(MINIZINC_GC_STATS)
  static const char* _nodeid[Item::II_END + 1];
  struct GCStat {
    int first;
    int second;
    int keepalive;
    int inmodel;
    size_t total;
    GCStat(void) : first(0), second(0), keepalive(0), inmodel(0), total(0) {}
  };
  std::map<int, GCStat> gc_stats;
#endif
protected:
  static const size_t _min_gc_threshold;

  HeapPage* _page;
  Model* _rootset;
  KeepAlive* _roots;
  WeakRef* _weakRefs;
  ASTNodeWeakMap* _nodeWeakMaps;
  static const int _max_fl = 5;
  FreeListNode* _fl[_max_fl + 1];
  static const size_t _fl_size[_max_fl + 1];
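  /// Map an allocation size to its free-list slot: slot \a i holds blocks of
  /// size sizeof(Item) + (i+1)*sizeof(void*), so e.g. sizeof(Item) +
  /// 2*sizeof(void*) maps to slot 1.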
  int _fl_slot(size_t _size) {
    size_t size = _size;
    assert(size <= _fl_size[_max_fl]);
    assert(size >= _fl_size[0]);
    size -= sizeof(Item);
    assert(size % sizeof(void*) == 0);
    size /= sizeof(void*);
    assert(size >= 1);
    int slot = static_cast<int>(size) - 1;
    return slot;
  }

  /// Total amount of memory allocated
  size_t _alloced_mem;
  /// Total amount of memory currently free
  size_t _free_mem;
  /// Memory threshold for next garbage collection
  size_t _gc_threshold;
  /// High water mark of all allocated memory
  size_t _max_alloced_mem;

  /// A trail item
  struct TItem {
    Expression** l;
    Expression* v;
    bool mark;
    TItem(Expression** l0, Expression* v0) : l(l0), v(v0), mark(false) {}
  };
  /// Trail
  std::vector<TItem> trail;

  Heap(void)
      : _page(NULL),
        _rootset(NULL),
        _roots(NULL),
        _weakRefs(NULL),
        _nodeWeakMaps(NULL),
        _alloced_mem(0),
        _free_mem(0),
        _gc_threshold(_min_gc_threshold),
        _max_alloced_mem(0) {
    for (int i = _max_fl + 1; i--;) _fl[i] = NULL;
  }

  /// Default size of pages to allocate
  static const size_t pageSize = 1 << 20;

  HeapPage* allocPage(size_t s, bool exact = false) {
    if (!exact) s = std::max(s, pageSize);
    HeapPage* newPage = static_cast<HeapPage*>(::malloc(sizeof(HeapPage) + s - 1));
    if (newPage == NULL) {
      throw InternalError("out of memory");
    }
#ifndef NDEBUG
    memset(newPage, 255, sizeof(HeapPage) + s - 1);
#endif
    _alloced_mem += s;
    _max_alloced_mem = std::max(_max_alloced_mem, _alloced_mem);
    _free_mem += s;
    if (exact && _page) {
      new (newPage) HeapPage(_page->next, s);
      _page->next = newPage;
    } else {
      if (_page) {
        size_t ns = _page->size - _page->used;
        assert(ns <= _fl_size[_max_fl]);
        if (ns >= _fl_size[0]) {
          // Remainder of page can be added to free lists
          FreeListNode* fln = reinterpret_cast<FreeListNode*>(_page->data + _page->used);
          _page->used += ns;
          new (fln) FreeListNode(ns, _fl[_fl_slot(ns)]);
          _fl[_fl_slot(ns)] = fln;
        } else {
          // Waste a little memory (less than smallest free list slot)
          _free_mem -= ns;
          assert(_alloced_mem >= _free_mem);
        }
      }
      new (newPage) HeapPage(_page, s);
      _page = newPage;
    }
    return newPage;
  }

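  /// Allocate \a size bytes (rounded up to the word size) by bumping the
  /// pointer of the current page, opening a fresh page when necessary. With
  /// \a exact, a dedicated page of exactly the requested size is used.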
  void* alloc(size_t size, bool exact = false) {
    assert(size <= 80 || exact);
    /// Align to word boundary
    size += ((8 - (size & 7)) & 7);
    HeapPage* p = _page;
    if (exact || _page == NULL || _page->used + size >= _page->size) p = allocPage(size, exact);
    char* ret = p->data + p->used;
    p->used += size;
    _free_mem -= size;
    if (p->size - p->used < _fl_size[0]) {
      _free_mem -= (p->size - p->used);
      _alloced_mem -= (p->size - p->used);
      p->size = p->used;
    }
    assert(_alloced_mem >= _free_mem);
    return ret;
  }

  /// Allocate one object of type T (no initialisation)
  template <typename T>
  T* alloc(void) {
    return static_cast<T*>(alloc(sizeof(T)));
  }

  /// Allocate \a n objects of type T (no initialisation)
  template <typename T>
  T* alloc(int n) {
    return static_cast<T*>(alloc(n * sizeof(T)));
  }

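  /// Allocate \a size bytes from the matching free list, falling back to bump
  /// allocation if that list is empty.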
  void* fl(size_t size) {
    int slot = _fl_slot(size);
    assert(slot <= _max_fl);
    if (_fl[slot]) {
      FreeListNode* p = _fl[slot];
      _fl[slot] = p->next;
      _free_mem -= size;
      return p;
    }
    return alloc(size);
  }

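  /// Run a full collection (mark, then sweep) and recompute the threshold
  /// that triggers the next collection.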
  void trigger(void) {
    DTRACE0(GC_START);
#ifdef MINIZINC_GC_STATS
    std::cerr << "GC\n\talloced " << (_alloced_mem / 1024) << "\n\tfree " << (_free_mem / 1024)
              << "\n\tdiff " << ((_alloced_mem - _free_mem) / 1024) << "\n\tthreshold "
              << (_gc_threshold / 1024) << "\n";
#endif
    size_t old_free = _free_mem;
    mark();
    sweep();
    // GC strategy:
    // increase threshold if either
    // a) we haven't been able to put much on the free list (compared to before GC), or
    // b) the free list memory (after GC) is less than 50% of the allocated memory
    // otherwise (i.e., we have been able to increase the free list, and it is now more
    // than 50% of overall allocated memory), keep threshold at allocated memory
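    // Illustrative numbers for case a): with 900KB free before the collection
    // and 950KB free afterwards (ratio ~0.95 > 0.9), the collection recovered
    // little, so the threshold grows to 1.5 times the allocated memory.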
    if ((old_free != 0 && static_cast<double>(old_free) / static_cast<double>(_free_mem) > 0.9) ||
        static_cast<double>(_free_mem) / _alloced_mem < 0.5) {
      _gc_threshold = std::max(_min_gc_threshold, static_cast<size_t>(_alloced_mem * 1.5));
    } else {
      _gc_threshold = std::max(_min_gc_threshold, _alloced_mem);
    }
#ifdef MINIZINC_GC_STATS
    std::cerr << "done\n\talloced " << (_alloced_mem / 1024) << "\n\tfree " << (_free_mem / 1024)
              << "\n\tdiff " << ((_alloced_mem - _free_mem) / 1024) << "\n\tthreshold "
              << (_gc_threshold / 1024) << "\n";
#endif
    DTRACE0(GC_END);
  }
  void rungc(void) {
    if (_alloced_mem > _gc_threshold) {
      trigger();
    }
  }
  void mark(void);
  void sweep(void);

  static size_t nodesize(ASTNode* n) {
    static const size_t _nodesize[Item::II_END + 1] = {
        0,                      // NID_FL
        0,                      // NID_CHUNK
        0,                      // NID_VEC
        sizeof(IntLit),         // E_INTLIT
        sizeof(FloatLit),       // E_FLOATLIT
        sizeof(SetLit),         // E_SETLIT
        sizeof(BoolLit),        // E_BOOLLIT
        sizeof(StringLit),      // E_STRINGLIT
        sizeof(Id),             // E_ID
        sizeof(AnonVar),        // E_ANON
        sizeof(ArrayLit),       // E_ARRAYLIT
        sizeof(ArrayAccess),    // E_ARRAYACCESS
        sizeof(Comprehension),  // E_COMP
        sizeof(ITE),            // E_ITE
        sizeof(BinOp),          // E_BINOP
        sizeof(UnOp),           // E_UNOP
        sizeof(Call),           // E_CALL
        sizeof(VarDecl),        // E_VARDECL
        sizeof(Let),            // E_LET
        sizeof(TypeInst),       // E_TI
        sizeof(TIId),           // E_TIID
        sizeof(IncludeI),       // II_INC
        sizeof(VarDeclI),       // II_VD
        sizeof(AssignI),        // II_ASN
        sizeof(ConstraintI),    // II_CON
        sizeof(SolveI),         // II_SOL
        sizeof(OutputI),        // II_OUT
        sizeof(FunctionI)       // II_FUN
    };
    size_t ns;
    switch (n->_id) {
      case ASTNode::NID_FL:
        ns = static_cast<FreeListNode*>(n)->size;
        break;
      case ASTNode::NID_CHUNK:
        ns = static_cast<ASTChunk*>(n)->memsize();
        break;
      case ASTNode::NID_VEC:
        ns = static_cast<ASTVec*>(n)->memsize();
        break;
      default:
        assert(n->_id <= Item::II_END);
        ns = _nodesize[n->_id];
        break;
    }
    ns += ((8 - (ns & 7)) & 7);
    return ns;
  }
};

const size_t GC::Heap::_min_gc_threshold = 10ll * 1024ll;

#ifdef MINIZINC_GC_STATS
const char* GC::Heap::_nodeid[] = {
    "FreeList ",       // NID_FL
    "Chunk ",          // NID_CHUNK
    "Vec ",            // NID_VEC
    "IntLit ",         // E_INTLIT
    "FloatLit ",       // E_FLOATLIT
    "SetLit ",         // E_SETLIT
    "BoolLit ",        // E_BOOLLIT
    "StringLit ",      // E_STRINGLIT
    "Id ",             // E_ID
    "AnonVar ",        // E_ANON
    "ArrayLit ",       // E_ARRAYLIT
    "ArrayAccess ",    // E_ARRAYACCESS
    "Comprehension ",  // E_COMP
    "ITE ",            // E_ITE
    "BinOp ",          // E_BINOP
    "UnOp ",           // E_UNOP
    "Call ",           // E_CALL
    "VarDecl ",        // E_VARDECL
    "Let ",            // E_LET
    "TypeInst ",       // E_TI
    "TIId ",           // E_TIID
    "IncludeI ",       // II_INC
    "VarDeclI ",       // II_VD
    "AssignI ",        // II_ASN
    "ConstraintI ",    // II_CON
    "SolveI ",         // II_SOL
    "OutputI ",        // II_OUT
    "FunctionI "       // II_FUN
};
#endif

void GC::setTimeout(unsigned long long int t) {
  if (gc() == NULL) {
    gc() = new GC();
  }
  gc()->_timeout = t;
  gc()->_timeout_timer.reset();
}

void GC::lock(void) {
  if (gc() == NULL) {
    gc() = new GC();
  }
  // If a timeout has been specified, first check counter
  // before checking timer (counter is much cheaper, introduces
  // less overhead)
  if (gc()->_timeout > 0 && gc()->_timeout_counter++ > 500) {
    gc()->_timeout_counter = 0;
    if (gc()->_timeout_timer.ms() > gc()->_timeout) {
      gc()->_timeout = 0;
      gc()->_timeout_counter = 0;
      throw Timeout();
    }
  }
  if (gc()->_lock_count == 0) {
    gc()->_heap->rungc();
  }
  gc()->_lock_count++;
}
void GC::unlock(void) {
  assert(locked());
  gc()->_lock_count--;
}
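// Illustrative usage (not part of the library source): AST memory may only be
// allocated while the collector is locked, which is normally done with a
// scoped GCLock rather than by calling lock()/unlock() directly:
//
//   {
//     GCLock lock;                      // no collection can run in this scope
//     Expression* e = /* construct AST nodes here */;
//     // keep e reachable (e.g. via a KeepAlive or a Model) before the lock
//     // goes out of scope, otherwise the next collection may reclaim it
//   }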

void GC::trigger(void) {
  if (!locked()) gc()->_heap->trigger();
}

const size_t GC::Heap::pageSize;

const size_t GC::Heap::_fl_size[GC::Heap::_max_fl + 1] = {
    sizeof(Item) + 1 * sizeof(void*), sizeof(Item) + 2 * sizeof(void*),
    sizeof(Item) + 3 * sizeof(void*), sizeof(Item) + 4 * sizeof(void*),
    sizeof(Item) + 5 * sizeof(void*), sizeof(Item) + 6 * sizeof(void*),
};

GC::GC(void) : _heap(new Heap()), _lock_count(0), _timeout(0), _timeout_counter(0) {}

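// Models are kept in a circular, doubly-linked list anchored at the heap's
// _rootset; add() and remove() splice a model into and out of that list.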
void GC::add(Model* m) {
  GC* gc = GC::gc();
  if (gc->_heap->_rootset) {
    m->_roots_next = gc->_heap->_rootset;
    m->_roots_prev = m->_roots_next->_roots_prev;
    m->_roots_prev->_roots_next = m;
    m->_roots_next->_roots_prev = m;
  } else {
    gc->_heap->_rootset = m->_roots_next = m->_roots_prev = m;
  }
}

void GC::remove(Model* m) {
  GC* gc = GC::gc();
  if (m->_roots_next == m) {
    gc->_heap->_rootset = NULL;
  } else {
    m->_roots_next->_roots_prev = m->_roots_prev;
    m->_roots_prev->_roots_next = m->_roots_next;
    if (m == gc->_heap->_rootset) gc->_heap->_rootset = m->_roots_prev;
  }
}

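// Allocate memory for a new AST node. The memory is first stamped with a
// FreeListNode header of the requested size, which keeps the heap page
// parseable by sweep() until the caller constructs the actual node in place.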
void* GC::alloc(size_t size) {
  assert(locked());
  void* ret;
  if (size < _heap->_fl_size[0] || size > _heap->_fl_size[_heap->_max_fl]) {
    ret = _heap->alloc(size, true);
  } else {
    ret = _heap->fl(size);
  }
  new (ret) FreeListNode(size);
  return ret;
}

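// Mark phase: mark everything reachable from the KeepAlive roots, from every
// model in the root set, and from the trail; then invalidate weak references
// and drop weak-map entries whose targets were not marked.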
void GC::Heap::mark(void) {
#if defined(MINIZINC_GC_STATS)
  std::cerr << "================= mark =================: ";
  gc_stats.clear();
#endif

  for (KeepAlive* e = _roots; e != NULL; e = e->next()) {
    if ((*e)() && (*e)()->_gc_mark == 0) {
      Expression::mark((*e)());
#if defined(MINIZINC_GC_STATS)
      gc_stats[(*e)()->_id].keepalive++;
#endif
    }
  }
#if defined(MINIZINC_GC_STATS)
  std::cerr << "+";
#endif

  Model* m = _rootset;
  if (m == NULL) return;
  do {
    m->_filepath.mark();
    m->_filename.mark();
    for (unsigned int j = 0; j < m->_items.size(); j++) {
      Item* i = m->_items[j];
      if (i->_gc_mark == 0) {
        i->_gc_mark = 1;
        i->loc().mark();
        switch (i->iid()) {
          case Item::II_INC:
            i->cast<IncludeI>()->f().mark();
            break;
          case Item::II_VD:
            Expression::mark(i->cast<VarDeclI>()->e());
#if defined(MINIZINC_GC_STATS)
            gc_stats[i->cast<VarDeclI>()->e()->Expression::eid()].inmodel++;
#endif
            break;
          case Item::II_ASN:
            i->cast<AssignI>()->id().mark();
            Expression::mark(i->cast<AssignI>()->e());
            Expression::mark(i->cast<AssignI>()->decl());
            break;
          case Item::II_CON:
            Expression::mark(i->cast<ConstraintI>()->e());
#if defined(MINIZINC_GC_STATS)
            gc_stats[i->cast<ConstraintI>()->e()->Expression::eid()].inmodel++;
#endif
            break;
          case Item::II_SOL: {
            SolveI* si = i->cast<SolveI>();
            for (ExpressionSetIter it = si->ann().begin(); it != si->ann().end(); ++it) {
              Expression::mark(*it);
            }
          }
            Expression::mark(i->cast<SolveI>()->e());
            break;
          case Item::II_OUT:
            Expression::mark(i->cast<OutputI>()->e());
            break;
          case Item::II_FUN: {
            FunctionI* fi = i->cast<FunctionI>();
            fi->id().mark();
            Expression::mark(fi->ti());
            for (ExpressionSetIter it = fi->ann().begin(); it != fi->ann().end(); ++it) {
              Expression::mark(*it);
            }
            Expression::mark(fi->e());
            fi->params().mark();
            for (unsigned int k = 0; k < fi->params().size(); k++) {
              Expression::mark(fi->params()[k]);
            }
          } break;
        }
      }
    }
    m = m->_roots_next;
  } while (m != _rootset);

  for (unsigned int i = static_cast<unsigned int>(trail.size()); i--;) {
    Expression::mark(trail[i].v);
  }

  bool fixPrev = false;
  WeakRef* prevWr = NULL;
  for (WeakRef* wr = _weakRefs; wr != NULL; wr = wr->next()) {
    if (fixPrev) {
      fixPrev = false;
      removeWeakRef(prevWr);
      prevWr->_n = NULL;
      prevWr->_p = NULL;
    }
    if ((*wr)() && (*wr)()->_gc_mark == 0) {
      wr->_e = NULL;
      wr->_valid = false;
      fixPrev = true;
      prevWr = wr;
    }
  }
  if (fixPrev) {
    removeWeakRef(prevWr);
    prevWr->_n = NULL;
    prevWr->_p = NULL;
  }

  for (ASTNodeWeakMap* wr = _nodeWeakMaps; wr != NULL; wr = wr->next()) {
    std::vector<ASTNode*> toRemove;
    for (auto n : wr->_m) {
      if (n.first->_gc_mark == 0 || n.second->_gc_mark == 0) toRemove.push_back(n.first);
    }
    for (auto n : toRemove) {
      wr->_m.erase(n);
    }
  }

#if defined(MINIZINC_GC_STATS)
  std::cerr << "+";
  std::cerr << "\n";
#endif
}

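// Sweep phase: walk every heap page. Unmarked nodes are finalised (their
// annotations are destructed) and returned to the free lists; pages on which
// nothing survived are released back to the operating system; marks on
// surviving nodes are cleared for the next collection.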
void GC::Heap::sweep(void) {
#if defined(MINIZINC_GC_STATS)
  std::cerr << "=============== GC sweep =============\n";
#endif
  HeapPage* p = _page;
  HeapPage* prev = NULL;
  while (p) {
    size_t off = 0;
    bool wholepage = true;
    struct NodeInfo {
      ASTNode* n;
      size_t ns;
      NodeInfo(ASTNode* n0, size_t ns0) : n(n0), ns(ns0) {}
    };
    std::vector<NodeInfo> freeNodes;
    freeNodes.reserve(100);
    while (off < p->used) {
      ASTNode* n = reinterpret_cast<ASTNode*>(p->data + off);
      size_t ns = nodesize(n);
      assert(ns != 0);
#if defined(MINIZINC_GC_STATS)
      GCStat& stats = gc_stats[n->_id];
      stats.first++;
      stats.total += ns;
#endif
      if (n->_gc_mark == 0) {
        switch (n->_id) {
          case Item::II_FUN:
            static_cast<FunctionI*>(n)->ann().~Annotation();
            break;
          case Item::II_SOL:
            static_cast<SolveI*>(n)->ann().~Annotation();
            break;
          case Expression::E_VARDECL:
            // Reset WeakRef inside VarDecl
            static_cast<VarDecl*>(n)->flat(NULL);
            // fall through
          default:
            if (n->_id >= ASTNode::NID_END + 1 && n->_id <= Expression::EID_END) {
              static_cast<Expression*>(n)->ann().~Annotation();
            }
        }
        if (ns >= _fl_size[0] && ns <= _fl_size[_max_fl]) {
          freeNodes.push_back(NodeInfo(n, ns));
        } else {
          assert(off == 0);
          assert(p->used == p->size);
        }
      } else {
#if defined(MINIZINC_GC_STATS)
        stats.second++;
#endif
        wholepage = false;
        if (n->_id != ASTNode::NID_FL) n->_gc_mark = 0;
      }
      off += ns;
    }
    if (wholepage) {
#ifndef NDEBUG
      memset(p->data, 42, p->size);
#endif
      if (prev) {
        prev->next = p->next;
      } else {
        assert(p == _page);
        _page = p->next;
      }
      HeapPage* pf = p;
      p = p->next;
      _alloced_mem -= pf->size;
      if (pf->size - pf->used >= _fl_size[0]) _free_mem -= (pf->size - pf->used);
      assert(_alloced_mem >= _free_mem);
      ::free(pf);
    } else {
      for (auto ni : freeNodes) {
        FreeListNode* fln = static_cast<FreeListNode*>(ni.n);
        new (fln) FreeListNode(ni.ns, _fl[_fl_slot(ni.ns)]);
        _fl[_fl_slot(ni.ns)] = fln;
        _free_mem += ni.ns;
#if defined(MINIZINC_GC_STATS)
        gc_stats[fln->_id].second++;
#endif
      }
      assert(_alloced_mem >= _free_mem);
      prev = p;
      p = p->next;
    }
  }
#if defined(MINIZINC_GC_STATS)
  for (auto stat : gc_stats) {
    std::cerr << _nodeid[stat.first] << ":\t" << stat.second.first << " / " << stat.second.second
              << " / " << stat.second.keepalive << " / " << stat.second.inmodel << " / ";
    if (stat.second.total > 1024) {
      if (stat.second.total > 1024 * 1024) {
        std::cerr << (stat.second.total / 1024 / 1024) << "M";
      } else {
        std::cerr << (stat.second.total / 1024) << "K";
      }
    } else {
      std::cerr << (stat.second.total);
    }
    std::cerr << std::endl;
  }
#endif
}

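// ASTVec and ASTChunk are variable-length objects: the class declarations
// already include a small inline buffer (two pointers and four characters,
// respectively), so allocation only adds space for the elements beyond that
// buffer, rounded up to the word size.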
ASTVec::ASTVec(size_t size) : ASTNode(NID_VEC), _size(size) {}
void* ASTVec::alloc(size_t size) {
  size_t s = sizeof(ASTVec) + (size <= 2 ? 0 : size - 2) * sizeof(void*);
  s += ((8 - (s & 7)) & 7);
  return GC::gc()->alloc(s);
}

ASTChunk::ASTChunk(size_t size) : ASTNode(NID_CHUNK), _size(size) {}
void* ASTChunk::alloc(size_t size) {
  size_t s = sizeof(ASTChunk) + (size <= 4 ? 0 : size - 4) * sizeof(char);
  s += ((8 - (s & 7)) & 7);
  return GC::gc()->alloc(s);
}

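// Trail support: GC::mark() (not to be confused with GC::Heap::mark) flags the
// current top of the trail as a choice point, GC::trail() records a location
// together with the value to restore, and GC::untrail() rolls back everything
// trailed since the last choice point.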
void GC::mark(void) {
  GC* gc = GC::gc();
  if (!gc->_heap->trail.empty()) gc->_heap->trail.back().mark = true;
}
void GC::trail(Expression** l, Expression* v) {
  GC* gc = GC::gc();
  gc->_heap->trail.push_back(GC::Heap::TItem(l, v));
}
void GC::untrail(void) {
  GC* gc = GC::gc();
  while (!gc->_heap->trail.empty() && !gc->_heap->trail.back().mark) {
    *gc->_heap->trail.back().l = gc->_heap->trail.back().v;
    gc->_heap->trail.pop_back();
  }
  if (!gc->_heap->trail.empty()) gc->_heap->trail.back().mark = false;
}
size_t GC::maxMem(void) {
  GC* gc = GC::gc();
  return gc->_heap->_max_alloced_mem;
}

void* ASTNode::operator new(size_t size) { return GC::gc()->alloc(size); }

void GC::addKeepAlive(KeepAlive* e) {
  assert(e->_p == NULL);
  assert(e->_n == NULL);
  e->_n = GC::gc()->_heap->_roots;
  if (GC::gc()->_heap->_roots) GC::gc()->_heap->_roots->_p = e;
  GC::gc()->_heap->_roots = e;
}
void GC::removeKeepAlive(KeepAlive* e) {
  if (e->_p) {
    e->_p->_n = e->_n;
  } else {
    assert(GC::gc()->_heap->_roots == e);
    GC::gc()->_heap->_roots = e->_n;
  }
  if (e->_n) {
    e->_n->_p = e->_p;
  }
}

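// A KeepAlive registers itself as a GC root on construction and deregisters
// on destruction. Unboxed values (literals encoded directly in the pointer)
// are not heap-allocated and therefore do not need to be registered.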
KeepAlive::KeepAlive(Expression* e) : _e(e), _p(NULL), _n(NULL) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->addKeepAlive(this);
}
KeepAlive::~KeepAlive(void) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->removeKeepAlive(this);
}
KeepAlive::KeepAlive(const KeepAlive& e) : _e(e._e), _p(NULL), _n(NULL) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->addKeepAlive(this);
}
KeepAlive& KeepAlive::operator=(const KeepAlive& e) {
  if (_e && !_e->isUnboxedVal()) {
    if (e._e == NULL || e._e->isUnboxedVal()) {
      GC::gc()->removeKeepAlive(this);
      _p = _n = NULL;
    }
  } else {
    if (e._e != NULL && !e._e->isUnboxedVal()) GC::gc()->addKeepAlive(this);
  }
  _e = e._e;
  return *this;
}

void GC::addWeakRef(WeakRef* e) {
  assert(e->_p == NULL);
  assert(e->_n == NULL);
  e->_n = GC::gc()->_heap->_weakRefs;
  if (GC::gc()->_heap->_weakRefs) GC::gc()->_heap->_weakRefs->_p = e;
  GC::gc()->_heap->_weakRefs = e;
}
void GC::removeWeakRef(MiniZinc::WeakRef* e) {
  if (e->_p) {
    e->_p->_n = e->_n;
  } else {
    assert(GC::gc()->_heap->_weakRefs == e);
    GC::gc()->_heap->_weakRefs = e->_n;
  }
  if (e->_n) {
    e->_n->_p = e->_p;
  }
}
void GC::addNodeWeakMap(ASTNodeWeakMap* m) {
  assert(m->_p == NULL);
  assert(m->_n == NULL);
  m->_n = GC::gc()->_heap->_nodeWeakMaps;
  if (GC::gc()->_heap->_nodeWeakMaps) GC::gc()->_heap->_nodeWeakMaps->_p = m;
  GC::gc()->_heap->_nodeWeakMaps = m;
}
void GC::removeNodeWeakMap(ASTNodeWeakMap* m) {
  if (m->_p) {
    m->_p->_n = m->_n;
  } else {
    assert(GC::gc()->_heap->_nodeWeakMaps == m);
    GC::gc()->_heap->_nodeWeakMaps = m->_n;
  }
  if (m->_n) {
    m->_n->_p = m->_p;
  }
}

WeakRef::WeakRef(Expression* e) : _e(e), _p(NULL), _n(NULL), _valid(true) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->addWeakRef(this);
}
WeakRef::~WeakRef(void) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->removeWeakRef(this);
}
WeakRef::WeakRef(const WeakRef& e) : _e(e()), _p(NULL), _n(NULL), _valid(true) {
  if (_e && !_e->isUnboxedVal()) GC::gc()->addWeakRef(this);
}
WeakRef& WeakRef::operator=(const WeakRef& e) {
  // Test whether this WeakRef is currently registered with the GC
  bool isActive = (_e && !_e->isUnboxedVal());
  if (isActive) {
    // Yes, this is an active WeakRef.
    // If it should be inactive after the assignment, deregister it.
    if (e() == NULL || e()->isUnboxedVal()) {
      GC::gc()->removeWeakRef(this);
      _n = _p = NULL;
    }
  }
  _e = e();
  _valid = true;

  // If this WeakRef was not active but now should be, register it
  if (!isActive && _e != NULL && !_e->isUnboxedVal()) GC::gc()->addWeakRef(this);
  return *this;
}

ASTNodeWeakMap::ASTNodeWeakMap(void) : _p(NULL), _n(NULL) { GC::gc()->addNodeWeakMap(this); }

ASTNodeWeakMap::~ASTNodeWeakMap(void) { GC::gc()->removeNodeWeakMap(this); }

void ASTNodeWeakMap::insert(ASTNode* n0, ASTNode* n1) { _m.insert(std::make_pair(n0, n1)); }

ASTNode* ASTNodeWeakMap::find(ASTNode* n) {
  NodeMap::iterator it = _m.find(n);
  if (it == _m.end()) return NULL;
  return it->second;
}

}  // namespace MiniZinc