/* Copyright (C) 2009,2012 Matthew Fluet.
 * Copyright (C) 1999-2008 Henry Cejtin, Matthew Fluet, Suresh
 *      Jagannathan, and Stephen Weeks.
 * Copyright (C) 1997-2000 NEC Research Institute.
 *
 * MLton is released under a BSD-style license.
 * See the file MLton-LICENSE for details.
 */
/* Writes a human-readable, multi-line dump of the GC state to stream:
 * the current thread (and its contents), the generational maps, the heap,
 * and the limit / stackBottom / stackTop pointers.  Debugging aid only. */
void displayGCState (GC_state s, FILE *stream) {
  fprintf (stream,
           "GC state\n");
  fprintf (stream, "\tcurrentThread = "FMTOBJPTR"\n", s->currentThread);
  /* The GC_thread data starts offsetofThread(s) bytes past the object
   * pointer — presumably object header/metadata precede it; confirm. */
  displayThread (s, (GC_thread)(objptrToPointer (s->currentThread, s->heap.start)
                                + offsetofThread (s)),
                 stream);
  fprintf (stream, "\tgenerational\n");
  displayGenerationalMaps (s, &s->generationalMaps,
                           stream);
  fprintf (stream, "\theap\n");
  displayHeap (s, &s->heap,
               stream);
  fprintf (stream,
           "\tlimit = "FMTPTR"\n"
           "\tstackBottom = "FMTPTR"\n"
           "\tstackTop = "FMTPTR"\n",
           (uintptr_t)s->limit,
           (uintptr_t)s->stackBottom,
           (uintptr_t)s->stackTop);
}
31 | ||
32 | size_t sizeofGCStateCurrentStackUsed (GC_state s) { | |
33 | return (size_t)(s->stackTop - s->stackBottom); | |
34 | } | |
35 | ||
/* Refreshes the cached current-thread/stack fields of the GC state.
 *
 * Copies the current thread's exception-stack index into s->exnStack and
 * recomputes s->stackBottom / s->stackTop / s->stackLimit from the current
 * stack object.  Finally marks the card containing the stack object —
 * NOTE(review): presumably so a later minor GC re-examines the stack,
 * which the mutator updates without write barriers; confirm. */
void setGCStateCurrentThreadAndStack (GC_state s) {
  GC_thread thread;
  GC_stack stack;

  thread = getThreadCurrent (s);
  s->exnStack = thread->exnStack;
  stack = getStackCurrent (s);
  s->stackBottom = getStackBottom (s, stack);
  s->stackTop = getStackTop (s, stack);
  s->stackLimit = getStackLimit (s, stack);
  markCard (s, (pointer)stack);
}
48 | ||
/* Recomputes the allocation frontier/limit and the nursery of s->heap,
 * reserving oldGenBytesRequested bytes at the end of the old generation and
 * ensuring the nursery can hold nurseryBytesRequested bytes.
 *
 * When the mutator maintains card marks and the generational nursery (the
 * aligned upper half of the full nursery) is both large enough for the
 * request and worthwhile per the configured ratios, minor collection is
 * enabled (s->canMinor = TRUE) and allocation proceeds in that half (with a
 * cleared card map); otherwise the whole space above the old generation is
 * used and minor collection is disabled.  Dies if even the full nursery
 * cannot satisfy nurseryBytesRequested. */
void setGCStateCurrentHeap (GC_state s,
                            size_t oldGenBytesRequested,
                            size_t nurseryBytesRequested) {
  GC_heap h;
  pointer nursery;
  size_t nurserySize;
  pointer genNursery;
  size_t genNurserySize;

  if (DEBUG_DETAILED)
    fprintf (stderr, "setGCStateCurrentHeap(%s, %s)\n",
             uintmaxToCommaString(oldGenBytesRequested),
             uintmaxToCommaString(nurseryBytesRequested));
  h = &s->heap;
  assert (isFrontierAligned (s, h->start + h->oldGenSize + oldGenBytesRequested));
  /* NOTE(review): limit sits GC_HEAP_LIMIT_SLOP bytes below the true end —
   * presumably so allocation checks against s->limit leave slop to write
   * past it; confirm. */
  s->limitPlusSlop = h->start + h->size;
  s->limit = s->limitPlusSlop - GC_HEAP_LIMIT_SLOP;
  /* The nursery is everything above the old generation plus the bytes
   * reserved for it by this call. */
  nurserySize = h->size - (h->oldGenSize + oldGenBytesRequested);
  assert (isFrontierAligned (s, s->limitPlusSlop - nurserySize));
  nursery = s->limitPlusSlop - nurserySize;
  /* Candidate generational nursery: the (frontier-aligned) upper half. */
  genNursery = alignFrontier (s, s->limitPlusSlop - (nurserySize / 2));
  genNurserySize = (size_t)(s->limitPlusSlop - genNursery);
  if (/* The mutator marks cards. */
      s->mutatorMarksCards
      /* There is enough space in the generational nursery. */
      and (nurseryBytesRequested <= genNurserySize)
      /* The nursery is large enough to be worth it. */
      and (((float)(h->size - s->lastMajorStatistics.bytesLive)
            / (float)nurserySize)
           <= s->controls.ratios.nursery)
      and /* There is a reason to use generational GC. */
      (
       /* We must use it for debugging purposes. */
       FORCE_GENERATIONAL
       /* We just did a mark compact, so it will be advantageous to use it. */
       or (s->lastMajorStatistics.kind == GC_MARK_COMPACT)
       /* The live ratio is low enough to make it worthwhile. */
       or ((float)h->size / (float)s->lastMajorStatistics.bytesLive
           <= (h->withMapsSize < s->sysvals.ram
               ? s->controls.ratios.copyGenerational
               : s->controls.ratios.markCompactGenerational))
       )) {
    s->canMinor = TRUE;
    nursery = genNursery;
    nurserySize = genNurserySize;
    clearCardMap (s);
  } else {
    unless (nurseryBytesRequested <= nurserySize)
      die ("Out of memory.  Insufficient space in nursery.");
    s->canMinor = FALSE;
  }
  assert (nurseryBytesRequested <= nurserySize);
  s->heap.nursery = nursery;
  s->frontier = nursery;
  assert (nurseryBytesRequested <= (size_t)(s->limitPlusSlop - s->frontier));
  assert (isFrontierAligned (s, s->heap.nursery));
  assert (hasHeapBytesFree (s, oldGenBytesRequested, nurseryBytesRequested));
}
107 | ||
/* Returns s->amOriginal: whether this is the original run of the program
 * (presumably false in a resumed saved world — confirm). */
bool GC_getAmOriginal (GC_state s) {
  return s->amOriginal;
}
/* Sets s->amOriginal: whether this is the original run of the program. */
void GC_setAmOriginal (GC_state s, bool b) {
  s->amOriginal = b;
}
114 | ||
/* Enables/disables GC messages (s->controls.messages). */
void GC_setControlsMessages (GC_state s, bool b) {
  s->controls.messages = b;
}
118 | ||
/* Enables/disables the GC summary (s->controls.summary). */
void GC_setControlsSummary (GC_state s, bool b) {
  s->controls.summary = b;
}
122 | ||
/* Enables/disables rusage-based measurement of GC time
 * (s->controls.rusageMeasureGC). */
void GC_setControlsRusageMeasureGC (GC_state s, bool b) {
  s->controls.rusageMeasureGC = b;
}
126 | ||
/* Returns the cumulative number of bytes allocated over the program's run. */
uintmax_t GC_getCumulativeStatisticsBytesAllocated (GC_state s) {
  return s->cumulativeStatistics.bytesAllocated;
}
130 | ||
/* Returns the cumulative number of (major) copying collections performed. */
uintmax_t GC_getCumulativeStatisticsNumCopyingGCs (GC_state s) {
  return s->cumulativeStatistics.numCopyingGCs;
}
134 | ||
/* Returns the cumulative number of mark-compact collections performed. */
uintmax_t GC_getCumulativeStatisticsNumMarkCompactGCs (GC_state s) {
  return s->cumulativeStatistics.numMarkCompactGCs;
}
138 | ||
/* Returns the cumulative number of minor collections performed. */
uintmax_t GC_getCumulativeStatisticsNumMinorGCs (GC_state s) {
  return s->cumulativeStatistics.numMinorGCs;
}
142 | ||
/* Returns the maximum number of live bytes ever observed after a collection. */
size_t GC_getCumulativeStatisticsMaxBytesLive (GC_state s) {
  return s->cumulativeStatistics.maxBytesLive;
}
146 | ||
/* Sets s->hashConsDuringGC — presumably requests hash-consing of the heap
 * during the next collection; confirm against the collector. */
void GC_setHashConsDuringGC (GC_state s, bool b) {
  s->hashConsDuringGC = b;
}
150 | ||
/* Returns the number of bytes live after the most recent major collection. */
size_t GC_getLastMajorStatisticsBytesLive (GC_state s) {
  return s->lastMajorStatistics.bytesLive;
}
154 | ||
155 | ||
156 | pointer GC_getCallFromCHandlerThread (GC_state s) { | |
157 | pointer p = objptrToPointer (s->callFromCHandlerThread, s->heap.start); | |
158 | return p; | |
159 | } | |
160 | ||
161 | void GC_setCallFromCHandlerThread (GC_state s, pointer p) { | |
162 | objptr op = pointerToObjptr (p, s->heap.start); | |
163 | s->callFromCHandlerThread = op; | |
164 | } | |
165 | ||
166 | pointer GC_getCurrentThread (GC_state s) { | |
167 | pointer p = objptrToPointer (s->currentThread, s->heap.start); | |
168 | return p; | |
169 | } | |
170 | ||
171 | pointer GC_getSavedThread (GC_state s) { | |
172 | pointer p; | |
173 | ||
174 | assert(s->savedThread != BOGUS_OBJPTR); | |
175 | p = objptrToPointer (s->savedThread, s->heap.start); | |
176 | s->savedThread = BOGUS_OBJPTR; | |
177 | return p; | |
178 | } | |
179 | ||
180 | void GC_setSavedThread (GC_state s, pointer p) { | |
181 | objptr op; | |
182 | ||
183 | assert(s->savedThread == BOGUS_OBJPTR); | |
184 | op = pointerToObjptr (p, s->heap.start); | |
185 | s->savedThread = op; | |
186 | } | |
187 | ||
188 | void GC_setSignalHandlerThread (GC_state s, pointer p) { | |
189 | objptr op = pointerToObjptr (p, s->heap.start); | |
190 | s->signalHandlerThread = op; | |
191 | } | |
192 | ||
193 | struct rusage* GC_getRusageGCAddr (GC_state s) { | |
194 | return &(s->cumulativeStatistics.ru_gc); | |
195 | } | |
196 | ||
197 | sigset_t* GC_getSignalsHandledAddr (GC_state s) { | |
198 | return &(s->signalsInfo.signalsHandled); | |
199 | } | |
200 | ||
201 | sigset_t* GC_getSignalsPendingAddr (GC_state s) { | |
202 | return &(s->signalsInfo.signalsPending); | |
203 | } | |
204 | ||
/* Sets s->signalsInfo.gcSignalHandled: whether a GC signal handler is
 * installed. */
void GC_setGCSignalHandled (GC_state s, bool b) {
  s->signalsInfo.gcSignalHandled = b;
}
208 | ||
209 | bool GC_getGCSignalPending (GC_state s) { | |
210 | return (s->signalsInfo.gcSignalPending); | |
211 | } | |
212 | ||
/* Sets s->signalsInfo.gcSignalPending: whether a GC signal is pending. */
void GC_setGCSignalPending (GC_state s, bool b) {
  s->signalsInfo.gcSignalPending = b;
}