1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sw=4 et tw=78:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is SpiderMonkey global object code.
18 : *
19 : * The Initial Developer of the Original Code is
20 : * the Mozilla Foundation.
21 : * Portions created by the Initial Developer are Copyright (C) 2011
22 : * the Initial Developer. All Rights Reserved.
23 : *
24 : * Contributor(s):
25 : *
26 : * Alternatively, the contents of this file may be used under the terms of
27 : * either of the GNU General Public License Version 2 or later (the "GPL"),
28 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 : * in which case the provisions of the GPL or the LGPL are applicable instead
30 : * of those above. If you wish to allow use of your version of this file only
31 : * under the terms of either the GPL or the LGPL, and not to allow others to
32 : * use your version of this file under the terms of the MPL, indicate your
33 : * decision by deleting the provisions above and replace them with the notice
34 : * and other provisions required by the GPL or the LGPL. If you do not delete
35 : * the provisions above, a recipient may use your version of this file under
36 : * the terms of any one of the MPL, the GPL or the LGPL.
37 : *
38 : * ***** END LICENSE BLOCK ***** */
39 :
40 : #ifndef jsgc_barrier_h___
41 : #define jsgc_barrier_h___
42 :
43 : #include "jsapi.h"
44 : #include "jscell.h"
45 :
46 : #include "js/HashTable.h"
47 :
48 : /*
49 : * A write barrier is a mechanism used by incremental or generation GCs to
50 : * ensure that every value that needs to be marked is marked. In general, the
51 : * write barrier should be invoked whenever a write can cause the set of things
52 : * traced through by the GC to change. This includes:
53 : * - writes to object properties
54 : * - writes to array slots
55 : * - writes to fields like JSObject::shape_ that we trace through
56 : * - writes to fields in private data, like JSGenerator::obj
57 : * - writes to non-markable fields like JSObject::private that point to
58 : * markable data
 * The last category is the trickiest. Even though the private pointer does not
60 : * point to a GC thing, changing the private pointer may change the set of
61 : * objects that are traced by the GC. Therefore it needs a write barrier.
62 : *
63 : * Every barriered write should have the following form:
64 : * <pre-barrier>
65 : * obj->field = value; // do the actual write
66 : * <post-barrier>
67 : * The pre-barrier is used for incremental GC and the post-barrier is for
68 : * generational GC.
69 : *
70 : * PRE-BARRIER
71 : *
72 : * To understand the pre-barrier, let's consider how incremental GC works. The
73 : * GC itself is divided into "slices". Between each slice, JS code is allowed to
74 : * run. Each slice should be short so that the user doesn't notice the
75 : * interruptions. In our GC, the structure of the slices is as follows:
76 : *
77 : * 1. ... JS work, which leads to a request to do GC ...
78 : * 2. [first GC slice, which performs all root marking and possibly more marking]
79 : * 3. ... more JS work is allowed to run ...
80 : * 4. [GC mark slice, which runs entirely in drainMarkStack]
81 : * 5. ... more JS work ...
82 : * 6. [GC mark slice, which runs entirely in drainMarkStack]
83 : * 7. ... more JS work ...
84 : * 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
 *  9. ... JS continues uninterrupted now that the GC is finished ...
86 : *
87 : * Of course, there may be a different number of slices depending on how much
88 : * marking is to be done.
89 : *
90 : * The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
91 : * might change the heap in a way that causes the GC to collect an object that
92 : * is actually reachable. The write barrier prevents this from happening. We use
93 : * a variant of incremental GC called "snapshot at the beginning." This approach
94 : * guarantees the invariant that if an object is reachable in step 2, then we
95 : * will mark it eventually. The name comes from the idea that we take a
96 : * theoretical "snapshot" of all reachable objects in step 2; all objects in
97 : * that snapshot should eventually be marked. (Note that the write barrier
98 : * verifier code takes an actual snapshot.)
99 : *
100 : * The basic correctness invariant of a snapshot-at-the-beginning collector is
101 : * that any object reachable at the end of the GC (step 9) must either:
102 : * (1) have been reachable at the beginning (step 2) and thus in the snapshot
103 : * (2) or must have been newly allocated, in steps 3, 5, or 7.
104 : * To deal with case (2), any objects allocated during an incremental GC are
105 : * automatically marked black.
106 : *
107 : * This strategy is actually somewhat conservative: if an object becomes
108 : * unreachable between steps 2 and 8, it would be safe to collect it. We won't,
109 : * mainly for simplicity. (Also, note that the snapshot is entirely
110 : * theoretical. We don't actually do anything special in step 2 that we wouldn't
 * do in a non-incremental GC.)
112 : *
113 : * It's the pre-barrier's job to maintain the snapshot invariant. Consider the
114 : * write "obj->field = value". Let the prior value of obj->field be
115 : * value0. Since it's possible that value0 may have been what obj->field
116 : * contained in step 2, when the snapshot was taken, the barrier marks
117 : * value0. Note that it only does this if we're in the middle of an incremental
118 : * GC. Since this is rare, the cost of the write barrier is usually just an
119 : * extra branch.
120 : *
121 : * In practice, we implement the pre-barrier differently based on the type of
122 : * value0. E.g., see JSObject::writeBarrierPre, which is used if obj->field is
123 : * a JSObject*. It takes value0 as a parameter.
124 : *
125 : * POST-BARRIER
126 : *
127 : * These are not yet implemented. Once we get generational GC, they will allow
128 : * us to keep track of pointers from non-nursery space into the nursery.
129 : *
130 : * IMPLEMENTATION DETAILS
131 : *
132 : * Since it would be awkward to change every write to memory into a function
133 : * call, this file contains a bunch of C++ classes and templates that use
134 : * operator overloading to take care of barriers automatically. In many cases,
135 : * all that's necessary to make some field be barriered is to replace
136 : * Type *field;
137 : * with
138 : * HeapPtr<Type> field;
139 : * There are also special classes HeapValue and HeapId, which barrier js::Value
140 : * and jsid, respectively.
141 : *
142 : * One additional note: not all object writes need to be barriered. Writes to
143 : * newly allocated objects do not need a barrier as long as the GC is not
144 : * allowed to run in between the allocation and the write. In these cases, we
145 : * use the "obj->field.init(value)" method instead of "obj->field = value".
146 : * We use the init naming idiom in many places to signify that a field is being
147 : * assigned for the first time, and that no GCs have taken place between the
148 : * object allocation and the assignment.
149 : */
150 :
151 : struct JSXML;
152 :
153 : namespace js {
154 :
/*
 * A write-barriered pointer to a GC thing of type T. Mutating writes run the
 * incremental-GC pre-barrier before the store and the (future generational)
 * post-barrier after it, following the protocol described in the file
 * comment. The Unioned parameter lets the same machine word alternatively
 * hold raw non-pointer data; such uses must go through unsafeGetUnioned().
 */
template<class T, typename Unioned = uintptr_t>
class HeapPtr
{
    /* Overlay storage: either the barriered T* or the raw unioned payload. */
    union {
        T *value;
        Unioned other;
    };

  public:
    HeapPtr() : value(NULL) {}
    /*
     * Construction writes into fresh memory: there is no prior value for the
     * snapshot invariant to protect, so only the post-barrier is needed.
     */
    explicit HeapPtr(T *v) : value(v) { post(); }
    explicit HeapPtr(const HeapPtr<T> &v) : value(v.value) { post(); }

    /*
     * The outgoing value may have been part of the GC's snapshot, so the
     * destructor runs the pre-barrier to keep it marked.
     */
    ~HeapPtr() { pre(); }

    /* Use this to install a ptr into a newly allocated object. */
    void init(T *v) {
        JS_ASSERT(!IsPoisonedPtr<T>(v));
        value = v;
        post();
    }

    /* Use to set the pointer to NULL. */
    void clear() {
        pre();
        value = NULL;
    }

    /* Use this if the automatic coercion to T* isn't working. */
    T *get() const { return value; }

    /*
     * Use these if you want to change the value without invoking the barrier.
     * Obviously this is dangerous unless you know the barrier is not needed.
     */
    T **unsafeGet() { return &value; }
    void unsafeSet(T *v) { value = v; }

    Unioned *unsafeGetUnioned() { return &other; }

    /* Barriered assignment: pre-barrier the old value, store, post-barrier. */
    HeapPtr<T, Unioned> &operator=(T *v) {
        pre();
        JS_ASSERT(!IsPoisonedPtr<T>(v));
        value = v;
        post();
        return *this;
    }

    HeapPtr<T, Unioned> &operator=(const HeapPtr<T> &v) {
        pre();
        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
        value = v.value;
        post();
        return *this;
    }

    T &operator*() const { return *value; }
    T *operator->() const { return value; }

    operator T*() const { return value; }

  private:
    /* T supplies the barrier hooks (e.g. JSObject::writeBarrierPre). */
    void pre() { T::writeBarrierPre(value); }
    void post() { T::writeBarrierPost(value, (void *)&value); }

    /* Make this friend so it can access pre() and post(). */
    template<class T1, class T2>
    friend inline void
    BarrieredSetPair(JSCompartment *comp,
                     HeapPtr<T1> &v1, T1 *val1,
                     HeapPtr<T2> &v2, T2 *val2);
};
227 :
228 : /*
229 : * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
230 : * barriers with only one branch to check if we're in an incremental GC.
231 : */
template<class T1, class T2>
static inline void
BarrieredSetPair(JSCompartment *comp,
                 HeapPtr<T1> &v1, T1 *val1,
                 HeapPtr<T2> &v2, T2 *val2)
{
    /* One shared needWriteBarrierPre check covers both pre-barriers. */
    if (T1::needWriteBarrierPre(comp)) {
        v1.pre();
        v2.pre();
    }
    /*
     * The raw writes are safe: the pre-barriers above have already run (or
     * were unnecessary), and the post-barriers follow immediately.
     */
    v1.unsafeSet(val1);
    v2.unsafeSet(val2);
    v1.post();
    v2.post();
}
247 :
/* Forward declarations for the GC-thing types named in the typedefs below. */
struct Shape;
class BaseShape;
namespace types { struct TypeObject; }

/* Convenience names for the barriered pointer types used throughout the VM. */
typedef HeapPtr<JSAtom> HeapPtrAtom;
typedef HeapPtr<JSObject> HeapPtrObject;
typedef HeapPtr<JSFunction> HeapPtrFunction;
typedef HeapPtr<JSString> HeapPtrString;
typedef HeapPtr<JSScript> HeapPtrScript;
typedef HeapPtr<Shape> HeapPtrShape;
typedef HeapPtr<BaseShape> HeapPtrBaseShape;
typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
typedef HeapPtr<JSXML> HeapPtrXML;
261 :
262 : /* Useful for hashtables with a HeapPtr as key. */
263 : template<class T>
264 : struct HeapPtrHasher
265 : {
266 : typedef HeapPtr<T> Key;
267 : typedef T *Lookup;
268 :
269 23529 : static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
270 8281 : static bool match(const Key &k, Lookup l) { return k.get() == l; }
271 : };
272 :
/* Specialized hashing policy so HeapPtr<T> keys hash and match like raw T*. */
template <class T>
struct DefaultHasher< HeapPtr<T> >: HeapPtrHasher<T> { };
276 :
/*
 * Common base for the barriered Value wrappers (HeapValue, HeapSlot). It
 * owns the underlying Value and exposes read-only accessors; mutation is
 * left to the subclasses, which supply the appropriate barrier protocol.
 */
class EncapsulatedValue
{
  protected:
    Value value;

    /*
     * Ensure that EncapsulatedValue is not constructable, except by our
     * implementations.
     */
    EncapsulatedValue() MOZ_DELETE;
    EncapsulatedValue(const EncapsulatedValue &v) MOZ_DELETE;
    EncapsulatedValue &operator=(const Value &v) MOZ_DELETE;
    EncapsulatedValue &operator=(const EncapsulatedValue &v) MOZ_DELETE;

    EncapsulatedValue(const Value &v) : value(v) {}
    ~EncapsulatedValue() {}

  public:
    /* Read access needs no write barrier; these forward to the Value. */
    const Value &get() const { return value; }
    Value *unsafeGet() { return &value; }
    operator const Value &() const { return value; }

    /* Type queries, forwarded to Value. */
    bool isUndefined() const { return value.isUndefined(); }
    bool isNull() const { return value.isNull(); }
    bool isBoolean() const { return value.isBoolean(); }
    bool isTrue() const { return value.isTrue(); }
    bool isFalse() const { return value.isFalse(); }
    bool isNumber() const { return value.isNumber(); }
    bool isInt32() const { return value.isInt32(); }
    bool isDouble() const { return value.isDouble(); }
    bool isString() const { return value.isString(); }
    bool isObject() const { return value.isObject(); }
    bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
    bool isGCThing() const { return value.isGCThing(); }
    bool isMarkable() const { return value.isMarkable(); }

    /* Unchecked extractors, forwarded to Value. */
    bool toBoolean() const { return value.toBoolean(); }
    double toNumber() const { return value.toNumber(); }
    int32_t toInt32() const { return value.toInt32(); }
    double toDouble() const { return value.toDouble(); }
    JSString *toString() const { return value.toString(); }
    JSObject &toObject() const { return value.toObject(); }
    JSObject *toObjectOrNull() const { return value.toObjectOrNull(); }
    void *toGCThing() const { return value.toGCThing(); }

    JSGCTraceKind gcKind() const { return value.gcKind(); }

    uint64_t asRawBits() const { return value.asRawBits(); }

#ifdef DEBUG
    JSWhyMagic whyMagic() const { return value.whyMagic(); }
#endif

    /* Incremental-GC pre-barrier for an outgoing Value (defined out of line). */
    static inline void writeBarrierPre(const Value &v);
    static inline void writeBarrierPre(JSCompartment *comp, const Value &v);

  protected:
    inline void pre();
    inline void pre(JSCompartment *comp);
};
337 :
/*
 * A barriered Value field on the heap. Use this for Values outside of object
 * slot arrays; slots use HeapSlot, whose writes also carry the owning object
 * and slot index. All definitions are out of line.
 */
class HeapValue : public EncapsulatedValue
{
  public:
    explicit inline HeapValue();
    explicit inline HeapValue(const Value &v);
    explicit inline HeapValue(const HeapValue &v);
    inline ~HeapValue();

    /* First-write initializers for freshly allocated memory (no pre-barrier;
     * see the init idiom in the file comment). */
    inline void init(const Value &v);
    inline void init(JSCompartment *comp, const Value &v);

    inline HeapValue &operator=(const Value &v);
    inline HeapValue &operator=(const HeapValue &v);

    /*
     * This is a faster version of operator=. Normally, operator= has to
     * determine the compartment of the value before it can decide whether to do
     * the barrier. If you already know the compartment, it's faster to pass it
     * in.
     */
    inline void set(JSCompartment *comp, const Value &v);

    static inline void writeBarrierPost(const Value &v, void *addr);
    static inline void writeBarrierPost(JSCompartment *comp, const Value &v, void *addr);

  private:
    inline void post();
    inline void post(JSCompartment *comp);
};
367 :
/*
 * A barriered Value stored in an object's slot array. Unlike HeapValue,
 * every write must supply the owning object and slot index so the
 * post/generational barrier can identify the exact heap location.
 */
class HeapSlot : public EncapsulatedValue
{
    /*
     * Operator= is not valid for HeapSlot because it must take the object and
     * slot offset to provide to the post/generational barrier.
     */
    inline HeapSlot &operator=(const Value &v) MOZ_DELETE;
    inline HeapSlot &operator=(const HeapValue &v) MOZ_DELETE;
    inline HeapSlot &operator=(const HeapSlot &v) MOZ_DELETE;

  public:
    explicit inline HeapSlot() MOZ_DELETE;
    explicit inline HeapSlot(JSObject *obj, uint32_t slot, const Value &v);
    explicit inline HeapSlot(JSObject *obj, uint32_t slot, const HeapSlot &v);
    inline ~HeapSlot();

    /* First-write initializers for freshly allocated slots (no pre-barrier;
     * see the init idiom in the file comment). */
    inline void init(JSObject *owner, uint32_t slot, const Value &v);
    inline void init(JSCompartment *comp, JSObject *owner, uint32_t slot, const Value &v);

    /* Barriered writes; the comp overloads skip the compartment lookup. */
    inline void set(JSObject *owner, uint32_t slot, const Value &v);
    inline void set(JSCompartment *comp, JSObject *owner, uint32_t slot, const Value &v);

    static inline void writeBarrierPost(JSObject *obj, uint32_t slot);
    static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slotno);

  private:
    inline void post(JSObject *owner, uint32_t slot);
    inline void post(JSCompartment *comp, JSObject *owner, uint32_t slot);
};
397 :
398 : /*
399 : * NOTE: This is a placeholder for bug 619558.
400 : *
401 : * Run a post write barrier that encompasses multiple contiguous slots in a
402 : * single step.
403 : */
static inline void
SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
{
    /* Intentionally empty: post-barriers are no-ops until generational GC
     * lands (bug 619558); all parameters are unused for now. */
}
408 :
409 : static inline const Value *
410 762182 : Valueify(const EncapsulatedValue *array)
411 : {
412 : JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
413 : JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
414 762182 : return (const Value *)array;
415 : }
416 :
417 : class HeapSlotArray
418 : {
419 : HeapSlot *array;
420 :
421 : public:
422 1395337 : HeapSlotArray(HeapSlot *array) : array(array) {}
423 :
424 761877 : operator const Value *() const { return Valueify(array); }
425 630957 : operator HeapSlot *() const { return array; }
426 :
427 : HeapSlotArray operator +(int offset) const { return HeapSlotArray(array + offset); }
428 2503 : HeapSlotArray operator +(uint32_t offset) const { return HeapSlotArray(array + offset); }
429 : };
430 :
/*
 * A write-barriered jsid, the jsid analogue of HeapPtr/HeapValue (see the
 * file comment). All non-trivial members are defined out of line.
 */
class HeapId
{
    jsid value;

  public:
    explicit HeapId() : value(JSID_VOID) {}
    explicit inline HeapId(jsid id);

    inline ~HeapId();

    /* First-write initializer for freshly allocated memory (see the init
     * idiom in the file comment). */
    inline void init(jsid id);

    inline HeapId &operator=(jsid id);
    inline HeapId &operator=(const HeapId &v);

    bool operator==(jsid id) const { return value == id; }
    bool operator!=(jsid id) const { return value != id; }

    jsid get() const { return value; }
    jsid *unsafeGet() { return &value; }
    operator jsid() const { return value; }

  private:
    inline void pre();
    inline void post();

    /* Copy construction is disallowed; copy the underlying jsid instead. */
    HeapId(const HeapId &v);
};
459 :
460 : /*
461 : * Incremental GC requires that weak pointers have read barriers. This is mostly
462 : * an issue for empty shapes stored in JSCompartment. The problem happens when,
463 : * during an incremental GC, some JS code stores one of the compartment's empty
464 : * shapes into an object already marked black. Normally, this would not be a
465 : * problem, because the empty shape would have been part of the initial snapshot
466 : * when the GC started. However, since this is a weak pointer, it isn't. So we
467 : * may collect the empty shape even though a live object points to it. To fix
468 : * this, we mark these empty shapes black whenever they get read out.
469 : */
/*
 * A weak pointer with a read barrier: the referent is marked via
 * T::readBarrier each time it is read out, preserving the snapshot
 * invariant for weak references (see the comment above). Writes through
 * set() deliberately take no barrier; the read barrier compensates.
 */
template<class T>
class ReadBarriered
{
    T *value;

  public:
    ReadBarriered() : value(NULL) {}
    ReadBarriered(T *value) : value(value) {}

    /* Barriered read: mark the referent before handing it out. */
    T *get() const {
        if (!value)
            return NULL;
        T::readBarrier(value);
        return value;
    }

    operator T*() const { return get(); }

    T &operator*() const { return *get(); }
    T *operator->() const { return get(); }

    /* Read without the barrier; only safe when the result cannot be the
     * sole path keeping the referent alive across a GC slice. */
    T *unsafeGet() { return value; }

    void set(T *v) { value = v; }

    /* Null test only; the pointer is not handed out, so no barrier runs.
     * Now const, matching get(): a const ReadBarriered is bool-testable. */
    operator bool() const { return !!value; }
};
497 :
/*
 * The Value analogue of ReadBarriered<T>: a Value whose reads go through a
 * barrier. The barriered accessors are defined out of line.
 */
class ReadBarrieredValue
{
    Value value;

  public:
    ReadBarrieredValue() : value(UndefinedValue()) {}
    ReadBarrieredValue(const Value &value) : value(value) {}

    inline const Value &get() const;
    inline operator const Value &() const;

    inline JSObject &toObject() const;
};
511 :
512 : }
513 :
514 : #endif /* jsgc_barrier_h___ */
|