1 : /* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=79 ft=cpp:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is SpiderMonkey JavaScript engine.
18 : *
19 : * The Initial Developer of the Original Code is
20 : * Mozilla Corporation.
21 : * Portions created by the Initial Developer are Copyright (C) 2009
22 : * the Initial Developer. All Rights Reserved.
23 : *
24 : * Contributor(s):
25 : * Luke Wagner <luke@mozilla.com>
26 : *
27 : * Alternatively, the contents of this file may be used under the terms of
28 : * either the GNU General Public License Version 2 or later (the "GPL"), or
29 : * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 : * in which case the provisions of the GPL or the LGPL are applicable instead
31 : * of those above. If you wish to allow use of your version of this file only
32 : * under the terms of either the GPL or the LGPL, and not to allow others to
33 : * use your version of this file under the terms of the MPL, indicate your
34 : * decision by deleting the provisions above and replace them with the notice
35 : * and other provisions required by the GPL or the LGPL. If you do not delete
36 : * the provisions above, a recipient may use your version of this file under
37 : * the terms of any one of the MPL, the GPL or the LGPL.
38 : *
39 : * ***** END LICENSE BLOCK ***** */
40 :
41 : #include "jscntxt.h"
42 : #include "jsgcmark.h"
43 : #include "methodjit/MethodJIT.h"
44 : #include "Stack.h"
45 :
46 : #include "jsgcinlines.h"
47 : #include "jsobjinlines.h"
48 :
49 : #include "Stack-inl.h"
50 :
51 : /* Includes to get to low-level memory-mapping functionality. */
52 : #ifdef XP_WIN
53 : # include "jswin.h"
54 : #elif defined(XP_OS2)
55 : # define INCL_DOSMEMMGR
56 : # include <os2.h>
57 : #else
58 : # include <unistd.h>
59 : # include <sys/mman.h>
60 : # if !defined(MAP_ANONYMOUS)
61 : # if defined(MAP_ANON)
62 : # define MAP_ANONYMOUS MAP_ANON
63 : # else
64 : # define MAP_ANONYMOUS 0
65 : # endif
66 : # endif
67 : #endif
68 :
69 : using namespace js;
70 :
71 : /*****************************************************************************/
72 :
73 : void
74 179170 : StackFrame::initExecuteFrame(JSScript *script, StackFrame *prev, FrameRegs *regs,
75 : const Value &thisv, JSObject &scopeChain, ExecuteType type)
76 : {
77 : /*
78 : * See encoding of ExecuteType. When GLOBAL isn't set, we are executing a
79 : * script in the context of another frame and the frame type is determined
80 : * by the context.
81 : */
82 179170 : flags_ = type | HAS_SCOPECHAIN | HAS_BLOCKCHAIN | HAS_PREVPC;
83 179170 : if (!(flags_ & GLOBAL))
84 81403 : flags_ |= (prev->flags_ & (FUNCTION | GLOBAL));
85 :
86 179170 : Value *dstvp = (Value *)this - 2;
87 179170 : dstvp[1] = thisv;
88 :
89 179170 : if (isFunctionFrame()) {
90 79201 : dstvp[0] = prev->calleev();
91 79201 : exec = prev->exec;
92 79201 : u.evalScript = script;
93 : } else {
94 99969 : JS_ASSERT(isGlobalFrame());
95 99969 : dstvp[0] = NullValue();
96 99969 : exec.script = script;
97 : #ifdef DEBUG
98 99969 : u.evalScript = (JSScript *)0xbad;
99 : #endif
100 : }
101 :
102 179170 : scopeChain_ = &scopeChain;
103 179170 : prev_ = prev;
104 179170 : prevpc_ = regs ? regs->pc : (jsbytecode *)0xbad;
105 179170 : prevInline_ = regs ? regs->inlined() : NULL;
106 179170 : blockChain_ = NULL;
107 :
108 : #ifdef DEBUG
109 179170 : ncode_ = (void *)0xbad;
110 179170 : Debug_SetValueRangeToCrashOnTouch(&rval_, 1);
111 179170 : hookData_ = (void *)0xbad;
112 179170 : annotation_ = (void *)0xbad;
113 : #endif
114 :
115 179170 : if (prev && prev->annotation())
116 0 : setAnnotation(prev->annotation());
117 179170 : }
118 :
119 : void
120 281706 : StackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
121 : {
122 281706 : PodZero(this);
123 281706 : flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN;
124 281706 : initPrev(cx);
125 281706 : JS_ASSERT(chain.isGlobal());
126 281706 : setScopeChainNoCallObj(chain);
127 281706 : }
128 :
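/*
 * Copy a frame and its args/slots between the VM stack and a generator's
 * floating frame (see pushGeneratorFrame/popGeneratorFrame below), then
 * repoint any Call/Arguments objects that refer to the old frame. T and U
 * are Value or HeapValue depending on the direction of the copy (see the
 * explicit instantiations after this function); doPostBarrier says whether
 * the destination cells need post-barriers.
 */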
129 : template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier>
130 : void
131 116877 : StackFrame::stealFrameAndSlots(StackFrame *fp, T *vp, StackFrame *otherfp, U *othervp,
132 : Value *othersp)
133 : {
134 116877 : JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp));
135 116877 : JS_ASSERT((Value *)othervp == otherfp->actualArgs() - 2);
136 116877 : JS_ASSERT(othersp >= otherfp->slots());
137 116877 : JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots());
138 116877 : JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp);
139 :
140 : /* Copy args, StackFrame, and slots. */
141 116877 : U *srcend = (U *)otherfp->formalArgsEnd();
142 116877 : T *dst = vp;
143 410774 : for (U *src = othervp; src < srcend; src++, dst++)
144 293897 : *dst = *src;
145 :
146 116877 : *fp = *otherfp;
147 : if (doPostBarrier)
148 64556 : fp->writeBarrierPost();
149 :
150 116877 : srcend = (U *)othersp;
151 116877 : dst = (T *)fp->slots();
152 419873 : for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++)
153 302996 : *dst = *src;
154 :
155 : /*
156 : * Repoint Call, Arguments, Block and With objects to the new live frame.
157 : * Call and Arguments are done directly because we have pointers to them.
158 : * Block and With objects are done indirectly through 'liveFrame'. See
159 : * js_LiveFrameToFloating comment in jsiter.h.
160 : */
161 116877 : if (hasCallObj()) {
162 3844 : CallObject &obj = callObj();
163 3844 : obj.setStackFrame(this);
164 3844 : otherfp->flags_ &= ~HAS_CALL_OBJ;
165 3844 : if (js_IsNamedLambda(fun())) {
166 396 : DeclEnvObject &env = obj.enclosingScope().asDeclEnv();
167 396 : env.setStackFrame(this);
168 : }
169 : }
170 116877 : if (hasArgsObj()) {
171 225 : ArgumentsObject &argsobj = argsObj();
172 225 : if (argsobj.isNormalArguments())
173 54 : argsobj.setStackFrame(this);
174 : else
175 171 : JS_ASSERT(!argsobj.maybeStackFrame());
176 225 : otherfp->flags_ &= ~HAS_ARGS_OBJ;
177 : }
178 116877 : }
179 :
180 : /* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
181 : template void StackFrame::stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
182 : StackFrame *, Value *,
183 : StackFrame *, HeapValue *, Value *);
184 : template void StackFrame::stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
185 : StackFrame *, HeapValue *,
186 : StackFrame *, Value *, Value *);
187 :
188 : void
189 64556 : StackFrame::writeBarrierPost()
190 : {
191 : /* This needs to follow the same rules as in js_TraceStackFrame. */
192 64556 : if (scopeChain_)
193 64556 : JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_);
194 64556 : if (isDummyFrame())
195 0 : return;
196 64556 : if (hasArgsObj())
197 126 : JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_);
198 64556 : if (isScriptFrame()) {
199 64556 : if (isFunctionFrame()) {
200 64556 : JSFunction::writeBarrierPost((JSObject *)exec.fun, (void *)&exec.fun);
201 64556 : if (isEvalFrame())
202 0 : JSScript::writeBarrierPost(u.evalScript, (void *)&u.evalScript);
203 : } else {
204 0 : JSScript::writeBarrierPost(exec.script, (void *)&exec.script);
205 : }
206 : }
207 64556 : if (hasReturnValue())
208 50678 : HeapValue::writeBarrierPost(rval_, &rval_);
209 : }
210 :
211 : #ifdef DEBUG
212 : JSObject *const StackFrame::sInvalidScopeChain = (JSObject *)0xbeef;
213 : #endif
214 :
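/*
 * Slow path for prevpc(): the pc was not recorded when this frame was
 * pushed (HAS_PREVPC is unset), so recover it by mapping the saved JIT
 * return address (ncode_) back to a bytecode pc, then cache the result.
 */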
215 : jsbytecode *
216 1277983 : StackFrame::prevpcSlow(JSInlinedSite **pinlined)
217 : {
218 1277983 : JS_ASSERT(!(flags_ & HAS_PREVPC));
219 : #if defined(JS_METHODJIT) && defined(JS_MONOIC)
220 1277983 : StackFrame *p = prev();
221 1277983 : mjit::JITScript *jit = p->script()->getJIT(p->isConstructing());
222 1277983 : prevpc_ = jit->nativeToPC(ncode_, &prevInline_);
223 1277983 : flags_ |= HAS_PREVPC;
224 1277983 : if (pinlined)
225 1277983 : *pinlined = prevInline_;
226 1277983 : return prevpc_;
227 : #else
228 : JS_NOT_REACHED("Unknown PC for frame");
229 : return NULL;
230 : #endif
231 : }
232 :
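/*
 * "Quadratic" because, when this frame is not the innermost one, finding
 * its pc requires walking the segment's frame chain to locate the
 * next-younger frame (computeNextFrame), which is O(stack depth) per call
 * and hence quadratic when done for every frame on the stack.
 */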
233 : jsbytecode *
234 28703455 : StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSite **pinlined)
235 : {
236 28703455 : JS_ASSERT_IF(next, next->prev() == this);
237 :
238 28703455 : StackSegment &seg = stack.space().containingSegment(this);
239 28703455 : FrameRegs ®s = seg.regs();
240 :
241 : /*
242 : * This isn't just an optimization; seg->computeNextFrame(fp) is only
243 :      * defined if fp != seg->fp().
244 : */
245 28703455 : if (regs.fp() == this) {
246 27517960 : if (pinlined)
247 0 : *pinlined = regs.inlined();
248 27517960 : return regs.pc;
249 : }
250 :
251 1185495 : if (!next)
252 1184021 : next = seg.computeNextFrame(this);
253 1185495 : return next->prevpc(pinlined);
254 : }
255 :
256 : void
257 614435 : StackFrame::mark(JSTracer *trc)
258 : {
259 : /*
260 : * Normally we would use MarkRoot here, except that generators also take
261 : * this path. However, generators use a special write barrier when the stack
262 : * frame is copied to the floating frame. Therefore, no barrier is needed.
263 : */
264 614435 : if (flags_ & HAS_SCOPECHAIN)
265 614430 : gc::MarkObjectUnbarriered(trc, &scopeChain_, "scope chain");
266 614435 : if (isDummyFrame())
267 2857 : return;
268 611578 : if (hasArgsObj())
269 1402 : gc::MarkObjectUnbarriered(trc, &argsObj_, "arguments");
270 611578 : if (isFunctionFrame()) {
271 586308 : gc::MarkObjectUnbarriered(trc, &exec.fun, "fun");
272 586308 : if (isEvalFrame())
273 170 : gc::MarkScriptUnbarriered(trc, &u.evalScript, "eval script");
274 : } else {
275 25270 : gc::MarkScriptUnbarriered(trc, &exec.script, "script");
276 : }
277 611578 : if (IS_GC_MARKING_TRACER(trc))
278 603033 : script()->compartment()->active = true;
279 611578 : gc::MarkValueUnbarriered(trc, &returnValue(), "rval");
280 : }
281 :
282 : /*****************************************************************************/
283 :
284 : bool
285 105359160 : StackSegment::contains(const StackFrame *fp) const
286 : {
287 : /* NB: this depends on the continuity of segments in memory. */
288 105359160 : return (Value *)fp >= slotsBegin() && (Value *)fp <= (Value *)maybefp();
289 : }
290 :
291 : bool
292 56686301 : StackSegment::contains(const FrameRegs *regs) const
293 : {
294 56686301 : return regs && contains(regs->fp());
295 : }
296 :
297 : bool
298 111609098 : StackSegment::contains(const CallArgsList *call) const
299 : {
300 111609098 : if (!call || !calls_)
301 55186375 : return false;
302 :
303 : /* NB: this depends on the continuity of segments in memory. */
304 56422723 : Value *vp = call->array();
305 56422723 : return vp > slotsBegin() && vp <= calls_->array();
306 : }
307 :
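/*
 * Find the frame whose prev-link points at 'f' by walking back from the
 * segment's innermost frame. 'f' must be contained in this segment and
 * must not itself be the innermost frame.
 */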
308 : StackFrame *
309 1184026 : StackSegment::computeNextFrame(const StackFrame *f) const
310 : {
311 1184026 : JS_ASSERT(contains(f) && f != fp());
312 :
313 1184026 : StackFrame *next = fp();
314 : StackFrame *prev;
315 10663349 : while ((prev = next->prev()) != f)
316 8295297 : next = prev;
317 1184026 : return next;
318 : }
319 :
320 : Value *
321 107466770 : StackSegment::end() const
322 : {
323 : /* NB: this depends on the continuity of segments in memory. */
324 107466770 : JS_ASSERT_IF(calls_ || regs_, contains(calls_) || contains(regs_));
325 : Value *p = calls_
326 : ? regs_
327 55411934 : ? Max(regs_->sp, calls_->end())
328 592742 : : calls_->end()
329 : : regs_
330 : ? regs_->sp
331 163471446 : : slotsBegin();
332 107466770 : JS_ASSERT(p >= slotsBegin());
333 107466770 : return p;
334 : }
335 :
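/*
 * pushRegs/popRegs and pushCall/popCall maintain the segment's chains of
 * active FrameRegs and CallArgsList entries: pushRegs returns the previous
 * regs so the matching popRegs can restore them, while pushCall links the
 * new call onto the segment's call list and popCall unlinks it.
 */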
336 : FrameRegs *
337 5730928 : StackSegment::pushRegs(FrameRegs ®s)
338 : {
339 5730928 : JS_ASSERT_IF(contains(regs_), regs.fp()->prev() == regs_->fp());
340 5730928 : FrameRegs *prev = regs_;
341 5730928 : regs_ = ®s;
342 5730928 : return prev;
343 : }
344 :
345 : void
346 5730928 : StackSegment::popRegs(FrameRegs *regs)
347 : {
348 5730928 : JS_ASSERT_IF(regs && contains(regs->fp()), regs->fp() == regs_->fp()->prev());
349 5730928 : regs_ = regs;
350 5730928 : }
351 :
352 : void
353 6169747 : StackSegment::pushCall(CallArgsList &callList)
354 : {
355 6169747 : callList.prev_ = calls_;
356 6169747 : calls_ = &callList;
357 6169747 : }
358 :
359 : void
360 2156 : StackSegment::pointAtCall(CallArgsList &callList)
361 : {
362 2156 : calls_ = &callList;
363 2156 : }
364 :
365 : void
366 6169747 : StackSegment::popCall()
367 : {
368 6169747 : calls_ = calls_->prev_;
369 6169747 : }
370 :
371 : /*****************************************************************************/
372 :
373 19869 : StackSpace::StackSpace()
374 : : seg_(NULL),
375 : base_(NULL),
376 : conservativeEnd_(NULL),
377 : #ifdef XP_WIN
378 : commitEnd_(NULL),
379 : #endif
380 : defaultEnd_(NULL),
381 19869 : trustedEnd_(NULL)
382 : {
383 19869 : assertInvariants();
384 19869 : }
385 :
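/*
 * Reserve the virtual address range for the contiguous VM stack. On
 * Windows only COMMIT_BYTES are committed up front; further pages are
 * committed on demand in ensureSpaceSlow. The BUFFER_VALS above defaultEnd_
 * are reserved for trusted JS (see ensureSpaceSlow).
 */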
386 : bool
387 19869 : StackSpace::init()
388 : {
389 : void *p;
390 : #ifdef XP_WIN
391 : p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE);
392 : if (!p)
393 : return false;
394 : void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE);
395 : if (p != check)
396 : return false;
397 : base_ = reinterpret_cast<Value *>(p);
398 : conservativeEnd_ = commitEnd_ = base_ + COMMIT_VALS;
399 : trustedEnd_ = base_ + CAPACITY_VALS;
400 : defaultEnd_ = trustedEnd_ - BUFFER_VALS;
401 : #elif defined(XP_OS2)
402 : if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) &&
403 : DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE))
404 : return false;
405 : base_ = reinterpret_cast<Value *>(p);
406 : trustedEnd_ = base_ + CAPACITY_VALS;
407 : conservativeEnd_ = defaultEnd_ = trustedEnd_ - BUFFER_VALS;
408 : #else
409 19869 : JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0);
410 19869 : p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
411 19869 : if (p == MAP_FAILED)
412 0 : return false;
413 19869 : base_ = reinterpret_cast<Value *>(p);
414 19869 : trustedEnd_ = base_ + CAPACITY_VALS;
415 19869 : conservativeEnd_ = defaultEnd_ = trustedEnd_ - BUFFER_VALS;
416 : #endif
417 19869 : assertInvariants();
418 19869 : return true;
419 : }
420 :
421 19868 : StackSpace::~StackSpace()
422 : {
423 19868 : assertInvariants();
424 19868 : JS_ASSERT(!seg_);
425 19868 : if (!base_)
426 0 : return;
427 : #ifdef XP_WIN
428 : VirtualFree(base_, (commitEnd_ - base_) * sizeof(Value), MEM_DECOMMIT);
429 : VirtualFree(base_, 0, MEM_RELEASE);
430 : #elif defined(XP_OS2)
431 : DosFreeMem(base_);
432 : #else
433 : #ifdef SOLARIS
434 : munmap((caddr_t)base_, CAPACITY_BYTES);
435 : #else
436 19868 : munmap(base_, CAPACITY_BYTES);
437 : #endif
438 : #endif
439 19868 : }
440 :
441 : StackSegment &
442 28703460 : StackSpace::containingSegment(const StackFrame *target) const
443 : {
444 28703516 : for (StackSegment *s = seg_; s; s = s->prevInMemory()) {
445 28703516 : if (s->contains(target))
446 28703460 : return *s;
447 : }
448 0 : JS_NOT_REACHED("frame not in stack space");
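    /* Unreachable; return something to satisfy compilers that want a value. */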
449 : return *(StackSegment *)NULL;
450 : }
451 :
452 : void
453 611587 : StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
454 : {
455 611587 : Value *slotsBegin = fp->slots();
456 :
457 611587 : if (!fp->isScriptFrame()) {
458 2857 : JS_ASSERT(fp->isDummyFrame());
459 2857 : gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
460 2857 : return;
461 : }
462 :
463 : /* If it's a scripted frame, we should have a pc. */
464 608730 : JS_ASSERT(pc);
465 :
466 608730 : JSScript *script = fp->script();
467 608730 : if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
468 598442 : gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
469 598442 : return;
470 : }
471 :
472 : /*
473 : * If the JIT ran a lifetime analysis, then it may have left garbage in the
474 : * slots considered not live. We need to avoid marking them. Additionally,
475 : * in case the analysis information is thrown out later, we overwrite these
476 : * dead slots with valid values so that future GCs won't crash. Analysis
477 : * results are thrown away during the sweeping phase, so we always have at
478 : * least one GC to do this.
479 : */
480 20576 : analyze::AutoEnterAnalysis aea(script->compartment());
481 10288 : analyze::ScriptAnalysis *analysis = script->analysis();
482 10288 : uint32_t offset = pc - script->code;
483 10288 : Value *fixedEnd = slotsBegin + script->nfixed;
484 21447 : for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
485 11159 : uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);
486 :
487 : /* Will this slot be synced by the JIT? */
488 11159 : if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset))
489 4955 : gc::MarkValueRoot(trc, vp, "vm_stack");
490 : else
491 6204 : *vp = UndefinedValue();
492 : }
493 :
494 10288 : gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
495 : }
496 :
497 : void
498 54181 : StackSpace::mark(JSTracer *trc)
499 : {
500 : /*
501 : * JIT code can leave values in an incoherent (i.e., unsafe for precise
502 : * marking) state, hence MarkStackRangeConservatively.
503 : */
504 :
505 : /* NB: this depends on the continuity of segments in memory. */
506 54181 : Value *nextSegEnd = firstUnused();
507 82554 : for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
508 : /*
509 : * A segment describes a linear region of memory that contains a stack
510 : * of native and interpreted calls. For marking purposes, though, we
511 : * only need to distinguish between frames and values and mark
512 : * accordingly. Since native calls only push values on the stack, we
513 : * can effectively lump them together and just iterate over interpreted
514 : * calls. Thus, marking can view the stack as the regex:
515 : * (segment slots (frame slots)*)*
516 : * which gets marked in reverse order.
517 : */
518 28373 : Value *slotsEnd = nextSegEnd;
519 28373 : jsbytecode *pc = seg->maybepc();
520 639960 : for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
521 : /* Mark from fp->slots() to slotsEnd. */
522 611587 : markFrameSlots(trc, fp, slotsEnd, pc);
523 :
524 611587 : fp->mark(trc);
525 611587 : slotsEnd = (Value *)fp;
526 :
527 : JSInlinedSite *site;
528 611587 : pc = fp->prevpc(&site);
529 611587 : JS_ASSERT_IF(fp->prev(), !site);
530 : }
531 28373 : gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
532 28373 : nextSegEnd = (Value *)seg;
533 : }
534 54181 : }
535 :
536 : void
537 0 : StackSpace::markActiveCompartments()
538 : {
539 0 : for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
540 0 : for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
541 0 : MarkCompartmentActive(fp);
542 : }
543 0 : }
544 :
545 : JS_FRIEND_API(bool)
546 231 : StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
547 : JSCompartment *dest) const
548 : {
549 231 : assertInvariants();
550 :
551 : /* See CX_COMPARTMENT comment. */
552 231 : if (dest == (JSCompartment *)CX_COMPARTMENT)
553 231 : dest = cx->compartment;
554 :
555 231 : bool trusted = !dest || dest->principals == cx->runtime->trustedPrincipals();
556 231 : Value *end = trusted ? trustedEnd_ : defaultEnd_;
557 :
558 : /*
559 : * conservativeEnd_ must stay below defaultEnd_: if conservativeEnd_ were
560 : * to be bumped past defaultEnd_, untrusted JS would be able to consume the
561 : * buffer space at the end of the stack reserved for trusted JS.
562 : */
563 :
564 231 : if (end - from < nvals) {
565 129 : if (report)
566 101 : js_ReportOverRecursed(cx);
567 129 : return false;
568 : }
569 :
570 : #ifdef XP_WIN
571 : if (commitEnd_ - from < nvals) {
572 : Value *newCommit = commitEnd_;
573 : Value *request = from + nvals;
574 :
575 : /* Use a dumb loop; will probably execute once. */
576 : JS_ASSERT((trustedEnd_ - newCommit) % COMMIT_VALS == 0);
577 : do {
578 : newCommit += COMMIT_VALS;
579 : JS_ASSERT((trustedEnd_ - newCommit) >= 0);
580 : } while (newCommit < request);
581 :
582 : /* The cast is safe because CAPACITY_BYTES is small. */
583 : int32_t size = static_cast<int32_t>(newCommit - commitEnd_) * sizeof(Value);
584 :
585 : if (!VirtualAlloc(commitEnd_, size, MEM_COMMIT, PAGE_READWRITE)) {
586 : if (report)
587 : js_ReportOverRecursed(cx);
588 : return false;
589 : }
590 :
591 : commitEnd_ = newCommit;
592 : conservativeEnd_ = Min(commitEnd_, defaultEnd_);
593 : assertInvariants();
594 : }
595 : #endif
596 :
597 102 : return true;
598 : }
599 :
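/*
 * Try to make room for nvals more values above 'from'; on success, report
 * the (possibly bumped) conservative limit through 'limit'.
 */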
600 : bool
601 0 : StackSpace::tryBumpLimit(JSContext *cx, Value *from, unsigned nvals, Value **limit)
602 : {
603 0 : if (!ensureSpace(cx, REPORT_ERROR, from, nvals))
604 0 : return false;
605 0 : *limit = conservativeEnd_;
606 0 : return true;
607 : }
608 :
609 : size_t
610 6 : StackSpace::sizeOfCommitted()
611 : {
612 : #ifdef XP_WIN
613 : return (commitEnd_ - base_) * sizeof(Value);
614 : #else
615 6 : return (trustedEnd_ - base_) * sizeof(Value);
616 : #endif
617 : }
618 :
619 : /*****************************************************************************/
620 :
621 108369 : ContextStack::ContextStack(JSContext *cx)
622 : : seg_(NULL),
623 : space_(&cx->runtime->stackSpace),
624 108369 : cx_(cx)
625 108369 : {}
626 :
627 108367 : ContextStack::~ContextStack()
628 : {
629 108367 : JS_ASSERT(!seg_);
630 108367 : }
631 :
632 : bool
633 63800786 : ContextStack::onTop() const
634 : {
635 63800786 : return seg_ && seg_ == space().seg_;
636 : }
637 :
638 : bool
639 52117 : ContextStack::containsSlow(const StackFrame *target) const
640 : {
641 52633 : for (StackSegment *s = seg_; s; s = s->prevInContext()) {
642 52633 : if (s->contains(target))
643 52117 : return true;
644 : }
645 0 : return false;
646 : }
647 :
648 : /*
649 : * This helper function brings the ContextStack to the top of the thread stack
650 : * (so that it can be extended to push a frame and/or arguments) by potentially
651 : * pushing a StackSegment. The 'pushedSeg' outparam indicates whether such a
652 : * segment was pushed (and hence whether the caller needs to call popSegment).
653 : *
654 : * Additionally, to minimize calls to ensureSpace, ensureOnTop ensures that
655 : * there is space for nvars slots on top of the stack.
656 : */
657 : Value *
658 7040930 : ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars,
659 : MaybeExtend extend, bool *pushedSeg, JSCompartment *dest)
660 : {
661 7040930 : Value *firstUnused = space().firstUnused();
662 :
663 : #ifdef JS_METHODJIT
664 : /*
665 : * The only calls made by inlined methodjit frames can be to other JIT
666 : * frames associated with the same VMFrame. If we try to Invoke(),
667 : * Execute() or so forth, any topmost inline frame will need to be
668 : * expanded (along with other inline frames in the compartment).
669 : * To avoid pathological behavior here, make sure to mark any topmost
670 : * function as uninlineable, which will expand inline frames if there are
671 : * any and prevent the function from being inlined in the future.
672 : */
673 7040930 : if (FrameRegs *regs = cx->maybeRegs()) {
674 6688720 : JSFunction *fun = NULL;
675 6688720 : if (JSInlinedSite *site = regs->inlined()) {
676 1 : mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
677 1 : fun = chunk->inlineFrames()[site->inlineIndex].fun;
678 : } else {
679 6688719 : StackFrame *fp = regs->fp();
680 6688719 : if (fp->isFunctionFrame()) {
681 6319239 : JSFunction *f = fp->fun();
682 6319239 : if (f->isInterpreted())
683 6319239 : fun = f;
684 : }
685 : }
686 :
687 6688720 : if (fun) {
688 6319240 : fun->script()->uninlineable = true;
689 6319240 : types::MarkTypeObjectFlags(cx, fun, types::OBJECT_FLAG_UNINLINEABLE);
690 : }
691 : }
692 7040930 : JS_ASSERT_IF(cx->hasfp(), !cx->regs().inlined());
693 : #endif
694 :
695 7040930 : if (onTop() && extend) {
696 6412331 : if (!space().ensureSpace(cx, report, firstUnused, nvars, dest))
697 0 : return NULL;
698 6412331 : return firstUnused;
699 : }
700 :
701 628599 : if (!space().ensureSpace(cx, report, firstUnused, VALUES_PER_STACK_SEGMENT + nvars, dest))
702 0 : return NULL;
703 :
704 : FrameRegs *regs;
705 : CallArgsList *calls;
706 628599 : if (seg_ && extend) {
707 2741 : regs = seg_->maybeRegs();
708 2741 : calls = seg_->maybeCalls();
709 : } else {
710 625858 : regs = NULL;
711 625858 : calls = NULL;
712 : }
713 :
714 628599 : seg_ = new(firstUnused) StackSegment(seg_, space().seg_, regs, calls);
715 628599 : space().seg_ = seg_;
716 628599 : *pushedSeg = true;
717 628599 : return seg_->slotsBegin();
718 : }
719 :
720 : void
721 628599 : ContextStack::popSegment()
722 : {
723 628599 : space().seg_ = seg_->prevInMemory();
724 628599 : seg_ = seg_->prevInContext();
725 :
726 628599 : if (!seg_)
727 265354 : cx_->maybeMigrateVersionOverride();
728 628599 : }
729 :
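/*
 * Reserve stack space for callee, |this| and argc arguments, initialize it
 * so the GC can safely scan it (MakeRangeGCSafe), and expose it to the
 * caller through the InvokeArgsGuard. The args are linked onto the
 * segment's call list so the GC and StackIter can find them.
 */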
730 : bool
731 6169747 : ContextStack::pushInvokeArgs(JSContext *cx, unsigned argc, InvokeArgsGuard *iag)
732 : {
733 6169747 : JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
734 :
735 6169747 : unsigned nvars = 2 + argc;
736 6169747 : Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &iag->pushedSeg_);
737 6169747 : if (!firstUnused)
738 0 : return false;
739 :
740 6169747 : MakeRangeGCSafe(firstUnused, nvars);
741 :
742 6169747 : ImplicitCast<CallArgs>(*iag) = CallArgsFromVp(argc, firstUnused);
743 :
744 6169747 : seg_->pushCall(*iag);
745 6169747 : JS_ASSERT(space().firstUnused() == iag->end());
746 6169747 : iag->setPushed(*this);
747 6169747 : return true;
748 : }
749 :
750 : void
751 6169747 : ContextStack::popInvokeArgs(const InvokeArgsGuard &iag)
752 : {
753 6169747 : JS_ASSERT(iag.pushed());
754 6169747 : JS_ASSERT(onTop());
755 6169747 : JS_ASSERT(space().firstUnused() == seg_->calls().end());
756 :
757 6169747 : seg_->popCall();
758 6169747 : if (iag.pushedSeg_)
759 48271 : popSegment();
760 6169747 : }
761 :
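/*
 * Push a StackFrame for a scripted call whose arguments were already
 * pushed by pushInvokeArgs, and activate it by pushing its FrameRegs onto
 * the segment.
 */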
762 : bool
763 5217759 : ContextStack::pushInvokeFrame(JSContext *cx, const CallArgs &args,
764 : InitialFrameFlags initial, InvokeFrameGuard *ifg)
765 : {
766 5217759 : JS_ASSERT(onTop());
767 5217759 : JS_ASSERT(space().firstUnused() == args.end());
768 :
769 5217759 : JSObject &callee = args.callee();
770 5217759 : JSFunction *fun = callee.toFunction();
771 5217759 : JSScript *script = fun->script();
772 :
773 5217759 : StackFrame::Flags flags = ToFrameFlags(initial);
774 5217759 : StackFrame *fp = getCallFrame(cx, REPORT_ERROR, args, fun, script, &flags);
775 5217759 : if (!fp)
776 28 : return false;
777 :
778 5217731 : fp->initCallFrame(cx, *fun, script, args.length(), flags);
779 5217731 : ifg->regs_.prepareToRun(*fp, script);
780 :
781 5217731 : ifg->prevRegs_ = seg_->pushRegs(ifg->regs_);
782 5217731 : JS_ASSERT(space().firstUnused() == ifg->regs_.sp);
783 5217731 : ifg->setPushed(*this);
784 5217731 : return true;
785 : }
786 :
787 : bool
788 179170 : ContextStack::pushExecuteFrame(JSContext *cx, JSScript *script, const Value &thisv,
789 : JSObject &scopeChain, ExecuteType type,
790 : StackFrame *evalInFrame, ExecuteFrameGuard *efg)
791 : {
792 : /*
793 : * Even though global code and indirect eval do not execute in the context
794 : * of the current frame, prev-link these to the current frame so that the
795 : * callstack looks right to the debugger (via CAN_EXTEND). This is safe
796 : * since the scope chain is what determines name lookup and access, not
797 : * prev-links.
798 : *
799 : * Eval-in-frame is the exception since it prev-links to an arbitrary frame
800 : * (possibly in the middle of some previous segment). Thus pass CANT_EXTEND
801 : * (to start a new segment) and link the frame and call chain manually
802 : * below.
803 : */
804 179170 : CallArgsList *evalInFrameCalls = NULL; /* quell overwarning */
805 : StackFrame *prev;
806 : MaybeExtend extend;
807 179170 : if (evalInFrame) {
808 : /* Though the prev-frame is given, need to search for prev-call. */
809 2795 : StackIter iter(cx, StackIter::GO_THROUGH_SAVED);
810 11661 : while (!iter.isScript() || iter.fp() != evalInFrame)
811 6071 : ++iter;
812 2795 : evalInFrameCalls = iter.calls_;
813 2795 : prev = evalInFrame;
814 2795 : extend = CANT_EXTEND;
815 : } else {
816 176375 : prev = maybefp();
817 176375 : extend = CAN_EXTEND;
818 : }
819 :
820 179170 : unsigned nvars = 2 /* callee, this */ + VALUES_PER_STACK_FRAME + script->nslots;
821 179170 : Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, extend, &efg->pushedSeg_);
822 179170 : if (!firstUnused)
823 0 :         return false;
824 :
825 179170 : StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
826 179170 : fp->initExecuteFrame(script, prev, seg_->maybeRegs(), thisv, scopeChain, type);
827 179170 : SetValueRangeToUndefined(fp->slots(), script->nfixed);
828 179170 : efg->regs_.prepareToRun(*fp, script);
829 :
830 : /* pushRegs() below links the prev-frame; manually link the prev-call. */
831 179170 : if (evalInFrame && evalInFrameCalls)
832 2156 : seg_->pointAtCall(*evalInFrameCalls);
833 :
834 179170 : efg->prevRegs_ = seg_->pushRegs(efg->regs_);
835 179170 : JS_ASSERT(space().firstUnused() == efg->regs_.sp);
836 179170 : efg->setPushed(*this);
837 179170 : return true;
838 : }
839 :
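/*
 * Push a dummy (non-script) frame whose scope chain is a global object in
 * compartment 'dest', and switch cx to that compartment. Dummy frames are
 * skipped by StackIter (see settleOnNewState below).
 */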
840 : bool
841 281706 : ContextStack::pushDummyFrame(JSContext *cx, JSCompartment *dest, JSObject &scopeChain, DummyFrameGuard *dfg)
842 : {
843 281706 : JS_ASSERT(dest == scopeChain.compartment());
844 :
845 281706 : unsigned nvars = VALUES_PER_STACK_FRAME;
846 281706 : Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &dfg->pushedSeg_, dest);
847 281706 : if (!firstUnused)
848 0 : return false;
849 :
850 281706 : StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused);
851 281706 : fp->initDummyFrame(cx, scopeChain);
852 281706 : dfg->regs_.initDummyFrame(*fp);
853 :
854 281706 : cx->setCompartment(dest);
855 281706 : dfg->prevRegs_ = seg_->pushRegs(dfg->regs_);
856 281706 : JS_ASSERT(space().firstUnused() == dfg->regs_.sp);
857 281706 : dfg->setPushed(*this);
858 281706 : return true;
859 : }
860 :
861 : void
862 5730928 : ContextStack::popFrame(const FrameGuard &fg)
863 : {
864 5730928 : JS_ASSERT(fg.pushed());
865 5730928 : JS_ASSERT(onTop());
866 5730928 : JS_ASSERT(space().firstUnused() == fg.regs_.sp);
867 5730928 : JS_ASSERT(&fg.regs_ == &seg_->regs());
868 :
869 5730928 : if (fg.regs_.fp()->isNonEvalFunctionFrame())
870 5270052 : fg.regs_.fp()->functionEpilogue();
871 :
872 5730928 : seg_->popRegs(fg.prevRegs_);
873 5730928 : if (fg.pushedSeg_)
874 222342 : popSegment();
875 :
876 : /*
877 : * NB: this code can call out and observe the stack (e.g., through GC), so
878 : * it should only be called from a consistent stack state.
879 : */
880 5730928 : if (!hasfp())
881 350439 : cx_->resetCompartment();
882 5730928 : }
883 :
884 : bool
885 52321 : ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
886 : {
887 52321 : StackFrame *genfp = gen->floatingFrame();
888 52321 : HeapValue *genvp = gen->floatingStack;
889 52321 : unsigned vplen = (HeapValue *)genfp - genvp;
890 :
891 52321 : unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
892 52321 : Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
893 52321 : if (!firstUnused)
894 0 : return false;
895 :
896 52321 : StackFrame *stackfp = reinterpret_cast<StackFrame *>(firstUnused + vplen);
897 52321 : Value *stackvp = (Value *)stackfp - vplen;
898 :
899 : /* Save this for popGeneratorFrame. */
900 52321 : gfg->gen_ = gen;
901 52321 : gfg->stackvp_ = stackvp;
902 :
903 : /*
904 : * Trigger incremental barrier on the floating frame's generator object.
905 : * This is normally traced through only by associated arguments/call
906 : * objects, but only when the generator is not actually on the stack.
907 : * We don't need to worry about generational barriers as the generator
908 : * object has a trace hook and cannot be nursery allocated.
909 : */
910 52321 : JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
911 52321 : JS_ASSERT(genobj->getClass()->trace);
912 52321 : JSObject::writeBarrierPre(genobj);
913 :
914 : /* Copy from the generator's floating frame to the stack. */
915 : stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
916 52321 : stackfp, stackvp, genfp, genvp, gen->regs.sp);
917 52321 : stackfp->resetGeneratorPrev(cx);
918 52321 : stackfp->unsetFloatingGenerator();
919 52321 : gfg->regs_.rebaseFromTo(gen->regs, *stackfp);
920 :
921 52321 : gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
922 52321 : JS_ASSERT(space().firstUnused() == gfg->regs_.sp);
923 52321 : gfg->setPushed(*this);
924 52321 : return true;
925 : }
926 :
927 : void
928 52321 : ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
929 : {
930 52321 : JSGenerator *gen = gfg.gen_;
931 52321 : StackFrame *genfp = gen->floatingFrame();
932 52321 : HeapValue *genvp = gen->floatingStack;
933 :
934 52321 : const FrameRegs &stackRegs = gfg.regs_;
935 52321 : StackFrame *stackfp = stackRegs.fp();
936 52321 : Value *stackvp = gfg.stackvp_;
937 :
938 : /* Copy from the stack to the generator's floating frame. */
939 52321 : gen->regs.rebaseFromTo(stackRegs, *genfp);
940 : genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
941 52321 : genfp, genvp, stackfp, stackvp, stackRegs.sp);
942 52321 : genfp->setFloatingGenerator();
943 :
944 : /* ~FrameGuard/popFrame will finish the popping. */
945 52321 : JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
946 52321 : }
947 :
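/*
 * Hide the current frame chain by pushing a new, empty segment that does
 * not extend the previous one (CANT_EXTEND). Immediately afterwards cx has
 * no current frame (hasfp() is false) until new frames are pushed or
 * restoreFrameChain pops the segment.
 */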
948 : bool
949 357986 : ContextStack::saveFrameChain()
950 : {
951 357986 : JSCompartment *dest = NULL;
952 :
953 : bool pushedSeg;
954 357986 : if (!ensureOnTop(cx_, REPORT_ERROR, 0, CANT_EXTEND, &pushedSeg, dest))
955 0 : return false;
956 :
957 357986 : JS_ASSERT(pushedSeg);
958 357986 : JS_ASSERT(!hasfp());
959 357986 : JS_ASSERT(onTop() && seg_->isEmpty());
960 :
961 357986 : cx_->resetCompartment();
962 357986 : return true;
963 : }
964 :
965 : void
966 357986 : ContextStack::restoreFrameChain()
967 : {
968 357986 : JS_ASSERT(onTop() && seg_->isEmpty());
969 :
970 357986 : popSegment();
971 357986 : cx_->resetCompartment();
972 357986 : }
973 :
974 : /*****************************************************************************/
975 :
976 : void
977 12409 : StackIter::poisonRegs()
978 : {
979 12409 : sp_ = (Value *)0xbad;
980 12409 : pc_ = (jsbytecode *)0xbad;
981 12409 : script_ = (JSScript *)0xbad;
982 12409 : }
983 :
984 : void
985 4402897 : StackIter::popFrame()
986 : {
987 4402897 : StackFrame *oldfp = fp_;
988 4402897 : JS_ASSERT(seg_->contains(oldfp));
989 4402897 : fp_ = fp_->prev();
990 4402897 : if (seg_->contains(fp_)) {
991 : JSInlinedSite *inline_;
992 4392184 : pc_ = oldfp->prevpc(&inline_);
993 4392184 : JS_ASSERT(!inline_);
994 :
995 : /*
996 : * If there is a CallArgsList element between oldfp and fp_, then sp_
997 : * is ignored, so we only consider the case where there is no
998 : * intervening CallArgsList. The stack representation is not optimized
999 : * for this operation so we need to do a full case analysis of how
1000 : * frames are pushed by considering each ContextStack::push*Frame.
1001 : */
1002 4392184 : if (oldfp->isGeneratorFrame()) {
1003 : /* Generator's args do not overlap with the caller's expr stack. */
1004 2777 : sp_ = (Value *)oldfp->actualArgs() - 2;
1005 4389407 : } else if (oldfp->isNonEvalFunctionFrame()) {
1006 : /*
1007 : * When Invoke is called from a native, there will be an enclosing
1008 : * pushInvokeArgs which pushes a CallArgsList element so we can
1009 : * ignore that case. The other two cases of function call frames are
1010 :          * Invoke called directly from script and pushInlineFrame. In both
1011 : * cases, the actual arguments of the callee should be included in
1012 : * the caller's expr stack.
1013 : */
1014 4363523 : sp_ = oldfp->actualArgsEnd();
1015 25884 : } else if (oldfp->isFramePushedByExecute()) {
1016 : /* pushExecuteFrame pushes exactly (callee, this) before frame. */
1017 1068 : sp_ = (Value *)oldfp - 2;
1018 : } else {
1019 : /* pushDummyFrame pushes exactly 0 slots before frame. */
1020 24816 : JS_ASSERT(oldfp->isDummyFrame());
1021 24816 : sp_ = (Value *)oldfp;
1022 : }
1023 :
1024 4392184 : script_ = fp_->maybeScript();
1025 : } else {
1026 10713 : poisonRegs();
1027 : }
1028 4402897 : }
1029 :
1030 : void
1031 103189 : StackIter::popCall()
1032 : {
1033 103189 : CallArgsList *oldCall = calls_;
1034 103189 : JS_ASSERT(seg_->contains(oldCall));
1035 103189 : calls_ = calls_->prev();
1036 103189 : if (seg_->contains(fp_)) {
1037 : /* pc_ keeps its same value. */
1038 101993 : sp_ = oldCall->base();
1039 : } else {
1040 1196 : poisonRegs();
1041 : }
1042 103189 : }
1043 :
1044 : void
1045 63804 : StackIter::settleOnNewSegment()
1046 : {
1047 63804 : if (FrameRegs *regs = seg_->maybeRegs()) {
1048 63304 : sp_ = regs->sp;
1049 63304 : pc_ = regs->pc;
1050 63304 : if (fp_)
1051 63304 : script_ = fp_->maybeScript();
1052 : } else {
1053 500 : poisonRegs();
1054 : }
1055 63804 : }
1056 :
1057 : void
1058 63561 : StackIter::startOnSegment(StackSegment *seg)
1059 : {
1060 63561 : seg_ = seg;
1061 63561 : fp_ = seg_->maybefp();
1062 63561 : calls_ = seg_->maybeCalls();
1063 63561 : settleOnNewSegment();
1064 63561 : }
1065 :
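/*
 * If the reconstructed argument pointer falls outside the frame's slots,
 * crash deliberately at a recognizable address rather than read a bogus
 * Value.
 */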
1066 : static void JS_NEVER_INLINE
1067 4103142 : CrashIfInvalidSlot(StackFrame *fp, Value *vp)
1068 : {
1069 4103142 : if (vp < fp->slots() || vp >= fp->slots() + fp->script()->nslots) {
1070 0 : JS_ASSERT(false && "About to dereference invalid slot");
1071 0 : *(int *)0xbad = 0; // show up nicely in crash-stats
1072 0 : MOZ_Assert("About to dereference invalid slot", __FILE__, __LINE__);
1073 : }
1074 4103142 : }
1075 :
1076 : void
1077 4569404 : StackIter::settleOnNewState()
1078 : {
1079 : /*
1080 : * There are elements of the calls_ and fp_ chains that we want to skip
1081 :      * over, so iterate until we settle on one or until there are no more.
1082 : */
1083 83963 : while (true) {
1084 4569404 : if (!fp_ && !calls_) {
1085 10727 : if (savedOption_ == GO_THROUGH_SAVED && seg_->prevInContext()) {
1086 149 : startOnSegment(seg_->prevInContext());
1087 149 : continue;
1088 : }
1089 10578 : state_ = DONE;
1090 10578 : return;
1091 : }
1092 :
1093 : /* Check if popFrame/popCall changed segment. */
1094 4558677 : bool containsFrame = seg_->contains(fp_);
1095 4558677 : bool containsCall = seg_->contains(calls_);
1096 9117597 : while (!containsFrame && !containsCall) {
1097 486 : seg_ = seg_->prevInContext();
1098 486 : containsFrame = seg_->contains(fp_);
1099 486 : containsCall = seg_->contains(calls_);
1100 :
1101 : /* Eval-in-frame allows jumping into the middle of a segment. */
1102 486 : if (containsFrame && seg_->fp() != fp_) {
1103 : /* Avoid duplicating logic; seg_ contains fp_, so no iloop. */
1104 243 : StackIter tmp = *this;
1105 243 : tmp.startOnSegment(seg_);
1106 936 : while (!tmp.isScript() || tmp.fp() != fp_)
1107 450 : ++tmp;
1108 243 : JS_ASSERT(tmp.state_ == SCRIPTED && tmp.seg_ == seg_ && tmp.fp_ == fp_);
1109 243 : *this = tmp;
1110 243 : return;
1111 : }
1112 : /* There is no eval-in-frame equivalent for native calls. */
1113 243 : JS_ASSERT_IF(containsCall, &seg_->calls() == calls_);
1114 243 : settleOnNewSegment();
1115 : }
1116 :
1117 : /*
1118 : * In case of both a scripted frame and call record, use linear memory
1119 : * ordering to decide which was the most recent.
1120 : */
1121 4558434 : if (containsFrame && (!containsCall || (Value *)fp_ >= calls_->array())) {
1122 : /* Nobody wants to see dummy frames. */
1123 4455245 : if (fp_->isDummyFrame()) {
1124 24769 : popFrame();
1125 24769 : continue;
1126 : }
1127 :
1128 : /*
1129 : * As an optimization, there is no CallArgsList element pushed for
1130 : * natives called directly by a script (compiled or interpreted).
1131 : * We catch these by inspecting the bytecode and stack. This check
1132 : * relies on the property that, at a call opcode,
1133 : *
1134 : * regs.sp == vp + 2 + argc
1135 : *
1136 : * The Function.prototype.call optimization leaves no record when
1137 : * 'this' is a native function. Thus, if the following expression
1138 : * runs and breaks in the debugger, the call to 'replace' will not
1139 : * appear on the callstack.
1140 : *
1141 : * (String.prototype.replace).call('a',/a/,function(){debugger});
1142 : *
1143 : * Function.prototype.call will however appear, hence the debugger
1144 : * can, by inspecting 'args.thisv', give some useful information.
1145 : *
1146 : * For Function.prototype.apply, the situation is even worse: since
1147 : * a dynamic number of arguments have been pushed onto the stack
1148 : * (see SplatApplyArgs), there is no efficient way to know how to
1149 : * find the callee. Thus, calls to apply are lost completely.
1150 : */
1151 4430476 : JSOp op = JSOp(*pc_);
1152 4430476 : if (op == JSOP_CALL || op == JSOP_FUNCALL) {
1153 4103142 : unsigned argc = GET_ARGC(pc_);
1154 8206284 : DebugOnly<unsigned> spoff = sp_ - fp_->base();
1155 8206284 : JS_ASSERT_IF(cx_->stackIterAssertionEnabled,
1156 12309426 : spoff == js_ReconstructStackDepth(cx_, fp_->script(), pc_));
1157 4103142 : Value *vp = sp_ - (2 + argc);
1158 :
1159 4103142 : CrashIfInvalidSlot(fp_, vp);
1160 4103142 : if (IsNativeFunction(*vp)) {
1161 59040 : state_ = IMPLICIT_NATIVE;
1162 59040 : args_ = CallArgsFromVp(argc, vp);
1163 : return;
1164 : }
1165 : }
1166 :
1167 4371436 : state_ = SCRIPTED;
1168 8742872 : DebugOnly<JSScript *> script = fp_->script();
1169 8731370 : JS_ASSERT_IF(op != JSOP_FUNAPPLY,
1170 13102806 : sp_ >= fp_->base() && sp_ <= fp_->slots() + script->nslots);
1171 4371436 : JS_ASSERT(pc_ >= script->code && pc_ < script->code + script->length);
1172 : return;
1173 : }
1174 :
1175 : /*
1176 : * A CallArgsList element is pushed for any call to Invoke, regardless
1177 : * of whether the callee is a scripted function or even a callable
1178 : * object. Thus, it is necessary to filter calleev for natives.
1179 : *
1180 : * Second, stuff can happen after the args are pushed but before/after
1181 : * the actual call, so only consider "active" calls. (Since Invoke
1182 : * necessarily clobbers the callee, "active" is also necessary to
1183 : * ensure that the callee slot is valid.)
1184 : */
1185 103189 : if (calls_->active() && IsNativeFunction(calls_->calleev())) {
1186 44144 : state_ = NATIVE;
1187 44144 : args_ = *calls_;
1188 44144 : return;
1189 : }
1190 :
1191 : /* Pop the call and keep looking. */
1192 59045 : popCall();
1193 : }
1194 : }
1195 :
1196 63473 : StackIter::StackIter(JSContext *cx, SavedOption savedOption)
1197 : : cx_(cx),
1198 63473 : savedOption_(savedOption)
1199 : {
1200 : #ifdef JS_METHODJIT
1201 63473 : mjit::ExpandInlineFrames(cx->compartment);
1202 : #endif
1203 :
1204 63473 : if (StackSegment *seg = cx->stack.seg_) {
1205 63169 : startOnSegment(seg);
1206 63169 : settleOnNewState();
1207 : } else {
1208 304 : state_ = DONE;
1209 : }
1210 63473 : }
1211 :
1212 : StackIter &
1213 4481312 : StackIter::operator++()
1214 : {
1215 4481312 : JS_ASSERT(!done());
1216 4481312 : switch (state_) {
1217 : case DONE:
1218 0 :         JS_NOT_REACHED("Unexpected state");
1219 : case SCRIPTED:
1220 4378128 : popFrame();
1221 4378128 : settleOnNewState();
1222 4378128 : break;
1223 : case NATIVE:
1224 44144 : popCall();
1225 44144 : settleOnNewState();
1226 44144 : break;
1227 : case IMPLICIT_NATIVE:
1228 59040 : state_ = SCRIPTED;
1229 59040 : break;
1230 : }
1231 4481312 : return *this;
1232 : }
1233 :
1234 : bool
1235 0 : StackIter::operator==(const StackIter &rhs) const
1236 : {
1237 0 : return done() == rhs.done() &&
1238 0 : (done() ||
1239 0 : (isScript() == rhs.isScript() &&
1240 0 : ((isScript() && fp() == rhs.fp()) ||
1241 0 : (!isScript() && nativeArgs().base() == rhs.nativeArgs().base()))));
1242 : }
1243 :
1244 : /*****************************************************************************/
1245 :
1246 46874 : AllFramesIter::AllFramesIter(StackSpace &space)
1247 : : seg_(space.seg_),
1248 46874 : fp_(seg_ ? seg_->maybefp() : NULL)
1249 : {
1250 46874 : settle();
1251 46874 : }
1252 :
1253 : AllFramesIter&
1254 93358 : AllFramesIter::operator++()
1255 : {
1256 93358 : JS_ASSERT(!done());
1257 93358 : fp_ = fp_->prev();
1258 93358 : settle();
1259 93358 : return *this;
1260 : }
1261 :
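/*
 * If fp_ is not in the current segment (or there is no frame at all), move
 * to older segments in memory order until we find one containing a frame,
 * or run out of segments.
 */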
1262 : void
1263 140232 : AllFramesIter::settle()
1264 : {
1265 285457 : while (seg_ && (!fp_ || !seg_->contains(fp_))) {
1266 4993 : seg_ = seg_->prevInMemory();
1267 4993 : fp_ = seg_ ? seg_->maybefp() : NULL;
1268 : }
1269 :
1270 140232 : JS_ASSERT(!!seg_ == !!fp_);
1271 140232 : JS_ASSERT_IF(fp_, seg_->contains(fp_));
1272 140232 : }
|