1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : *
26 : * Alternatively, the contents of this file may be used under the terms of
27 : * either of the GNU General Public License Version 2 or later (the "GPL"),
28 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 : * in which case the provisions of the GPL or the LGPL are applicable instead
30 : * of those above. If you wish to allow use of your version of this file only
31 : * under the terms of either the GPL or the LGPL, and not to allow others to
32 : * use your version of this file under the terms of the MPL, indicate your
33 : * decision by deleting the provisions above and replace them with the notice
34 : * and other provisions required by the GPL or the LGPL. If you do not delete
35 : * the provisions above, a recipient may use your version of this file under
36 : * the terms of any one of the MPL, the GPL or the LGPL.
37 : *
38 : * ***** END LICENSE BLOCK ***** */
39 : #include "jscntxt.h"
40 : #include "FrameState.h"
41 : #include "FrameState-inl.h"
42 : #include "StubCompiler.h"
43 :
44 : using namespace js;
45 : using namespace js::mjit;
46 : using namespace js::analyze;
47 :
48 : /* Because of Value alignment */
49 : JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
50 :
51 95862 : FrameState::FrameState(JSContext *cx, mjit::Compiler &cc,
52 : Assembler &masm, StubCompiler &stubcc)
53 : : cx(cx),
54 : masm(masm), cc(cc), stubcc(stubcc),
55 : a(NULL), entries(NULL), nentries(0), freeRegs(Registers::AvailAnyRegs),
56 95862 : loop(NULL), inTryBlock(false)
57 : {
58 95862 : }
59 :
60 191724 : FrameState::~FrameState()
61 : {
62 285455 : while (a) {
63 93731 : ActiveFrame *parent = a->parent;
64 93731 : a->script->analysis()->clearAllocations();
65 93731 : cx->free_(a);
66 93731 : a = parent;
67 : }
68 95862 : cx->free_(entries);
69 95862 : }
70 :
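/*
 * In outline: compact the tracker in place. Entries which are now dead are
 * untracked, and each surviving entry's index is shifted down by the number
 * of dead entries skipped so far, so tracker.nentries shrinks to match.
 */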
71 : void
72 2764 : FrameState::pruneDeadEntries()
73 : {
74 2764 : unsigned shift = 0;
75 39410 : for (unsigned i = 0; i < tracker.nentries; i++) {
76 36646 : FrameEntry *fe = tracker[i];
77 36646 : if (deadEntry(fe)) {
78 6619 : fe->untrack();
79 6619 : shift++;
80 30027 : } else if (shift) {
81 990 : fe->index_ -= shift;
82 990 : tracker.entries[fe->index_] = fe;
83 : }
84 : }
85 2764 : tracker.nentries -= shift;
86 2764 : }
87 :
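/*
 * Set up bookkeeping for a script being compiled (the outermost script, or
 * an inlined call when 'a' is already non-NULL). For the outermost frame a
 * single calloc'd block is carved into three back-to-back arrays:
 *
 *   entries[nentries]           (FrameEntry)
 *   tracker.entries[nentries]   (FrameEntry *)
 *   extraArray[nentries]        (StackEntryExtra)
 *
 * with the last TEMPORARY_LIMIT entries reserved for temporaries. Inlined
 * frames allocate no new entry storage: their callee/this/args alias the
 * argument values already on the caller's stack.
 */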
88 : bool
89 96663 : FrameState::pushActiveFrame(JSScript *script, uint32_t argc)
90 : {
91 96663 : if (!a) {
92 93731 : this->nentries = analyze::TotalSlots(script) + (script->nslots - script->nfixed) +
93 93731 : StackSpace::STACK_JIT_EXTRA - VALUES_PER_STACK_FRAME;
94 : size_t totalBytes = sizeof(FrameEntry) * nentries + // entries[]
95 : sizeof(FrameEntry *) * nentries + // tracker.entries
96 93731 : sizeof(StackEntryExtra) * nentries; // extraArray
97 93731 : uint8_t *cursor = (uint8_t *)OffTheBooks::calloc_(totalBytes);
98 93731 : if (!cursor)
99 0 : return false;
100 :
101 93731 : this->entries = (FrameEntry *) cursor;
102 93731 : cursor += sizeof(FrameEntry) * nentries;
103 :
104 93731 : this->tracker.entries = (FrameEntry **)cursor;
105 93731 : cursor += sizeof(FrameEntry *) * nentries;
106 :
107 93731 : this->extraArray = (StackEntryExtra *)cursor;
108 93731 : cursor += sizeof(StackEntryExtra) * nentries;
109 :
110 93731 : JS_ASSERT(reinterpret_cast<uint8_t *>(this->entries) + totalBytes == cursor);
111 :
112 : #if defined JS_NUNBOX32
113 93731 : if (!reifier.init(cx, *this, nentries))
114 0 : return false;
115 : #endif
116 :
117 93731 : this->temporaries = this->temporariesTop = this->entries + nentries - TEMPORARY_LIMIT;
118 : }
119 :
120 : /* We should have already checked that argc == nargs */
121 96663 : JS_ASSERT_IF(a, argc == script->function()->nargs);
122 :
123 96663 : ActiveFrame *newa = OffTheBooks::new_<ActiveFrame>();
124 96663 : if (!newa)
125 0 : return false;
126 :
127 96663 : newa->parent = a;
128 96663 : newa->depth = a ? (totalDepth() + VALUES_PER_STACK_FRAME) : 0;
129 :
130 96663 : newa->script = script;
131 96663 : newa->PC = script->code;
132 96663 : newa->analysis = script->analysis();
133 :
134 : /*
135 : * The callee/this/args in the new frame reuse the same entries as are on
136 : * the stack in the old frame.
137 : */
138 96663 : FrameEntry *entriesStart = a ? a->sp - (argc + 2) : entries;
139 96663 : newa->callee_ = entriesStart + analyze::CalleeSlot();
140 96663 : newa->this_ = entriesStart + analyze::ThisSlot();
141 96663 : newa->args = entriesStart + analyze::ArgSlot(0);
142 96663 : newa->locals = entriesStart + analyze::LocalSlot(script, 0);
143 96663 : newa->spBase = entriesStart + analyze::TotalSlots(script);
144 96663 : newa->sp = newa->spBase;
145 :
146 96663 : this->a = newa;
147 :
148 96663 : return true;
149 : }
150 :
151 : void
152 0 : FrameState::associateReg(FrameEntry *fe, RematInfo::RematType type, AnyRegisterID reg)
153 : {
154 0 : freeRegs.takeReg(reg);
155 :
156 0 : if (type == RematInfo::TYPE)
157 0 : fe->type.setRegister(reg.reg());
158 0 : else if (reg.isReg())
159 0 : fe->data.setRegister(reg.reg());
160 : else
161 0 : fe->data.setFPRegister(reg.fpreg());
162 0 : regstate(reg).associate(fe, type);
163 0 : }
164 :
165 : void
166 2932 : FrameState::popActiveFrame()
167 : {
168 2932 : a->analysis->clearAllocations();
169 :
170 2932 : if (a->parent) {
171 : /* Clear registers and copies used by local variables and stack slots. */
172 6988 : for (FrameEntry *fe = a->sp - 1; fe >= a->locals; fe--) {
173 4056 : if (!fe->isTracked())
174 395 : continue;
175 3661 : forgetAllRegs(fe);
176 3661 : fe->clear();
177 : }
178 : }
179 :
180 2932 : ActiveFrame *parent = a->parent;
181 2932 : cx->delete_(a);
182 2932 : a = parent;
183 2932 : }
184 :
185 : void
186 502569 : FrameState::takeReg(AnyRegisterID reg)
187 : {
188 502569 : modifyReg(reg);
189 502569 : if (freeRegs.hasReg(reg)) {
190 484649 : freeRegs.takeReg(reg);
191 484649 : JS_ASSERT(!regstate(reg).usedBy());
192 : } else {
193 17920 : JS_ASSERT(regstate(reg).fe());
194 17920 : evictReg(reg);
195 : }
196 502569 : }
197 :
198 : #ifdef DEBUG
199 : const char *
200 154612 : FrameState::entryName(const FrameEntry *fe) const
201 : {
202 : static char bufs[4][50];
203 : static unsigned which = 0;
204 154612 : which = (which + 1) & 3;
205 154612 : char *buf = bufs[which];
206 :
207 154612 : if (isTemporary(fe)) {
208 5404 : JS_snprintf(buf, 50, "temp%d", fe - temporaries);
209 5404 : return buf;
210 : }
211 :
212 149208 : if (fe < a->callee_)
213 1117 : return "parent";
214 :
215 148091 : JS_ASSERT(fe >= a->callee_ && fe < a->sp);
216 :
217 148091 : if (fe == a->callee_)
218 129 : return "callee";
219 147962 : if (fe == a->this_)
220 2125 : return "'this'";
221 :
222 145837 : if (isArg(fe))
223 15151 : JS_snprintf(buf, 50, "arg%d", fe - a->args);
224 130686 : else if (isLocal(fe))
225 62395 : JS_snprintf(buf, 50, "local%d", fe - a->locals);
226 : else
227 68291 : JS_snprintf(buf, 50, "slot%d", fe - a->spBase);
228 145837 : return buf;
229 : }
230 : #endif
231 :
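/*
 * Free a register currently owned by a frame entry: sync the type or data
 * component it holds back to the entry's memory slot, mark that component
 * as being in memory, and forget the register's association.
 */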
232 : void
233 94321 : FrameState::evictReg(AnyRegisterID reg)
234 : {
235 94321 : FrameEntry *fe = regstate(reg).fe();
236 :
237 94321 : JaegerSpew(JSpew_Regalloc, "evicting %s from %s\n", entryName(fe), reg.name());
238 :
239 94321 : if (regstate(reg).type() == RematInfo::TYPE) {
240 36119 : syncType(fe);
241 36119 : fe->type.setMemory();
242 58202 : } else if (reg.isReg()) {
243 58190 : syncData(fe);
244 58190 : fe->data.setMemory();
245 : } else {
246 12 : syncFe(fe);
247 12 : fe->data.setMemory();
248 : }
249 :
250 94321 : regstate(reg).forget();
251 94321 : }
252 :
253 : inline Lifetime *
254 88700 : FrameState::variableLive(FrameEntry *fe, jsbytecode *pc) const
255 : {
256 : /*
257 : * Whether an argument, local or 'this' entry is live at pc. Note: this
258 : * does not account for the 'this' entry when the script is used as a
259 : * constructor, in which case it is live for the entire frame.
260 : */
261 88700 : JS_ASSERT(cx->typeInferenceEnabled());
262 88700 : JS_ASSERT(fe > a->callee_ && fe < a->spBase);
263 :
264 88700 : uint32_t offset = pc - a->script->code;
265 88700 : return a->analysis->liveness(entrySlot(fe)).live(offset);
266 : }
267 :
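/*
 * Rough priority used below: the callee entry, and variables only live in
 * later loop iterations (and not carried in a loop register), are evicted
 * immediately. Otherwise the fallback favors whichever value is needed
 * furthest in the future: a constructor 'this' that is never read again,
 * inlined-parent or temporary entries (treated as live to the end of the
 * script or loop), then the variable whose lifetime ends last. Registers
 * holding stack slots or copied entries are evicted only as a last resort.
 */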
268 : AnyRegisterID
269 54168 : FrameState::bestEvictReg(uint32_t mask, bool includePinned) const
270 : {
271 54168 : JS_ASSERT(cx->typeInferenceEnabled());
272 :
273 : /* Must be looking for a specific type of register. */
274 54168 : JS_ASSERT((mask & Registers::AvailRegs) != (mask & Registers::AvailFPRegs));
275 :
276 54168 : AnyRegisterID fallback;
277 54168 : uint32_t fallbackOffset = UINT32_MAX;
278 :
279 54168 : JaegerSpew(JSpew_Regalloc, "picking best register to evict:\n");
280 :
281 857470 : for (uint32_t i = 0; i < Registers::TotalAnyRegisters; i++) {
282 804032 : AnyRegisterID reg = AnyRegisterID::fromRaw(i);
283 :
284 : /* Register is not allocatable, don't bother. */
285 804032 : if (!(Registers::maskReg(reg) & mask))
286 486718 : continue;
287 :
288 : /* Register is not owned by the FrameState. */
289 317314 : FrameEntry *fe = includePinned ? regstate(reg).usedBy() : regstate(reg).fe();
290 317314 : if (!fe)
291 70769 : continue;
292 :
293 : /*
294 : * Liveness is not tracked for the callee or for stack slot frame entries.
295 : * The callee is evicted as early as needed, stack slots are evicted as
296 : * late as possible. :XXX: This is unfortunate if the stack slot lives
297 : * a long time (especially if it gets spilled anyway when we hit a branch).
298 : */
299 :
300 246545 : if (fe == a->callee_) {
301 129 : JaegerSpew(JSpew_Regalloc, "result: %s is callee\n", reg.name());
302 129 : return reg;
303 : }
304 :
305 246416 : if (fe >= a->spBase && !isTemporary(fe)) {
306 214218 : if (!fallback.isSet()) {
307 46905 : fallback = reg;
308 46905 : fallbackOffset = 0;
309 : }
310 214218 : JaegerSpew(JSpew_Regalloc, " %s is on stack\n", reg.name());
311 214218 : continue;
312 : }
313 :
314 : /* Prioritize keeping copied entries in registers. */
315 32198 : if (fe->isCopied()) {
316 14820 : if (!fallback.isSet()) {
317 4256 : fallback = reg;
318 4256 : fallbackOffset = 0;
319 : }
320 14820 : JaegerSpew(JSpew_Regalloc, " %s has copies\n", reg.name());
321 14820 : continue;
322 : }
323 :
324 17378 : if (isTemporary(fe) || (a->parent && fe < a->locals)) {
325 : /*
326 : * All temporaries we currently generate are for loop invariants,
327 : * which we treat as being live everywhere within the loop.
328 : * Additionally, if this is an inlined frame then any entries
329 : * belonging to parents are treated as live everywhere in the call.
330 : */
331 4668 : uint32_t offset = a->parent ? a->script->length : loop->backedgeOffset();
332 4668 : if (!fallback.isSet() || offset > fallbackOffset) {
333 1883 : fallback = reg;
334 1883 : fallbackOffset = offset;
335 : }
336 4668 : JaegerSpew(JSpew_Regalloc, " %s is a LICM or inline parent entry\n", reg.name());
337 4668 : continue;
338 : }
339 :
340 : /*
341 : * All entries still in registers should have a lifetime, except 'this'
342 : * in constructors which are not accessed later on.
343 : */
344 12710 : Lifetime *lifetime = variableLive(fe, a->PC);
345 :
346 12710 : if (!lifetime) {
347 17 : JS_ASSERT(isConstructorThis(fe));
348 17 : fallback = reg;
349 17 : fallbackOffset = a->script->length;
350 17 : JaegerSpew(JSpew_Regalloc, " %s is 'this' in a constructor\n", reg.name());
351 17 : continue;
352 : }
353 :
354 : /*
355 : * Evict variables which are only live in future loop iterations, and are
356 : * not carried around the loop in a register.
357 : */
358 12693 : if (lifetime->loopTail && (!loop || !loop->carriesLoopReg(fe))) {
359 : JaegerSpew(JSpew_Regalloc, "result: %s (%s) only live in later iterations\n",
360 601 : entryName(fe), reg.name());
361 601 : return reg;
362 : }
363 :
364 12092 : JaegerSpew(JSpew_Regalloc, " %s (%s): %u\n", entryName(fe), reg.name(), lifetime->end);
365 :
366 : /*
367 : * The best live register to evict is the one that will be live for the
368 : * longest time. This may need tweaking for variables that are used in
369 : * many places throughout their lifetime. Note that we don't pay attention
370 : * to whether the register is synced or not --- it is more efficient to
371 : * have things in registers when they're needed than to emit some extra
372 : * writes for things that won't be used again for a while.
373 : */
374 :
375 12092 : if (!fallback.isSet() || lifetime->end > fallbackOffset) {
376 7929 : fallback = reg;
377 7929 : fallbackOffset = lifetime->end;
378 : }
379 : }
380 :
381 53438 : JS_ASSERT(fallback.isSet());
382 :
383 53438 : JaegerSpew(JSpew_Regalloc, "result %s\n", fallback.name());
384 53438 : return fallback;
385 : }
386 :
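/*
 * Release any allocatable register whose frame entry is an argument, local
 * or 'this' with no live range at the current PC. The entry is fake-synced
 * (no store is emitted, the value is simply dropped) and the register is
 * returned to the free set.
 */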
387 : void
388 67369 : FrameState::evictDeadEntries(bool includePinned)
389 : {
390 1077904 : for (uint32_t i = 0; i < Registers::TotalAnyRegisters; i++) {
391 1010535 : AnyRegisterID reg = AnyRegisterID::fromRaw(i);
392 :
393 : /* Follow along with the same filters as bestEvictReg. */
394 :
395 1010535 : if (!(Registers::maskReg(reg) & Registers::AvailAnyRegs))
396 134738 : continue;
397 :
398 875797 : FrameEntry *fe = includePinned ? regstate(reg).usedBy() : regstate(reg).fe();
399 875797 : if (!fe)
400 587302 : continue;
401 :
402 334608 : if (fe == a->callee_ || isConstructorThis(fe) ||
403 46113 : fe >= a->spBase || fe->isCopied() || (a->parent && fe < a->locals)) {
404 268839 : continue;
405 : }
406 :
407 19656 : Lifetime *lifetime = variableLive(fe, a->PC);
408 19656 : if (lifetime)
409 17622 : continue;
410 :
411 : /*
412 : * If we are about to fake sync for an entry with known type, reset
413 : * that type. We don't want to regard it as correctly synced later.
414 : */
415 2034 : if (!fe->type.synced() && fe->isTypeKnown())
416 226 : fe->type.setMemory();
417 :
418 : /*
419 : * Mark the entry as synced to avoid emitting a store; we don't need
420 : * to keep this value around.
421 : */
422 2034 : fakeSync(fe);
423 2034 : if (regstate(reg).type() == RematInfo::DATA)
424 1491 : fe->data.setMemory();
425 : else
426 543 : fe->type.setMemory();
427 2034 : forgetReg(reg);
428 : }
429 67369 : }
430 :
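/*
 * Illustrative flow with type inference enabled, mirroring the body below:
 *
 *   evictDeadEntries(false);               // a dead local may free a reg
 *   if (freeRegs.hasRegInMask(mask))
 *       return freeRegs.takeAnyReg(mask);  // reuse the freed register
 *   AnyRegisterID reg = bestEvictReg(mask, false);
 *   evictReg(reg);                         // sync and forget the old owner
 *   return reg;
 *
 * Without inference, only GP registers are managed and a candidate whose
 * type or data is already synced is preferred so no spill code is needed.
 */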
431 : AnyRegisterID
432 89751 : FrameState::evictSomeReg(uint32_t mask)
433 : {
434 89751 : JS_ASSERT(!freeRegs.hasRegInMask(mask));
435 :
436 89751 : if (cx->typeInferenceEnabled()) {
437 54120 : evictDeadEntries(false);
438 :
439 54120 : if (freeRegs.hasRegInMask(mask)) {
440 : /* There was a register in use by a dead local variable. */
441 1102 : AnyRegisterID reg = freeRegs.takeAnyReg(mask);
442 1102 : modifyReg(reg);
443 1102 : return reg;
444 : }
445 :
446 53018 : AnyRegisterID reg = bestEvictReg(mask, false);
447 53018 : evictReg(reg);
448 53018 : return reg;
449 : }
450 :
451 : /* With inference disabled, only general purpose registers are managed. */
452 35631 : JS_ASSERT((mask & ~Registers::AvailRegs) == 0);
453 :
454 35631 : MaybeRegisterID fallback;
455 :
456 251186 : for (uint32_t i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
457 227803 : RegisterID reg = RegisterID(i);
458 :
459 : /* Register is not allocatable, don't bother. */
460 227803 : if (!(Registers::maskReg(reg) & mask))
461 53250 : continue;
462 :
463 : /* Register is not owned by the FrameState. */
464 174553 : FrameEntry *fe = regstate(reg).fe();
465 174553 : if (!fe)
466 32283 : continue;
467 :
468 : /* Try to find a candidate... that doesn't need spilling. */
469 142270 : fallback = reg;
470 :
471 142270 : if (regstate(reg).type() == RematInfo::TYPE && fe->type.synced()) {
472 6776 : fe->type.setMemory();
473 6776 : regstate(reg).forget();
474 6776 : return reg;
475 : }
476 135494 : if (regstate(reg).type() == RematInfo::DATA && fe->data.synced()) {
477 5472 : fe->data.setMemory();
478 5472 : regstate(reg).forget();
479 5472 : return reg;
480 : }
481 : }
482 :
483 23383 : evictReg(fallback.reg());
484 23383 : return fallback.reg();
485 : }
486 :
487 : void
488 604151 : FrameState::resetInternalState()
489 : {
490 1989827 : for (uint32_t i = 0; i < tracker.nentries; i++)
491 1385676 : tracker[i]->untrack();
492 :
493 604151 : tracker.reset();
494 604151 : freeRegs = Registers(Registers::AvailAnyRegs);
495 604151 : }
496 :
497 : void
498 102477 : FrameState::discardFrame()
499 : {
500 102477 : resetInternalState();
501 102477 : PodArrayZero(regstate_);
502 102477 : }
503 :
504 : FrameEntry *
505 155 : FrameState::snapshotState()
506 : {
507 : /* Everything can be recovered from a copy of the frame entries. */
508 155 : FrameEntry *snapshot = cx->array_new<FrameEntry>(nentries);
509 155 : if (!snapshot)
510 0 : return NULL;
511 155 : PodCopy(snapshot, entries, nentries);
512 155 : return snapshot;
513 : }
514 :
515 : void
516 323 : FrameState::restoreFromSnapshot(FrameEntry *snapshot)
517 : {
518 323 : discardFrame();
519 323 : PodCopy(entries, snapshot, nentries);
520 :
521 85329 : for (unsigned i = 0; i < nentries; i++) {
522 85006 : FrameEntry *fe = entries + i;
523 85006 : if (!fe->isTracked())
524 83158 : continue;
525 1848 : tracker.entries[fe->index_] = fe;
526 1848 : tracker.nentries = Max(tracker.nentries, fe->index_ + 1);
527 1848 : if (fe->isCopy())
528 518 : continue;
529 1330 : if (fe->type.inRegister()) {
530 24 : freeRegs.takeReg(fe->type.reg());
531 24 : regstate(fe->type.reg()).associate(fe, RematInfo::TYPE);
532 : }
533 1330 : if (fe->data.inRegister()) {
534 1048 : freeRegs.takeReg(fe->data.reg());
535 1048 : regstate(fe->data.reg()).associate(fe, RematInfo::DATA);
536 : }
537 1330 : if (fe->data.inFPRegister()) {
538 0 : freeRegs.takeReg(fe->data.fpreg());
539 0 : regstate(fe->data.fpreg()).associate(fe, RematInfo::DATA);
540 : }
541 : }
542 323 : }
543 :
544 : void
545 204022 : FrameState::forgetEverything()
546 : {
547 204022 : resetInternalState();
548 :
549 : #ifdef DEBUG
550 3264352 : for (uint32_t i = 0; i < Registers::TotalAnyRegisters; i++) {
551 3060330 : AnyRegisterID reg = AnyRegisterID::fromRaw(i);
552 3060330 : JS_ASSERT(!regstate(reg).usedBy());
553 : }
554 : #endif
555 204022 : }
556 :
557 : #ifdef DEBUG
558 : void
559 0 : FrameState::dumpAllocation(RegisterAllocation *alloc)
560 : {
561 0 : JS_ASSERT(cx->typeInferenceEnabled());
562 0 : for (unsigned i = 0; i < Registers::TotalAnyRegisters; i++) {
563 0 : AnyRegisterID reg = AnyRegisterID::fromRaw(i);
564 0 : if (alloc->assigned(reg)) {
565 0 : printf(" (%s: %s%s)", reg.name(), entryName(entries + alloc->index(reg)),
566 0 : alloc->synced(reg) ? "" : " unsynced");
567 : }
568 : }
569 0 : printf("\n");
570 0 : }
571 : #endif
572 :
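/*
 * Compute the register allocation to use at the join point 'target'. At
 * safe points and chunk boundaries an empty allocation is returned, i.e.
 * everything is synced. Otherwise only data registers are carried across
 * the join (type registers are always synced): a register is recorded if
 * it holds a parent-frame entry, a constructor 'this', a temporary still
 * within its loop, or an arg, local or 'this' which is live at the target,
 * taking care that FP-register entries are still known doubles there.
 */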
573 : RegisterAllocation *
574 147124 : FrameState::computeAllocation(jsbytecode *target)
575 : {
576 147124 : JS_ASSERT(cx->typeInferenceEnabled());
577 147124 : RegisterAllocation *alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
578 147124 : if (!alloc) {
579 0 : js_ReportOutOfMemory(cx);
580 0 : return NULL;
581 : }
582 :
583 : /*
584 : * State must be synced at exception and switch targets, at traps and when
585 : * crossing between compilation chunks.
586 : */
587 435265 : if (a->analysis->getCode(target).safePoint ||
588 288141 : (!a->parent && !cc.bytecodeInChunk(target))) {
589 : #ifdef DEBUG
590 1161 : if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
591 0 : JaegerSpew(JSpew_Regalloc, "allocation at %u:", unsigned(target - a->script->code));
592 0 : dumpAllocation(alloc);
593 : }
594 : #endif
595 1161 : return alloc;
596 : }
597 :
598 : /*
599 : * The allocation to use at the target consists of all parent, temporary
600 : * and non-stack entries currently in registers which are live at target.
601 : */
602 145963 : Registers regs = Registers::AvailAnyRegs;
603 2189445 : while (!regs.empty()) {
604 1897519 : AnyRegisterID reg = regs.takeAnyReg();
605 1897519 : if (freeRegs.hasReg(reg) || regstate(reg).type() == RematInfo::TYPE)
606 1806646 : continue;
607 90873 : FrameEntry *fe = regstate(reg).fe();
608 285627 : if (fe < a->callee_ ||
609 88356 : isConstructorThis(fe) ||
610 52891 : (fe > a->callee_ && fe < a->spBase && variableLive(fe, target)) ||
611 53507 : (isTemporary(fe) && (a->parent || uint32_t(target - a->script->code) <= loop->backedgeOffset()))) {
612 : /*
613 : * For entries currently in floating point registers, check they
614 : * are known to be doubles at the target. We don't need to do this
615 : * for entries in normal registers, as fixDoubleTypes must have been
616 : * called to convert them to floats.
617 : */
618 38845 : if (!reg.isReg() && !isTemporary(fe) && fe >= a->callee_ && fe < a->spBase) {
619 486 : if (!a->analysis->trackSlot(entrySlot(fe)))
620 0 : continue;
621 486 : bool nonDoubleTarget = false;
622 486 : const SlotValue *newv = a->analysis->newValues(target);
623 1651 : while (newv && newv->slot) {
624 2003 : if (newv->value.kind() == SSAValue::PHI &&
625 662 : newv->value.phiOffset() == uint32_t(target - a->script->code) &&
626 662 : newv->slot == entrySlot(fe)) {
627 123 : types::TypeSet *types = a->analysis->getValueTypes(newv->value);
628 123 : if (types->getKnownTypeTag(cx) != JSVAL_TYPE_DOUBLE)
629 0 : nonDoubleTarget = true;
630 : }
631 679 : newv++;
632 : }
633 486 : if (nonDoubleTarget)
634 0 : continue;
635 : }
636 38845 : alloc->set(reg, fe - entries, fe->data.synced());
637 : }
638 : }
639 :
640 : #ifdef DEBUG
641 145963 : if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
642 0 : JaegerSpew(JSpew_Regalloc, "allocation at %u:", unsigned(target - a->script->code));
643 0 : dumpAllocation(alloc);
644 : }
645 : #endif
646 :
647 145963 : return alloc;
648 : }
649 :
650 : void
651 2896 : FrameState::relocateReg(AnyRegisterID reg, RegisterAllocation *alloc, Uses uses)
652 : {
653 2896 : JS_ASSERT(cx->typeInferenceEnabled());
654 :
655 : /*
656 : * The reg needs to be freed to make room for a variable carried across
657 : * a branch. Either evict its entry, or try to move it to a different
658 : * register if it is needed to test the branch condition. :XXX: could also
659 : * watch for variables which are carried across the branch but are in
660 : * the register for a different carried entry; we just spill these for now.
661 : */
662 2896 : JS_ASSERT(!freeRegs.hasReg(reg));
663 :
664 5218 : for (unsigned i = 0; i < uses.nuses; i++) {
665 3955 : FrameEntry *fe = peek(-1 - i);
666 3955 : if (fe->isCopy())
667 1959 : fe = fe->copyOf();
668 3955 : if (reg.isReg() && fe->data.inRegister() && fe->data.reg() == reg.reg()) {
669 1633 : pinReg(reg);
670 1633 : RegisterID nreg = allocReg();
671 1633 : unpinReg(reg);
672 :
673 1633 : JaegerSpew(JSpew_Regalloc, "relocating %s\n", reg.name());
674 :
675 1633 : masm.move(reg.reg(), nreg);
676 1633 : regstate(reg).forget();
677 1633 : regstate(nreg).associate(fe, RematInfo::DATA);
678 1633 : fe->data.setRegister(nreg);
679 1633 : freeRegs.putReg(reg);
680 1633 : return;
681 : }
682 : }
683 :
684 1263 : JaegerSpew(JSpew_Regalloc, "could not relocate %s\n", reg.name());
685 :
686 1263 : takeReg(reg);
687 1263 : freeRegs.putReg(reg);
688 : }
689 :
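/*
 * Make the current state compatible with the branch target. With type
 * inference enabled this computes (or reuses) the RegisterAllocation stored
 * for the target and defers to syncForAllocation; otherwise everything is
 * simply synced and forgotten.
 */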
690 : bool
691 328280 : FrameState::syncForBranch(jsbytecode *target, Uses uses)
692 : {
693 : /* There should be no unowned or pinned registers. */
694 : #ifdef DEBUG
695 328280 : Registers checkRegs(Registers::AvailAnyRegs);
696 4924200 : while (!checkRegs.empty()) {
697 4267640 : AnyRegisterID reg = checkRegs.takeAnyReg();
698 4267640 : JS_ASSERT_IF(!freeRegs.hasReg(reg), regstate(reg).fe());
699 : }
700 : #endif
701 :
702 328280 : if (!cx->typeInferenceEnabled()) {
703 127261 : syncAndForgetEverything();
704 127261 : return true;
705 : }
706 :
707 201019 : RegisterAllocation *&alloc = a->analysis->getAllocation(target);
708 201019 : if (!alloc) {
709 146780 : alloc = computeAllocation(target);
710 146780 : if (!alloc)
711 0 : return false;
712 : }
713 :
714 201019 : syncForAllocation(alloc, false, uses);
715 :
716 201019 : return true;
717 : }
718 :
719 : void
720 201812 : FrameState::syncForAllocation(RegisterAllocation *alloc, bool inlineReturn, Uses uses)
721 : {
722 : /*
723 : * First pass. Sync all entries which will not be carried in a register,
724 : * and uncopy everything except values popped by the branch or before the
725 : * call returns.
726 : */
727 :
728 201812 : FrameEntry *topEntry = NULL;
729 201812 : if (inlineReturn)
730 793 : topEntry = a->parent->sp - (GET_ARGC(a->parent->PC) + 2);
731 :
732 975111 : for (uint32_t i = tracker.nentries - 1; i < tracker.nentries; i--) {
733 773299 : FrameEntry *fe = tracker[i];
734 :
735 773299 : if (deadEntry(fe, uses.nuses))
736 464427 : continue;
737 308872 : if (inlineReturn && fe >= topEntry && !isTemporary(fe)) {
738 : /*
739 : * The return value has already been stored, so there is no need to
740 : * keep any of the entries for this frame or for values popped once
741 : * the call returns intact. Forcibly evict any registers for these,
742 : * so that we don't emit sync code for them if we need a register
743 : * in syncFe below.
744 : */
745 1775 : forgetAllRegs(fe);
746 1775 : fe->resetSynced();
747 1775 : continue;
748 : }
749 :
750 : /* Force syncs for locals which are dead at the current PC. */
751 307097 : if (isLocal(fe) && !fe->copied && !a->analysis->slotEscapes(entrySlot(fe))) {
752 87084 : Lifetime *lifetime = a->analysis->liveness(entrySlot(fe)).live(a->PC - a->script->code);
753 87084 : if (!lifetime)
754 38420 : fakeSync(fe);
755 : }
756 :
757 : /* If returning from a script, fake syncs for dead locals in the immediate parent. */
758 307573 : if (inlineReturn && fe >= a->parent->locals &&
759 : fe - a->parent->locals < a->parent->script->nfixed &&
760 476 : !a->parent->analysis->slotEscapes(frameSlot(a->parent, fe))) {
761 476 : const LifetimeVariable &var = a->parent->analysis->liveness(frameSlot(a->parent, fe));
762 476 : Lifetime *lifetime = var.live(a->parent->PC - a->parent->script->code);
763 476 : if (!lifetime)
764 47 : fakeSync(fe);
765 : }
766 :
767 307097 : if (!fe->isCopy() && alloc->hasAnyReg(fe - entries)) {
768 : /* Types are always synced, except for known doubles. */
769 51861 : if (!fe->isType(JSVAL_TYPE_DOUBLE))
770 51262 : syncType(fe);
771 : } else {
772 255236 : syncFe(fe);
773 255236 : if (fe->isCopy())
774 3822 : fe->resetSynced();
775 : }
776 : }
777 :
778 : /*
779 : * Second pass. Move entries carried in registers to the right register
780 : * provided no value used in the branch is evicted. After this pass,
781 : * everything will either be in the right register or will be in memory.
782 : */
783 :
784 201812 : Registers regs = Registers(Registers::AvailAnyRegs);
785 3027180 : while (!regs.empty()) {
786 2623556 : AnyRegisterID reg = regs.takeAnyReg();
787 2623556 : if (!alloc->assigned(reg))
788 2568464 : continue;
789 55092 : FrameEntry *fe = getOrTrack(alloc->index(reg));
790 55092 : JS_ASSERT(!fe->isCopy());
791 :
792 55092 : JS_ASSERT_IF(!fe->isType(JSVAL_TYPE_DOUBLE), fe->type.synced());
793 55092 : if (!fe->data.synced() && alloc->synced(reg))
794 3983 : syncFe(fe);
795 :
796 55092 : if (fe->dataInRegister(reg))
797 46933 : continue;
798 :
799 8159 : if (!freeRegs.hasReg(reg))
800 2896 : relocateReg(reg, alloc, uses);
801 :
802 8159 : if (reg.isReg()) {
803 8059 : RegisterID nreg = reg.reg();
804 8059 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
805 0 : JS_ASSERT(!a->analysis->trackSlot(entrySlot(fe)));
806 0 : syncFe(fe);
807 0 : forgetAllRegs(fe);
808 0 : fe->type.setMemory();
809 0 : fe->data.setMemory();
810 : }
811 8059 : if (fe->data.inMemory()) {
812 4969 : masm.loadPayload(addressOf(fe), nreg);
813 3090 : } else if (fe->isConstant()) {
814 7 : masm.loadValuePayload(fe->getValue(), nreg);
815 : } else {
816 3083 : JS_ASSERT(fe->data.inRegister() && fe->data.reg() != nreg);
817 3083 : masm.move(fe->data.reg(), nreg);
818 3083 : freeRegs.putReg(fe->data.reg());
819 3083 : regstate(fe->data.reg()).forget();
820 : }
821 8059 : fe->data.setRegister(nreg);
822 : } else {
823 100 : FPRegisterID nreg = reg.fpreg();
824 100 : JS_ASSERT(!fe->isNotType(JSVAL_TYPE_DOUBLE));
825 100 : if (!fe->isTypeKnown())
826 88 : learnType(fe, JSVAL_TYPE_DOUBLE, false);
827 100 : if (fe->data.inMemory()) {
828 90 : masm.loadDouble(addressOf(fe), nreg);
829 10 : } else if (fe->isConstant()) {
830 2 : masm.slowLoadConstantDouble(fe->getValue().toDouble(), nreg);
831 : } else {
832 8 : JS_ASSERT(fe->data.inFPRegister() && fe->data.fpreg() != nreg);
833 8 : masm.moveDouble(fe->data.fpreg(), nreg);
834 8 : freeRegs.putReg(fe->data.fpreg());
835 8 : regstate(fe->data.fpreg()).forget();
836 : }
837 100 : fe->data.setFPRegister(nreg);
838 : }
839 :
840 8159 : freeRegs.takeReg(reg);
841 8159 : regstate(reg).associate(fe, RematInfo::DATA);
842 : }
843 201812 : }
844 :
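/*
 * Throw away the current register state at a join point and adopt the
 * allocation previously computed for it: each register named by the
 * allocation is marked as holding the data of its entry (unsynced where
 * the allocation says so), and the stack depth is reset.
 */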
845 : bool
846 297652 : FrameState::discardForJoin(RegisterAllocation *&alloc, uint32_t stackDepth)
847 : {
848 297652 : if (!cx->typeInferenceEnabled()) {
849 106611 : resetInternalState();
850 106611 : PodArrayZero(regstate_);
851 106611 : a->sp = a->spBase + stackDepth;
852 106611 : return true;
853 : }
854 :
855 191041 : if (!alloc) {
856 : /*
857 : * This shows up for loop entries which are not reachable from the
858 : * loop head, and for exception, switch target and trap safe points.
859 : */
860 3957 : alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
861 3957 : if (!alloc) {
862 0 : js_ReportOutOfMemory(cx);
863 0 : return false;
864 : }
865 : }
866 :
867 191041 : resetInternalState();
868 191041 : PodArrayZero(regstate_);
869 :
870 191041 : Registers regs(Registers::AvailAnyRegs);
871 2865615 : while (!regs.empty()) {
872 2483533 : AnyRegisterID reg = regs.takeAnyReg();
873 2483533 : if (!alloc->assigned(reg))
874 2444735 : continue;
875 38798 : FrameEntry *fe = getOrTrack(alloc->index(reg));
876 :
877 38798 : freeRegs.takeReg(reg);
878 :
879 : /*
880 : * We can't look at the type of the fe as we haven't restored analysis types yet,
881 : * but if this is an FP reg it will be set to double type.
882 : */
883 38798 : if (reg.isReg()) {
884 38289 : fe->data.setRegister(reg.reg());
885 : } else {
886 509 : fe->setType(JSVAL_TYPE_DOUBLE);
887 509 : fe->data.setFPRegister(reg.fpreg());
888 : }
889 :
890 38798 : regstate(reg).associate(fe, RematInfo::DATA);
891 38798 : if (!alloc->synced(reg)) {
892 16511 : fe->data.unsync();
893 16511 : if (!reg.isReg())
894 269 : fe->type.unsync();
895 : }
896 : }
897 :
898 191041 : a->sp = a->spBase + stackDepth;
899 :
900 310120 : for (unsigned i = 0; i < stackDepth; i++)
901 119079 : extraArray[a->spBase + i - entries].reset();
902 :
903 191041 : return true;
904 : }
905 :
906 : bool
907 289883 : FrameState::consistentRegisters(jsbytecode *target)
908 : {
909 289883 : if (!cx->typeInferenceEnabled()) {
910 63273 : JS_ASSERT(freeRegs.freeMask == Registers::AvailAnyRegs);
911 63273 : return true;
912 : }
913 :
914 : /*
915 : * Before calling this, either the entire state should have been synced or
916 : * syncForBranch should have been called. These will ensure that any FE
917 : * which is not consistent with the target's register state has already
918 : * been synced, and no stores will need to be issued by prepareForJump.
919 : */
920 226610 : RegisterAllocation *alloc = a->analysis->getAllocation(target);
921 226610 : JS_ASSERT(alloc);
922 :
923 226610 : Registers regs(Registers::AvailAnyRegs);
924 226610 : while (!regs.empty()) {
925 2920552 : AnyRegisterID reg = regs.takeAnyReg();
926 2920552 : if (alloc->assigned(reg)) {
927 61145 : FrameEntry *needed = getOrTrack(alloc->index(reg));
928 61145 : if (!freeRegs.hasReg(reg)) {
929 54343 : FrameEntry *fe = regstate(reg).fe();
930 54343 : if (fe != needed)
931 14 : return false;
932 : } else {
933 6802 : return false;
934 : }
935 : }
936 : }
937 :
938 219794 : return true;
939 : }
940 :
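/*
 * Emit loads into 'masm' so that the registers named by the target's
 * allocation hold the right values on this edge. Callers have either fully
 * synced the state or called syncForBranch first, so every needed value
 * can be loaded from its memory address.
 */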
941 : void
942 83298 : FrameState::prepareForJump(jsbytecode *target, Assembler &masm, bool synced)
943 : {
944 83298 : if (!cx->typeInferenceEnabled())
945 0 : return;
946 :
947 83298 : JS_ASSERT_IF(!synced, !consistentRegisters(target));
948 :
949 83298 : RegisterAllocation *alloc = a->analysis->getAllocation(target);
950 83298 : JS_ASSERT(alloc);
951 :
952 83298 : Registers regs = 0;
953 :
954 83298 : regs = Registers(Registers::AvailAnyRegs);
955 1249470 : while (!regs.empty()) {
956 1082874 : AnyRegisterID reg = regs.takeAnyReg();
957 1082874 : if (!alloc->assigned(reg))
958 1020677 : continue;
959 :
960 62197 : const FrameEntry *fe = getOrTrack(alloc->index(reg));
961 62197 : if (synced || !fe->backing()->dataInRegister(reg)) {
962 62154 : JS_ASSERT_IF(!synced, fe->data.synced());
963 62154 : if (reg.isReg())
964 61628 : masm.loadPayload(addressOf(fe), reg.reg());
965 : else
966 526 : masm.loadDouble(addressOf(fe), reg.fpreg());
967 : }
968 : }
969 : }
970 :
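/*
 * Store the value of 'fe' to an arbitrary address. Constants and doubles
 * take fast paths; otherwise the payload and type tag are written from
 * registers, loading any component that is only in memory into a scratch
 * register first (freed again if 'popped' indicates the entry is going
 * away).
 */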
971 : void
972 235574 : FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
973 : {
974 235574 : if (fe->isConstant()) {
975 84088 : masm.storeValue(fe->getValue(), address);
976 84088 : return;
977 : }
978 :
979 151486 : if (fe->isCopy())
980 2682 : fe = fe->copyOf();
981 :
982 151486 : JS_ASSERT(!freeRegs.hasReg(address.base));
983 :
984 : /* If loading from memory, ensure destination differs. */
985 260570 : JS_ASSERT_IF((fe->type.inMemory() || fe->data.inMemory()),
986 : addressOf(fe).base != address.base ||
987 260570 : addressOf(fe).offset != address.offset);
988 :
989 151486 : if (fe->data.inFPRegister()) {
990 2430 : masm.storeDouble(fe->data.fpreg(), address);
991 2430 : return;
992 : }
993 :
994 149056 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
995 101 : JS_ASSERT(fe->data.inMemory());
996 101 : masm.loadDouble(addressOf(fe), Registers::FPConversionTemp);
997 101 : masm.storeDouble(Registers::FPConversionTemp, address);
998 101 : return;
999 : }
1000 :
1001 : /* Don't clobber the address's register. */
1002 148955 : bool pinAddressReg = !!regstate(address.base).fe();
1003 148955 : if (pinAddressReg)
1004 1358 : pinReg(address.base);
1005 :
1006 : #if defined JS_PUNBOX64
1007 : if (fe->type.inMemory() && fe->data.inMemory()) {
1008 : /* Future optimization: track that the Value is in a register. */
1009 : RegisterID vreg = Registers::ValueReg;
1010 : masm.loadPtr(addressOf(fe), vreg);
1011 : masm.storePtr(vreg, address);
1012 : if (pinAddressReg)
1013 : unpinReg(address.base);
1014 : return;
1015 : }
1016 :
1017 : JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
1018 :
1019 : /*
1020 : * If dreg is obtained via allocReg(), then calling
1021 : * pinReg() trips an assertion. But in all other cases,
1022 : * calling pinReg() is necessary in the fe->type.inMemory() path.
1023 : * Remember whether pinReg() can be safely called.
1024 : */
1025 : bool canPinDreg = true;
1026 : bool wasInRegister = fe->data.inRegister();
1027 :
1028 : /* Get a register for the payload. */
1029 : MaybeRegisterID dreg;
1030 : if (fe->data.inRegister()) {
1031 : dreg = fe->data.reg();
1032 : } else {
1033 : JS_ASSERT(fe->data.inMemory());
1034 : if (popped) {
1035 : dreg = allocReg();
1036 : masm.loadPayload(addressOf(fe), dreg.reg());
1037 : canPinDreg = false;
1038 : } else {
1039 : dreg = allocAndLoadReg(fe, false, RematInfo::DATA).reg();
1040 : fe->data.setRegister(dreg.reg());
1041 : }
1042 : }
1043 :
1044 : /* Store the Value. */
1045 : if (fe->type.inRegister()) {
1046 : masm.storeValueFromComponents(fe->type.reg(), dreg.reg(), address);
1047 : } else if (fe->isTypeKnown()) {
1048 : masm.storeValueFromComponents(ImmType(fe->getKnownType()), dreg.reg(), address);
1049 : } else {
1050 : JS_ASSERT(fe->type.inMemory());
1051 : if (canPinDreg)
1052 : pinReg(dreg.reg());
1053 :
1054 : RegisterID treg;
1055 : if (popped) {
1056 : treg = allocReg();
1057 : masm.loadTypeTag(addressOf(fe), treg);
1058 : } else {
1059 : treg = allocAndLoadReg(fe, false, RematInfo::TYPE).reg();
1060 : }
1061 : masm.storeValueFromComponents(treg, dreg.reg(), address);
1062 :
1063 : if (popped)
1064 : freeReg(treg);
1065 : else
1066 : fe->type.setRegister(treg);
1067 :
1068 : if (canPinDreg)
1069 : unpinReg(dreg.reg());
1070 : }
1071 :
1072 : /* If register is untracked, free it. */
1073 : if (!wasInRegister && popped)
1074 : freeReg(dreg.reg());
1075 :
1076 : #elif defined JS_NUNBOX32
1077 :
1078 148955 : if (fe->data.inRegister()) {
1079 120929 : masm.storePayload(fe->data.reg(), address);
1080 : } else {
1081 28026 : JS_ASSERT(fe->data.inMemory());
1082 : RegisterID reg;
1083 28026 : if (popped) {
1084 25947 : reg = allocReg();
1085 25947 : masm.loadPayload(addressOf(fe), reg);
1086 : } else {
1087 2079 : reg = allocAndLoadReg(fe, false, RematInfo::DATA).reg();
1088 : }
1089 28026 : masm.storePayload(reg, address);
1090 28026 : if (popped)
1091 25947 : freeReg(reg);
1092 : else
1093 2079 : fe->data.setRegister(reg);
1094 : }
1095 :
1096 148955 : if (fe->isTypeKnown()) {
1097 67542 : masm.storeTypeTag(ImmType(fe->getKnownType()), address);
1098 81413 : } else if (fe->type.inRegister()) {
1099 38891 : masm.storeTypeTag(fe->type.reg(), address);
1100 : } else {
1101 42522 : JS_ASSERT(fe->type.inMemory());
1102 : RegisterID reg;
1103 42522 : if (popped) {
1104 41376 : reg = allocReg();
1105 41376 : masm.loadTypeTag(addressOf(fe), reg);
1106 : } else {
1107 1146 : reg = allocAndLoadReg(fe, false, RematInfo::TYPE).reg();
1108 : }
1109 42522 : masm.storeTypeTag(reg, address);
1110 42522 : if (popped)
1111 41376 : freeReg(reg);
1112 : else
1113 1146 : fe->type.setRegister(reg);
1114 : }
1115 : #endif
1116 148955 : if (pinAddressReg)
1117 1358 : unpinReg(address.base);
1118 : }
1119 :
1120 : void
1121 492 : FrameState::loadThisForReturn(RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
1122 : {
1123 492 : return loadForReturn(getThis(), typeReg, dataReg, tempReg);
1124 : }
1125 :
1126 10573 : void FrameState::loadForReturn(FrameEntry *fe, RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
1127 : {
1128 10573 : JS_ASSERT(dataReg != typeReg && dataReg != tempReg && typeReg != tempReg);
1129 :
1130 10573 : if (fe->isConstant()) {
1131 2281 : masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
1132 2281 : return;
1133 : }
1134 :
1135 8292 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
1136 335 : FPRegisterID fpreg = tempFPRegForData(fe);
1137 335 : masm.breakDouble(fpreg, typeReg, dataReg);
1138 335 : return;
1139 : }
1140 :
1141 7957 : if (fe->isCopy())
1142 1909 : fe = fe->copyOf();
1143 :
1144 7957 : MaybeRegisterID maybeType = maybePinType(fe);
1145 7957 : MaybeRegisterID maybeData = maybePinData(fe);
1146 :
1147 7957 : if (fe->isTypeKnown()) {
1148 : // If the data is in memory, or in the wrong reg, load/move it.
1149 3232 : if (!maybeData.isSet())
1150 1045 : masm.loadPayload(addressOf(fe), dataReg);
1151 2187 : else if (maybeData.reg() != dataReg)
1152 1317 : masm.move(maybeData.reg(), dataReg);
1153 3232 : masm.move(ImmType(fe->getKnownType()), typeReg);
1154 3232 : return;
1155 : }
1156 :
1157 : // If both halves of the value are in memory, make this easier and load
1158 : // both pieces into their respective registers.
1159 4725 : if (fe->type.inMemory() && fe->data.inMemory()) {
1160 1636 : masm.loadValueAsComponents(addressOf(fe), typeReg, dataReg);
1161 1636 : return;
1162 : }
1163 :
1164 : // Now, we should be guaranteed that at least one part is in a register.
1165 3089 : JS_ASSERT(maybeType.isSet() || maybeData.isSet());
1166 :
1167 : // Make sure we have two registers while making sure not to clobber either half.
1168 : // Here we are allowed to mess up the FrameState invariants, because this
1169 : // is specialized code for a path that is about to discard the entire frame.
1170 3089 : if (!maybeType.isSet()) {
1171 277 : JS_ASSERT(maybeData.isSet());
1172 277 : if (maybeData.reg() != typeReg)
1173 251 : maybeType = typeReg;
1174 : else
1175 26 : maybeType = tempReg;
1176 277 : masm.loadTypeTag(addressOf(fe), maybeType.reg());
1177 2812 : } else if (!maybeData.isSet()) {
1178 3 : JS_ASSERT(maybeType.isSet());
1179 3 : if (maybeType.reg() != dataReg)
1180 2 : maybeData = dataReg;
1181 : else
1182 1 : maybeData = tempReg;
1183 3 : masm.loadPayload(addressOf(fe), maybeData.reg());
1184 : }
1185 :
1186 3089 : RegisterID type = maybeType.reg();
1187 3089 : RegisterID data = maybeData.reg();
1188 :
1189 3089 : if (data == typeReg && type == dataReg) {
1190 213 : masm.move(type, tempReg);
1191 213 : masm.move(data, dataReg);
1192 213 : masm.move(tempReg, typeReg);
1193 2876 : } else if (data != dataReg) {
1194 583 : if (type == typeReg) {
1195 251 : masm.move(data, dataReg);
1196 332 : } else if (type != dataReg) {
1197 298 : masm.move(data, dataReg);
1198 298 : if (type != typeReg)
1199 298 : masm.move(type, typeReg);
1200 : } else {
1201 34 : JS_ASSERT(data != typeReg);
1202 34 : masm.move(type, typeReg);
1203 34 : masm.move(data, dataReg);
1204 : }
1205 2293 : } else if (type != typeReg) {
1206 79 : masm.move(type, typeReg);
1207 : }
1208 : }
1209 :
1210 : #ifdef DEBUG
1211 : void
1212 8145898 : FrameState::assertValidRegisterState() const
1213 : {
1214 8145898 : Registers checkedFreeRegs(Registers::AvailAnyRegs);
1215 :
1216 : /* Check that copied and copy info balance out. */
1217 8145898 : int32_t copyCount = 0;
1218 :
1219 425283296 : for (uint32_t i = 0; i < tracker.nentries; i++) {
1220 417137398 : FrameEntry *fe = tracker[i];
1221 417137398 : if (deadEntry(fe))
1222 10378200 : continue;
1223 :
1224 406759198 : JS_ASSERT(i == fe->trackerIndex());
1225 :
1226 406759198 : if (fe->isCopy()) {
1227 1439849 : JS_ASSERT_IF(!fe->copyOf()->temporary, fe > fe->copyOf());
1228 1439849 : JS_ASSERT(fe->trackerIndex() > fe->copyOf()->trackerIndex());
1229 1439849 : JS_ASSERT(!deadEntry(fe->copyOf()));
1230 1439849 : JS_ASSERT(fe->copyOf()->isCopied());
1231 1439849 : JS_ASSERT(!fe->isCopied());
1232 1439849 : copyCount--;
1233 1439849 : continue;
1234 : }
1235 :
1236 405319349 : copyCount += fe->copied;
1237 :
1238 405319349 : if (fe->type.inRegister()) {
1239 3326115 : checkedFreeRegs.takeReg(fe->type.reg());
1240 3326115 : JS_ASSERT(regstate(fe->type.reg()).fe() == fe);
1241 3326115 : JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
1242 : }
1243 405319349 : if (fe->data.inRegister()) {
1244 7643149 : checkedFreeRegs.takeReg(fe->data.reg());
1245 7643149 : JS_ASSERT(regstate(fe->data.reg()).fe() == fe);
1246 7643149 : JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
1247 : }
1248 405319349 : if (fe->data.inFPRegister()) {
1249 130279 : JS_ASSERT(fe->isType(JSVAL_TYPE_DOUBLE));
1250 130279 : checkedFreeRegs.takeReg(fe->data.fpreg());
1251 130279 : JS_ASSERT(regstate(fe->data.fpreg()).fe() == fe);
1252 : }
1253 : }
1254 :
1255 8145898 : JS_ASSERT(copyCount == 0);
1256 8145898 : JS_ASSERT(checkedFreeRegs == freeRegs);
1257 :
1258 73313082 : for (uint32_t i = 0; i < Registers::TotalRegisters; i++) {
1259 65167184 : AnyRegisterID reg = (RegisterID) i;
1260 65167184 : JS_ASSERT(!regstate(reg).isPinned());
1261 65167184 : JS_ASSERT_IF(regstate(reg).fe(), !freeRegs.hasReg(reg));
1262 65167184 : JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).fe()->isTracked());
1263 : }
1264 :
1265 65167184 : for (uint32_t i = 0; i < Registers::TotalFPRegisters; i++) {
1266 57021286 : AnyRegisterID reg = (FPRegisterID) i;
1267 57021286 : JS_ASSERT(!regstate(reg).isPinned());
1268 57021286 : JS_ASSERT_IF(regstate(reg).fe(), !freeRegs.hasReg(reg));
1269 57021286 : JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).fe()->isTracked());
1270 57021286 : JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).type() == RematInfo::DATA);
1271 : }
1272 8145898 : }
1273 : #endif
1274 :
1275 : #if defined JS_NUNBOX32
1276 : void
1277 75565 : FrameState::syncFancy(Assembler &masm, Registers avail, int trackerIndex) const
1278 : {
1279 75565 : reifier.reset(&masm, avail, a->sp, entries);
1280 :
1281 512547 : for (; trackerIndex >= 0; trackerIndex--) {
1282 436982 : FrameEntry *fe = tracker[trackerIndex];
1283 436982 : if (fe >= a->sp)
1284 11457 : continue;
1285 :
1286 425525 : reifier.sync(fe);
1287 : }
1288 75565 : }
1289 :
1290 : #endif
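/*
 * Emit stores into 'masm' so that everything below the stack pointer is
 * also in memory, without changing the register state of this FrameState.
 * Register components are synced first, then constants and copies; on
 * NUNBOX32, copies whose backing store would require a load fall back to
 * the slower syncFancy path.
 */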
1291 : void
1292 2107379 : FrameState::sync(Assembler &masm, Uses uses) const
1293 : {
1294 2107379 : if (!entries)
1295 0 : return;
1296 :
1297 : /* Sync all registers up-front. */
1298 2107379 : Registers allRegs(Registers::AvailAnyRegs);
1299 31610685 : while (!allRegs.empty()) {
1300 27395927 : AnyRegisterID reg = allRegs.takeAnyReg();
1301 27395927 : FrameEntry *fe = regstate(reg).usedBy();
1302 27395927 : if (!fe)
1303 22008955 : continue;
1304 :
1305 5386972 : JS_ASSERT(fe->isTracked());
1306 :
1307 : #if defined JS_PUNBOX64
1308 : /* Sync entire FE to prevent loads. */
1309 : ensureFeSynced(fe, masm);
1310 :
1311 : /* Take the other register in the pair, if one exists. */
1312 : if (regstate(reg).type() == RematInfo::DATA && fe->type.inRegister())
1313 : allRegs.takeReg(fe->type.reg());
1314 : else if (regstate(reg).type() == RematInfo::TYPE && fe->data.inRegister())
1315 : allRegs.takeReg(fe->data.reg());
1316 : #elif defined JS_NUNBOX32
1317 : /* Sync register if unsynced. */
1318 5386972 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
1319 27733 : ensureFeSynced(fe, masm);
1320 5359239 : } else if (regstate(reg).type() == RematInfo::DATA) {
1321 3174394 : JS_ASSERT(fe->data.reg() == reg.reg());
1322 3174394 : ensureDataSynced(fe, masm);
1323 : } else {
1324 2184845 : JS_ASSERT(fe->type.reg() == reg.reg());
1325 2184845 : ensureTypeSynced(fe, masm);
1326 : }
1327 : #endif
1328 : }
1329 :
1330 : /*
1331 : * Keep track of free registers using a bitmask. If we have to drop into
1332 : * syncFancy(), then this mask will help avoid eviction.
1333 : */
1334 2107379 : Registers avail(freeRegs.freeMask & Registers::AvailRegs);
1335 2107379 : Registers temp(Registers::TempAnyRegs);
1336 :
1337 2107379 : unsigned nentries = tracker.nentries;
1338 10025827 : for (int trackerIndex = nentries - 1; trackerIndex >= 0; trackerIndex--) {
1339 7994013 : JS_ASSERT(tracker.nentries == nentries);
1340 7994013 : FrameEntry *fe = tracker[trackerIndex];
1341 7994013 : if (fe >= a->sp)
1342 1918725 : continue;
1343 :
1344 6075288 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
1345 : /* Copies of in-memory doubles can be synced without spilling. */
1346 40068 : if (fe->isCopy() || !fe->data.inFPRegister())
1347 12689 : ensureFeSynced(fe, masm);
1348 40068 : continue;
1349 : }
1350 :
1351 6035220 : if (!fe->isCopy()) {
1352 5693580 : if (fe->data.inRegister() && !regstate(fe->data.reg()).isPinned())
1353 3010762 : avail.putReg(fe->data.reg());
1354 5693580 : if (fe->type.inRegister() && !regstate(fe->type.reg()).isPinned())
1355 2103295 : avail.putReg(fe->type.reg());
1356 : } else {
1357 341640 : FrameEntry *backing = fe->copyOf();
1358 341640 : JS_ASSERT(!backing->isConstant() && !fe->isConstant());
1359 :
1360 : #if defined JS_PUNBOX64
1361 : if ((!fe->type.synced() && backing->type.inMemory()) ||
1362 : (!fe->data.synced() && backing->data.inMemory())) {
1363 :
1364 : RegisterID syncReg = Registers::ValueReg;
1365 :
1366 : /* Load the entire Value into syncReg. */
1367 : if (backing->type.synced() && backing->data.synced()) {
1368 : masm.loadValue(addressOf(backing), syncReg);
1369 : } else if (backing->type.inMemory()) {
1370 : masm.loadTypeTag(addressOf(backing), syncReg);
1371 : masm.orPtr(backing->data.reg(), syncReg);
1372 : } else {
1373 : JS_ASSERT(backing->data.inMemory());
1374 : masm.loadPayload(addressOf(backing), syncReg);
1375 : if (backing->isTypeKnown())
1376 : masm.orPtr(ImmType(backing->getKnownType()), syncReg);
1377 : else
1378 : masm.orPtr(backing->type.reg(), syncReg);
1379 : }
1380 :
1381 : masm.storeValue(syncReg, addressOf(fe));
1382 : continue;
1383 : }
1384 : #elif defined JS_NUNBOX32
1385 : /* Fall back to a slower sync algorithm if load required. */
1386 960424 : if ((!fe->type.synced() && backing->type.inMemory()) ||
1387 618784 : (!fe->data.synced() && backing->data.inMemory())) {
1388 75565 : syncFancy(masm, avail, trackerIndex);
1389 75565 : return;
1390 : }
1391 : #endif
1392 : }
1393 :
1394 5959655 : bool copy = fe->isCopy();
1395 :
1396 : /* If a part still needs syncing, it is either a copy or constant. */
1397 : #if defined JS_PUNBOX64
1398 : /* All register-backed FEs have been entirely synced up-front. */
1399 : if (copy || (!fe->type.inRegister() && !fe->data.inRegister()))
1400 : ensureFeSynced(fe, masm);
1401 : #elif defined JS_NUNBOX32
1402 : /* All components held in registers have been already synced. */
1403 5959655 : if (copy || !fe->data.inRegister())
1404 2869668 : ensureDataSynced(fe, masm);
1405 5959655 : if (copy || !fe->type.inRegister())
1406 3836483 : ensureTypeSynced(fe, masm);
1407 : #endif
1408 : }
1409 : }
1410 :
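/*
 * Sync everything live below the stack pointer (except the top
 * 'ignore.nuses' values, which are about to be popped) and forget the
 * registers backing those entries, then evict any registers in 'kill'
 * which are still held, e.g. by temporaries. On return the kill registers
 * no longer back any entry.
 */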
1411 : void
1412 896614 : FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
1413 : {
1414 896614 : if (loop) {
1415 : /*
1416 : * Drop any remaining loop registers so we don't do any more after-the-fact
1417 : * allocation of the initial register state.
1418 : */
1419 117050 : loop->clearLoopRegisters();
1420 : }
1421 :
1422 : /* Sync all kill-registers up-front. */
1423 896614 : Registers search(kill.freeMask & ~freeRegs.freeMask);
1424 2521606 : while (!search.empty()) {
1425 728378 : AnyRegisterID reg = search.takeAnyReg();
1426 728378 : FrameEntry *fe = regstate(reg).usedBy();
1427 728378 : if (!fe || deadEntry(fe, ignore.nuses))
1428 98229 : continue;
1429 :
1430 630149 : JS_ASSERT(fe->isTracked());
1431 :
1432 : #if defined JS_PUNBOX64
1433 : /* Don't use syncFe(), since that may clobber more registers. */
1434 : ensureFeSynced(fe, masm);
1435 :
1436 : if (!fe->type.synced())
1437 : fe->type.sync();
1438 : if (!fe->data.synced())
1439 : fe->data.sync();
1440 :
1441 : /* Take the other register in the pair, if one exists. */
1442 : if (regstate(reg).type() == RematInfo::DATA) {
1443 : if (!fe->isType(JSVAL_TYPE_DOUBLE)) {
1444 : JS_ASSERT(fe->data.reg() == reg.reg());
1445 : if (fe->type.inRegister() && search.hasReg(fe->type.reg()))
1446 : search.takeReg(fe->type.reg());
1447 : }
1448 : } else {
1449 : JS_ASSERT(fe->type.reg() == reg.reg());
1450 : if (fe->data.inRegister() && search.hasReg(fe->data.reg()))
1451 : search.takeReg(fe->data.reg());
1452 : }
1453 : #elif defined JS_NUNBOX32
1454 : /* Sync this register. */
1455 630149 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
1456 7084 : syncFe(fe);
1457 623065 : } else if (regstate(reg).type() == RematInfo::DATA) {
1458 401674 : JS_ASSERT(fe->data.reg() == reg.reg());
1459 401674 : syncData(fe);
1460 : } else {
1461 221391 : JS_ASSERT(fe->type.reg() == reg.reg());
1462 221391 : syncType(fe);
1463 : }
1464 : #endif
1465 : }
1466 :
1467 :
1468 896614 : unsigned nentries = tracker.nentries;
1469 4357107 : for (int trackerIndex = nentries - 1; trackerIndex >= 0; trackerIndex--) {
1470 3460493 : JS_ASSERT(tracker.nentries == nentries);
1471 3460493 : FrameEntry *fe = tracker[trackerIndex];
1472 :
1473 3460493 : if (fe >= a->sp || deadEntry(fe, ignore.nuses))
1474 888575 : continue;
1475 :
1476 2571918 : syncFe(fe);
1477 :
1478 2571918 : if (fe->isCopy())
1479 87894 : continue;
1480 :
1481 : /* Forget registers. */
1482 2484024 : if (fe->data.inRegister() && !regstate(fe->data.reg()).isPinned()) {
1483 408038 : forgetReg(fe->data.reg());
1484 408038 : fe->data.setMemory();
1485 : }
1486 2484024 : if (fe->data.inFPRegister() && !regstate(fe->data.fpreg()).isPinned()) {
1487 5800 : forgetReg(fe->data.fpreg());
1488 5800 : fe->data.setMemory();
1489 : }
1490 2484024 : if (fe->type.inRegister() && !regstate(fe->type.reg()).isPinned()) {
1491 234407 : forgetReg(fe->type.reg());
1492 234407 : fe->type.setMemory();
1493 : }
1494 : }
1495 :
1496 : /*
1497 : * Anything still alive at this point is guaranteed to be synced. However,
1498 : * it is necessary to evict temporary registers.
1499 : */
1500 896614 : search = Registers(kill.freeMask & ~freeRegs.freeMask);
1501 2047499 : while (!search.empty()) {
1502 254271 : AnyRegisterID reg = search.takeAnyReg();
1503 254271 : FrameEntry *fe = regstate(reg).usedBy();
1504 254271 : if (!fe || deadEntry(fe, ignore.nuses))
1505 98229 : continue;
1506 :
1507 156042 : JS_ASSERT(fe->isTracked());
1508 :
1509 156042 : if (regstate(reg).type() == RematInfo::DATA) {
1510 100152 : JS_ASSERT_IF(reg.isFPReg(), fe->data.fpreg() == reg.fpreg());
1511 100152 : JS_ASSERT_IF(!reg.isFPReg(), fe->data.reg() == reg.reg());
1512 100152 : JS_ASSERT(fe->data.synced());
1513 100152 : fe->data.setMemory();
1514 : } else {
1515 55890 : JS_ASSERT(fe->type.reg() == reg.reg());
1516 55890 : JS_ASSERT(fe->type.synced());
1517 55890 : fe->type.setMemory();
1518 : }
1519 :
1520 156042 : forgetReg(reg);
1521 : }
1522 896614 : }
1523 :
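/*
 * Reload, on the stub path assembled into 'masm', every register the
 * inline path expects to be holding a value, so that control can rejoin
 * the inline code with a consistent register state.
 */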
1524 : void
1525 1605194 : FrameState::merge(Assembler &masm, Changes changes) const
1526 : {
1527 : /*
1528 : * Note: this should only be called by StubCompiler::rejoin, which will notify
1529 : * this FrameState about the jump to patch up in case a new loop register is
1530 : * allocated later.
1531 : */
1532 :
1533 : /*
1534 : * For any changed values we are merging back which we consider to be doubles,
1535 : * ensure they actually are doubles. They must be doubles or ints, but we
1536 : * do not require stub paths to always generate a double when needed.
1537 : * :FIXME: we check this on OOL stub calls, but not inline stub calls.
1538 : */
1539 1605194 : if (cx->typeInferenceEnabled()) {
1540 1088678 : for (unsigned i = 0; i < changes.nchanges; i++) {
1541 380915 : FrameEntry *fe = a->sp - 1 - i;
1542 380915 : if (fe->isTracked() && fe->isType(JSVAL_TYPE_DOUBLE))
1543 19458 : masm.ensureInMemoryDouble(addressOf(fe));
1544 : }
1545 : }
1546 :
1547 1605194 : uint32_t mask = Registers::AvailAnyRegs & ~freeRegs.freeMask;
1548 1605194 : Registers search(mask);
1549 :
1550 5701386 : while (!search.empty(mask)) {
1551 2490998 : AnyRegisterID reg = search.peekReg(mask);
1552 2490998 : FrameEntry *fe = regstate(reg).usedBy();
1553 :
1554 2490998 : if (!fe) {
1555 7306 : search.takeReg(reg);
1556 7306 : continue;
1557 : }
1558 :
1559 2483692 : if (fe->isType(JSVAL_TYPE_DOUBLE)) {
1560 41997 : JS_ASSERT(fe->data.fpreg() == reg.fpreg());
1561 41997 : search.takeReg(fe->data.fpreg());
1562 41997 : masm.loadDouble(addressOf(fe), fe->data.fpreg());
1563 2441695 : } else if (fe->data.inRegister() && fe->type.inRegister()) {
1564 1081765 : search.takeReg(fe->data.reg());
1565 1081765 : search.takeReg(fe->type.reg());
1566 1081765 : masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg());
1567 : } else {
1568 1359930 : if (fe->data.inRegister()) {
1569 1329271 : search.takeReg(fe->data.reg());
1570 1329271 : masm.loadPayload(addressOf(fe), fe->data.reg());
1571 : }
1572 1359930 : if (fe->type.inRegister()) {
1573 30659 : search.takeReg(fe->type.reg());
1574 30659 : masm.loadTypeTag(addressOf(fe), fe->type.reg());
1575 : }
1576 : }
1577 : }
1578 1605194 : }
1579 :
1580 : JSC::MacroAssembler::RegisterID
1581 187127 : FrameState::copyDataIntoReg(FrameEntry *fe)
1582 : {
1583 187127 : return copyDataIntoReg(this->masm, fe);
1584 : }
1585 :
1586 : void
1587 1000 : FrameState::copyDataIntoReg(FrameEntry *fe, RegisterID hint)
1588 : {
1589 1000 : JS_ASSERT(!fe->isConstant());
1590 1000 : JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
1591 :
1592 1000 : if (fe->isCopy())
1593 325 : fe = fe->copyOf();
1594 :
1595 1000 : if (!fe->data.inRegister())
1596 245 : tempRegForData(fe);
1597 :
1598 1000 : RegisterID reg = fe->data.reg();
1599 1000 : if (reg == hint) {
1600 85 : if (freeRegs.empty(Registers::AvailRegs)) {
1601 71 : ensureDataSynced(fe, masm);
1602 71 : fe->data.setMemory();
1603 : } else {
1604 14 : reg = allocReg();
1605 14 : masm.move(hint, reg);
1606 14 : fe->data.setRegister(reg);
1607 14 : regstate(reg).associate(regstate(hint).fe(), RematInfo::DATA);
1608 : }
1609 85 : regstate(hint).forget();
1610 : } else {
1611 915 : pinReg(reg);
1612 915 : takeReg(hint);
1613 915 : unpinReg(reg);
1614 915 : masm.move(reg, hint);
1615 : }
1616 :
1617 1000 : modifyReg(hint);
1618 1000 : }
1619 :
1620 : JSC::MacroAssembler::RegisterID
1621 189493 : FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
1622 : {
1623 189493 : JS_ASSERT(!fe->isConstant());
1624 :
1625 189493 : if (fe->isCopy())
1626 67038 : fe = fe->copyOf();
1627 :
1628 189493 : if (fe->data.inRegister()) {
1629 142585 : RegisterID reg = fe->data.reg();
1630 142585 : if (freeRegs.empty(Registers::AvailRegs)) {
1631 12316 : ensureDataSynced(fe, masm);
1632 12316 : fe->data.setMemory();
1633 12316 : regstate(reg).forget();
1634 12316 : modifyReg(reg);
1635 : } else {
1636 130269 : RegisterID newReg = allocReg();
1637 130269 : masm.move(reg, newReg);
1638 130269 : reg = newReg;
1639 : }
1640 142585 : return reg;
1641 : }
1642 :
1643 46908 : RegisterID reg = allocReg();
1644 :
1645 46908 : if (!freeRegs.empty(Registers::AvailRegs))
1646 41294 : masm.move(tempRegForData(fe), reg);
1647 : else
1648    5614 :         masm.loadPayload(addressOf(fe), reg);
1649 :
1650 46908 : return reg;
1651 : }
1652 :
1653 : JSC::MacroAssembler::RegisterID
1654 8505 : FrameState::copyTypeIntoReg(FrameEntry *fe)
1655 : {
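      :     /*
      :      * Copy the type tag of |fe| into a register owned by the caller. If no
      :      * registers are free, |fe|'s existing type register is synced to memory
      :      * and surrendered instead of making a copy.
      :      */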
1656 8505 : if (fe->isCopy())
1657 2035 : fe = fe->copyOf();
1658 :
1659 8505 : JS_ASSERT(!fe->type.isConstant());
1660 :
1661 8505 : if (fe->type.inRegister()) {
1662 6019 : RegisterID reg = fe->type.reg();
1663 6019 : if (freeRegs.empty(Registers::AvailRegs)) {
1664 4062 : ensureTypeSynced(fe, masm);
1665 4062 : fe->type.setMemory();
1666 4062 : regstate(reg).forget();
1667 4062 : modifyReg(reg);
1668 : } else {
1669 1957 : RegisterID newReg = allocReg();
1670 1957 : masm.move(reg, newReg);
1671 1957 : reg = newReg;
1672 : }
1673 6019 : return reg;
1674 : }
1675 :
1676 2486 : RegisterID reg = allocReg();
1677 :
1678 2486 : if (!freeRegs.empty(Registers::AvailRegs))
1679 872 : masm.move(tempRegForType(fe), reg);
1680 : else
1681 1614 : masm.loadTypeTag(addressOf(fe), reg);
1682 :
1683 2486 : return reg;
1684 : }
1685 :
1686 : JSC::MacroAssembler::RegisterID
1687 0 : FrameState::copyInt32ConstantIntoReg(FrameEntry *fe)
1688 : {
1689 0 : return copyInt32ConstantIntoReg(masm, fe);
1690 : }
1691 :
1692 : JSC::MacroAssembler::RegisterID
1693 0 : FrameState::copyInt32ConstantIntoReg(Assembler &masm, FrameEntry *fe)
1694 : {
1695 0 : JS_ASSERT(fe->data.isConstant());
1696 :
1697 0 : if (fe->isCopy())
1698 0 : fe = fe->copyOf();
1699 :
1700 0 : RegisterID reg = allocReg();
1701 0 : masm.move(Imm32(fe->getValue().toInt32()), reg);
1702 0 : return reg;
1703 : }
1704 :
1705 : JSC::MacroAssembler::RegisterID
1706 647 : FrameState::ownRegForType(FrameEntry *fe)
1707 : {
1708 647 : JS_ASSERT(!fe->isTypeKnown());
1709 :
1710 : RegisterID reg;
1711 647 : if (fe->isCopy()) {
1712 : /* For now, just do an extra move. The reg must be mutable. */
1713 195 : FrameEntry *backing = fe->copyOf();
1714 195 : if (!backing->type.inRegister()) {
1715 107 : JS_ASSERT(backing->type.inMemory());
1716 107 : tempRegForType(backing);
1717 : }
1718 :
1719 195 : if (freeRegs.empty(Registers::AvailRegs)) {
1720 : /* For now... just steal the register that already exists. */
1721 9 : ensureTypeSynced(backing, masm);
1722 9 : reg = backing->type.reg();
1723 9 : backing->type.setMemory();
1724 9 : regstate(reg).forget();
1725 9 : modifyReg(reg);
1726 : } else {
1727 186 : reg = allocReg();
1728 186 : masm.move(backing->type.reg(), reg);
1729 : }
1730 195 : return reg;
1731 : }
1732 :
1733 452 : if (fe->type.inRegister()) {
1734 450 : reg = fe->type.reg();
1735 :
1736 : /* Remove ownership of this register. */
1737 450 : JS_ASSERT(regstate(reg).fe() == fe);
1738 450 : JS_ASSERT(regstate(reg).type() == RematInfo::TYPE);
1739 450 : regstate(reg).forget();
1740 450 : fe->type.setMemory();
1741 450 : modifyReg(reg);
1742 : } else {
1743 2 : JS_ASSERT(fe->type.inMemory());
1744 2 : reg = allocReg();
1745 2 : masm.loadTypeTag(addressOf(fe), reg);
1746 : }
1747 452 : return reg;
1748 : }
1749 :
1750 : JSC::MacroAssembler::RegisterID
1751 46433 : FrameState::ownRegForData(FrameEntry *fe)
1752 : {
1753 46433 : JS_ASSERT(!fe->isConstant());
1754 46433 : JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
1755 :
1756 : RegisterID reg;
1757 46433 : if (fe->isCopy()) {
1758 : /* For now, just do an extra move. The reg must be mutable. */
1759 3921 : FrameEntry *backing = fe->copyOf();
1760 3921 : if (!backing->data.inRegister()) {
1761 1412 : JS_ASSERT(backing->data.inMemory());
1762 1412 : tempRegForData(backing);
1763 : }
1764 :
1765 3921 : if (freeRegs.empty(Registers::AvailRegs)) {
1766 : /* For now... just steal the register that already exists. */
1767 1382 : ensureDataSynced(backing, masm);
1768 1382 : reg = backing->data.reg();
1769 1382 : backing->data.setMemory();
1770 1382 : regstate(reg).forget();
1771 1382 : modifyReg(reg);
1772 : } else {
1773 2539 : reg = allocReg();
1774 2539 : masm.move(backing->data.reg(), reg);
1775 : }
1776 3921 : return reg;
1777 : }
1778 :
1779 42512 : if (fe->isCopied())
1780 0 : uncopy(fe);
1781 :
1782 42512 : if (fe->data.inRegister()) {
1783 42124 : reg = fe->data.reg();
1784 : /* Remove ownership of this register. */
1785 42124 : JS_ASSERT(regstate(reg).fe() == fe);
1786 42124 : JS_ASSERT(regstate(reg).type() == RematInfo::DATA);
1787 42124 : regstate(reg).forget();
1788 42124 : fe->data.setMemory();
1789 42124 : modifyReg(reg);
1790 : } else {
1791 388 : JS_ASSERT(fe->data.inMemory());
1792 388 : reg = allocReg();
1793 388 : masm.loadPayload(addressOf(fe), reg);
1794 : }
1795 42512 : return reg;
1796 : }
1797 :
1798 : void
1799 16843 : FrameState::discardFe(FrameEntry *fe)
1800 : {
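      :     /* Throw away all information about |fe|: its registers, copies and constant state. */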
1801 16843 : forgetEntry(fe);
1802 16843 : fe->type.setMemory();
1803 16843 : fe->data.setMemory();
1804 16843 : fe->clear();
1805 16843 : }
1806 :
1807 : void
1808 8364 : FrameState::pushDouble(FPRegisterID fpreg)
1809 : {
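      :     /*
      :      * Push a new entry of known double type whose payload lives in |fpreg|.
      :      * The new entry takes ownership of the register.
      :      */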
1810 8364 : FrameEntry *fe = rawPush();
1811 8364 : fe->resetUnsynced();
1812 8364 : fe->setType(JSVAL_TYPE_DOUBLE);
1813 8364 : fe->data.setFPRegister(fpreg);
1814 8364 : regstate(fpreg).associate(fe, RematInfo::DATA);
1815 8364 : }
1816 :
1817 : void
1818 0 : FrameState::pushDouble(Address address)
1819 : {
1820 0 : FPRegisterID fpreg = allocFPReg();
1821 0 : masm.loadDouble(address, fpreg);
1822 0 : pushDouble(fpreg);
1823 0 : }
1824 :
1825 : void
1826 1498 : FrameState::ensureDouble(FrameEntry *fe)
1827 : {
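      :     /*
      :      * Convert |fe| into a known double. Integer constants are rewritten in
      :      * place, copies are detached from their backing store, and otherwise the
      :      * payload is converted or loaded into a freshly allocated FP register.
      :      */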
1828 1498 : if (fe->isType(JSVAL_TYPE_DOUBLE))
1829 1054 : return;
1830 :
1831 444 : if (fe->isConstant()) {
1832 89 : JS_ASSERT(fe->getValue().isInt32());
1833 89 : Value newValue = DoubleValue(double(fe->getValue().toInt32()));
1834 89 : fe->setConstant(newValue);
1835 89 : return;
1836 : }
1837 :
1838 355 : FrameEntry *backing = fe;
1839 355 : if (fe->isCopy()) {
1840 : /* Forget this entry is a copy. We are converting this entry, not the backing. */
1841 12 : backing = fe->copyOf();
1842 12 : fe->clear();
1843 343 : } else if (fe->isCopied()) {
1844 : /* Sync and forget any copies of this entry. */
1845 36 : for (uint32_t i = fe->trackerIndex() + 1; i < tracker.nentries; i++) {
1846 20 : FrameEntry *nfe = tracker[i];
1847 20 : if (!deadEntry(nfe) && nfe->isCopy() && nfe->copyOf() == fe) {
1848 16 : syncFe(nfe);
1849 16 : nfe->resetSynced();
1850 : }
1851 : }
1852 : }
1853 :
1854 355 : FPRegisterID fpreg = allocFPReg();
1855 :
1856 355 : if (backing->isType(JSVAL_TYPE_INT32)) {
1857 26 : RegisterID data = tempRegForData(backing);
1858 26 : masm.convertInt32ToDouble(data, fpreg);
1859 : } else {
1860 329 : syncFe(backing);
1861 329 : masm.moveInt32OrDouble(addressOf(backing), fpreg);
1862 : }
1863 :
1864 355 : if (fe == backing)
1865 343 : forgetAllRegs(fe);
1866 355 : fe->resetUnsynced();
1867 355 : fe->setType(JSVAL_TYPE_DOUBLE);
1868 355 : fe->data.setFPRegister(fpreg);
1869 355 : regstate(fpreg).associate(fe, RematInfo::DATA);
1870 :
1871 355 : fe->data.unsync();
1872 355 : fe->type.unsync();
1873 : }
1874 :
1875 : void
1876 4 : FrameState::ensureInteger(FrameEntry *fe)
1877 : {
1878 : /*
1879 : * This method is used to revert a previous ensureDouble call made for a
1880 : * branch. The entry is definitely a double, and has had no copies made.
1881 : */
1882 :
1883 4 : if (fe->isConstant()) {
1884 1 : Value newValue = Int32Value(int32_t(fe->getValue().toDouble()));
1885 1 : fe->setConstant(newValue);
1886 1 : return;
1887 : }
1888 :
1889 3 : JS_ASSERT(!fe->isCopy() && !fe->isCopied());
1890 3 : JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(JSVAL_TYPE_DOUBLE));
1891 :
1892 3 : if (!fe->isType(JSVAL_TYPE_DOUBLE)) {
1893 : /*
1894 : * A normal register may have been allocated after calling
1895 : * syncAndForgetEverything.
1896 : */
1897 0 : if (fe->data.inRegister()) {
1898 0 : syncFe(fe);
1899 0 : forgetReg(fe->data.reg());
1900 0 : fe->data.setMemory();
1901 : }
1902 0 : learnType(fe, JSVAL_TYPE_DOUBLE, false);
1903 : }
1904 :
1905 3 : RegisterID reg = allocReg();
1906 3 : FPRegisterID fpreg = tempFPRegForData(fe);
1907 3 : Jump j = masm.branchTruncateDoubleToInt32(fpreg, reg);
1908 3 : j.linkTo(masm.label(), &masm);
1909 :
1910 3 : forgetAllRegs(fe);
1911 3 : fe->resetUnsynced();
1912 3 : fe->setType(JSVAL_TYPE_INT32);
1913 3 : fe->data.setRegister(reg);
1914 3 : regstate(reg).associate(fe, RematInfo::DATA);
1915 :
1916 3 : fe->data.unsync();
1917 3 : fe->type.unsync();
1918 : }
1919 :
1920 : void
1921 0 : FrameState::ensureInMemoryDoubles(Assembler &masm)
1922 : {
1923 0 : JS_ASSERT(!a->parent);
1924 0 : for (uint32_t i = 0; i < tracker.nentries; i++) {
1925 0 : FrameEntry *fe = tracker[i];
1926 0 : if (!deadEntry(fe) && fe->isType(JSVAL_TYPE_DOUBLE) &&
1927 0 : !fe->isCopy() && !fe->isConstant()) {
1928 0 : masm.ensureInMemoryDouble(addressOf(fe));
1929 : }
1930 : }
1931 0 : }
1932 :
1933 : void
1934 511040 : FrameState::pushCopyOf(FrameEntry *backing)
1935 : {
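      :     /*
      :      * Push a new entry with the same value as |backing|. Constants are pushed
      :      * by value; anything else becomes a copy of the (transitive) backing store,
      :      * keeping the tracker ordering invariants for copies intact.
      :      */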
1936 511040 : JS_ASSERT(backing->isTracked());
1937 511040 : FrameEntry *fe = rawPush();
1938 511040 : fe->resetUnsynced();
1939 511040 : if (backing->isConstant()) {
1940 82426 : fe->setConstant(backing->getValue());
1941 : } else {
1942 428614 : if (backing->isCopy())
1943 28969 : backing = backing->copyOf();
1944 428614 : fe->setCopyOf(backing);
1945 :
1946 : /* Maintain tracker ordering guarantees for copies. */
1947 428614 : JS_ASSERT(backing->isCopied());
1948 428614 : if (fe->trackerIndex() < backing->trackerIndex())
1949 46410 : swapInTracker(fe, backing);
1950 : }
1951 511040 : }
1952 :
1953 : FrameEntry *
1954 50 : FrameState::walkTrackerForUncopy(FrameEntry *original)
1955 : {
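      :     /*
      :      * Scan the tracker for copies of |original|, pick the copy lowest in the
      :      * frame as the new backing store, and redirect any remaining copies to it.
      :      */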
1956 50 : uint32_t firstCopy = InvalidIndex;
1957 50 : FrameEntry *bestFe = NULL;
1958 50 : uint32_t ncopies = 0;
1959 204 : for (uint32_t i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
1960 154 : FrameEntry *fe = tracker[i];
1961 154 : if (deadEntry(fe))
1962 15 : continue;
1963 139 : if (fe->isCopy() && fe->copyOf() == original) {
1964 50 : if (firstCopy == InvalidIndex) {
1965 50 : firstCopy = i;
1966 50 : bestFe = fe;
1967 0 : } else if (fe < bestFe) {
1968 0 : bestFe = fe;
1969 : }
1970 50 : ncopies++;
1971 : }
1972 : }
1973 :
1974 50 : if (!ncopies) {
1975 0 : JS_ASSERT(firstCopy == InvalidIndex);
1976 0 : JS_ASSERT(!bestFe);
1977 0 : return NULL;
1978 : }
1979 :
1980 50 : JS_ASSERT(firstCopy != InvalidIndex);
1981 50 : JS_ASSERT(bestFe);
1982 50 : JS_ASSERT_IF(!isTemporary(original), bestFe > original);
1983 :
1984 : /* Mark all extra copies as copies of the new backing index. */
1985 50 : bestFe->setCopyOf(NULL);
1986 50 : if (ncopies > 1) {
1987 0 : for (uint32_t i = firstCopy; i < tracker.nentries; i++) {
1988 0 : FrameEntry *other = tracker[i];
1989 0 : if (deadEntry(other) || other == bestFe)
1990 0 : continue;
1991 :
1992 : /* The original must be tracked before copies. */
1993 0 : JS_ASSERT(other != original);
1994 :
1995 0 : if (!other->isCopy() || other->copyOf() != original)
1996 0 : continue;
1997 :
1998 0 : other->setCopyOf(bestFe);
1999 :
2000 : /*
2001 : * This is safe even though we're mutating during iteration. There
2002      :              * are two cases. The first is that both indexes are <= i, and so the
2003      :              * swap will never be observed. The other case is we're placing the
2004 : * other FE such that it will be observed later. Luckily, copyOf()
2005 : * will return != original, so nothing will happen.
2006 : */
2007 0 : if (other->trackerIndex() < bestFe->trackerIndex())
2008 0 : swapInTracker(bestFe, other);
2009 : }
2010 : }
2011 :
2012 50 : return bestFe;
2013 : }
2014 :
2015 : FrameEntry *
2016 57566 : FrameState::walkFrameForUncopy(FrameEntry *original)
2017 : {
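      :     /*
      :      * Like walkTrackerForUncopy, but walks the frame slots above |original|
      :      * directly; the first live copy found becomes the new backing store and
      :      * later copies are redirected to it.
      :      */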
2018 57566 : FrameEntry *bestFe = NULL;
2019 57566 : uint32_t ncopies = 0;
2020 :
2021      :     /* It's only necessary to visit as many FEs as are being tracked. */
2022 57566 : uint32_t maxvisits = tracker.nentries;
2023 :
2024 233196 : for (FrameEntry *fe = original + 1; fe < a->sp && maxvisits; fe++) {
2025 175630 : if (!fe->isTracked())
2026 749 : continue;
2027 :
2028 174881 : maxvisits--;
2029 :
2030 174881 : if (fe->isCopy() && fe->copyOf() == original) {
2031 57569 : if (!bestFe) {
2032 57566 : bestFe = fe;
2033 57566 : bestFe->setCopyOf(NULL);
2034 : } else {
2035 3 : fe->setCopyOf(bestFe);
2036 3 : if (fe->trackerIndex() < bestFe->trackerIndex())
2037 0 : swapInTracker(bestFe, fe);
2038 : }
2039 57569 : ncopies++;
2040 : }
2041 : }
2042 :
2043 57566 : return bestFe;
2044 : }
2045 :
2046 : FrameEntry *
2047 57616 : FrameState::uncopy(FrameEntry *original)
2048 : {
2049 57616 : JS_ASSERT(original->isCopied());
2050 :
2051 : /*
2052 : * Copies have three critical invariants:
2053 : * 1) The backing store precedes all copies in the tracker.
2054 : * 2) The backing store precedes all copies in the FrameState.
2055 : * 3) The backing store of a copy cannot be popped from the stack
2056 : * while the copy is still live.
2057 : *
2058      :      * Maintaining these invariants iteratively is kind of hard, so we choose
2059 : * the "lowest" copy in the frame up-front.
2060 : *
2061 : * For example, if the stack is:
2062 : * [A, B, C, D]
2063 : * And the tracker has:
2064 : * [A, D, C, B]
2065 : *
2066 : * If B, C, and D are copies of A - we will walk the tracker to the end
2067 : * and select B, not D (see bug 583684).
2068 : *
2069 : * Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
2070 : * sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
2071 : * With large scripts this may be a problem worth investigating. Note that
2072 : * the tracker is walked twice, so we multiply by 2 for pessimism.
2073 : */
2074 : FrameEntry *fe;
2075 57616 : if ((tracker.nentries - original->trackerIndex()) * 2 > uint32_t(a->sp - original))
2076 57566 : fe = walkFrameForUncopy(original);
2077 : else
2078 50 : fe = walkTrackerForUncopy(original);
2079 57616 : JS_ASSERT(fe);
2080 :
2081 : /*
2082 : * Switch the new backing store to the old backing store. During
2083 : * this process we also necessarily make sure the copy can be
2084 : * synced.
2085 : */
2086 57616 : if (!original->isTypeKnown()) {
2087 : /*
2088 : * If the copy is unsynced, and the original is in memory,
2089 : * give the original a register. We do this below too; it's
2090 : * okay if it's spilled.
2091 : */
2092 32019 : if (original->type.inMemory() && !fe->type.synced())
2093 161 : tempRegForType(original);
2094 32019 : fe->type.inherit(original->type);
2095 32019 : if (fe->type.inRegister())
2096 32007 : regstate(fe->type.reg()).reassociate(fe);
2097 : } else {
2098 25597 : fe->setType(original->getKnownType());
2099 : }
2100 57616 : if (original->isType(JSVAL_TYPE_DOUBLE)) {
2101 100 : if (original->data.inMemory() && !fe->data.synced())
2102 0 : tempFPRegForData(original);
2103 100 : fe->data.inherit(original->data);
2104 100 : if (fe->data.inFPRegister())
2105 100 : regstate(fe->data.fpreg()).reassociate(fe);
2106 : } else {
2107 57516 : if (fe->type.inRegister())
2108 32007 : pinReg(fe->type.reg());
2109 57516 : if (original->data.inMemory() && !fe->data.synced())
2110 2793 : tempRegForData(original);
2111 57516 : if (fe->type.inRegister())
2112 32007 : unpinReg(fe->type.reg());
2113 57516 : fe->data.inherit(original->data);
2114 57516 : if (fe->data.inRegister())
2115 57479 : regstate(fe->data.reg()).reassociate(fe);
2116 : }
2117 :
2118 57616 : return fe;
2119 : }
2120 :
2121 : bool
2122 11961 : FrameState::hasOnlyCopy(FrameEntry *backing, FrameEntry *fe)
2123 : {
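      :     /* Return true if |fe| is the only live copy of |backing| left in the tracker. */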
2124 11961 : JS_ASSERT(backing->isCopied() && fe->copyOf() == backing);
2125 :
2126 66348 : for (uint32_t i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
2127 54752 : FrameEntry *nfe = tracker[i];
2128 54752 : if (nfe != fe && !deadEntry(nfe) && nfe->isCopy() && nfe->copyOf() == backing)
2129 365 : return false;
2130 : }
2131 :
2132 11596 : return true;
2133 : }
2134 :
2135 : void
2136 12302 : FrameState::separateBinaryEntries(FrameEntry *lhs, FrameEntry *rhs)
2137 : {
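      :     /*
      :      * If the rhs on top of the stack is a copy of the lhs beneath it, sync both
      :      * entries and break the copy so the two operands can be mutated independently.
      :      */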
2138 12302 : JS_ASSERT(lhs == a->sp - 2 && rhs == a->sp - 1);
2139 12302 : if (rhs->isCopy() && rhs->copyOf() == lhs) {
2140 0 : syncAndForgetFe(rhs);
2141 0 : syncAndForgetFe(lhs);
2142 0 : uncopy(lhs);
2143 : }
2144 12302 : }
2145 :
2146 : void
2147 198605 : FrameState::storeLocal(uint32_t n, bool popGuaranteed)
2148 : {
2149 198605 : FrameEntry *local = getLocal(n);
2150 :
2151 198605 : if (a->analysis->slotEscapes(entrySlot(local))) {
2152 107882 : JS_ASSERT(local->data.inMemory());
2153 107882 : storeTo(peek(-1), addressOf(local), popGuaranteed);
2154 107882 : return;
2155 : }
2156 :
2157 90723 : storeTop(local);
2158 :
2159 90723 : if (loop)
2160 18571 : local->lastLoop = loop->headOffset();
2161 :
2162 90723 : if (inTryBlock)
2163 482 : syncFe(local);
2164 : }
2165 :
2166 : void
2167 2299 : FrameState::storeArg(uint32_t n, bool popGuaranteed)
2168 : {
2169 : // Note that args are always immediately synced, because they can be
2170 : // aliased (but not written to) via f.arguments.
2171 2299 : FrameEntry *arg = getArg(n);
2172 :
2173 2299 : if (a->analysis->slotEscapes(entrySlot(arg))) {
2174 1333 : JS_ASSERT(arg->data.inMemory());
2175 1333 : storeTo(peek(-1), addressOf(arg), popGuaranteed);
2176 1333 : return;
2177 : }
2178 :
2179 966 : storeTop(arg);
2180 :
2181 966 : if (loop)
2182 373 : arg->lastLoop = loop->headOffset();
2183 :
2184 966 : syncFe(arg);
2185 : }
2186 :
2187 : void
2188 1390559 : FrameState::forgetEntry(FrameEntry *fe)
2189 : {
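      :     /*
      :      * Strip |fe| of its register associations. If it has live copies, its
      :      * backing role is first transferred to one of them via uncopy(). Any
      :      * auxiliary tracking data for the entry is also reset.
      :      */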
2190 1390559 : if (fe->isCopied()) {
2191 57614 : uncopy(fe);
2192 57614 : fe->resetUnsynced();
2193 : } else {
2194 1332945 : forgetAllRegs(fe);
2195 : }
2196 :
2197 1390559 : extraArray[fe - entries].reset();
2198 1390559 : }
2199 :
2200 : void
2201 408446 : FrameState::storeTop(FrameEntry *target)
2202 : {
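      :     /*
      :      * Store the value at the top of the stack into |target| without popping it.
      :      * Constants are propagated by value; otherwise |target| generally becomes
      :      * the new backing store and the existing entries become copies of it.
      :      */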
2203 408446 : JS_ASSERT(!isTemporary(target));
2204 :
2205 : /* Detect something like (x = x) which is a no-op. */
2206 408446 : FrameEntry *top = peek(-1);
2207 408446 : if (top->isCopy() && top->copyOf() == target) {
2208 8 : JS_ASSERT(target->isCopied());
2209 8 : return;
2210 : }
2211 :
2212 : /*
2213 : * If this is overwriting a known non-double type with another value of the
2214 : * same type, then make sure we keep the type marked as synced after doing
2215 : * the copy.
2216 : */
2217 408438 : bool wasSynced = target->type.synced();
2218 408438 : JSValueType oldType = target->isTypeKnown() ? target->getKnownType() : JSVAL_TYPE_UNKNOWN;
2219 408438 : bool trySyncType = wasSynced && oldType != JSVAL_TYPE_UNKNOWN && oldType != JSVAL_TYPE_DOUBLE;
2220 :
2221 : /* Completely invalidate the local variable. */
2222 408438 : forgetEntry(target);
2223 408438 : target->resetUnsynced();
2224 :
2225 : /* Constants are easy to propagate. */
2226 408438 : if (top->isConstant()) {
2227 126255 : target->clear();
2228 126255 : target->setConstant(top->getValue());
2229 126255 : if (trySyncType && target->isType(oldType))
2230 119 : target->type.sync();
2231 126255 : return;
2232 : }
2233 :
2234 : /*
2235 : * When dealing with copies, there are three important invariants:
2236 : *
2237 : * 1) The backing store precedes all copies in the tracker.
2238 : * 2) The backing store precedes all copies in the FrameState.
2239      :      * 3) The backing store of a local is never a stack slot, UNLESS the local
2240 : * variable itself is a stack slot (blocks) that precedes the stack
2241 : * slot.
2242 : *
2243 : * If the top is a copy, and the second condition holds true, the local
2244 : * can be rewritten as a copy of the original backing slot. If the first
2245 : * condition does not hold, force it to hold by swapping in-place.
2246 : */
2247 282183 : FrameEntry *backing = top;
2248 282183 : if (top->isCopy()) {
2249 92428 : backing = top->copyOf();
2250 92428 : JS_ASSERT(backing->trackerIndex() < top->trackerIndex());
2251 :
2252 92428 : if (backing < target || isTemporary(backing)) {
2253 : /* local.idx < backing.idx means local cannot be a copy yet */
2254 29883 : if (target->trackerIndex() < backing->trackerIndex())
2255 1543 : swapInTracker(backing, target);
2256 29883 : target->setCopyOf(backing);
2257 29883 : if (trySyncType && target->isType(oldType))
2258 37 : target->type.sync();
2259 29883 : return;
2260 : }
2261 :
2262 : /*
2263 : * If control flow lands here, then there was a bytecode sequence like
2264 : *
2265 : * ENTERBLOCK 2
2266 : * GETLOCAL 1
2267 : * SETLOCAL 0
2268 : *
2269 : * The problem is slot N can't be backed by M if M could be popped
2270 : * before N. We want a guarantee that when we pop M, even if it was
2271 : * copied, it has no outstanding copies.
2272 : *
2273 : * Because of |let| expressions, it's kind of hard to really know
2274 : * whether a region on the stack will be popped all at once. Bleh!
2275 : *
2276 : * This should be rare except in browser code (and maybe even then),
2277 : * but even so there's a quick workaround. We take all copies of the
2278 : * backing fe, and redirect them to be copies of the destination.
2279 : */
2280 218256 : for (uint32_t i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
2281 155711 : FrameEntry *fe = tracker[i];
2282 155711 : if (deadEntry(fe))
2283 11871 : continue;
2284 143840 : if (fe->isCopy() && fe->copyOf() == backing)
2285 62564 : fe->setCopyOf(target);
2286 : }
2287 : }
2288 :
2289 : /*
2290 : * This is valid from the top->isCopy() path because we're guaranteed a
2291 : * consistent ordering - all copies of |backing| are tracked after
2292 : * |backing|. Transitively, only one swap is needed.
2293 : */
2294 252300 : if (backing->trackerIndex() < target->trackerIndex())
2295 18770 : swapInTracker(backing, target);
2296 :
2297 252300 : if (backing->isType(JSVAL_TYPE_DOUBLE)) {
2298 2916 : FPRegisterID fpreg = tempFPRegForData(backing);
2299 2916 : target->setType(JSVAL_TYPE_DOUBLE);
2300 2916 : target->data.setFPRegister(fpreg);
2301 2916 : regstate(fpreg).reassociate(target);
2302 : } else {
2303 : /*
2304 : * Move the backing store down - we spill registers here, but we could be
2305 : * smarter and re-use the type reg. If we need registers for both the type
2306 : * and data in the backing, make sure we keep the other components pinned.
2307 : * There is nothing else to keep us from evicting the backing's registers.
2308 : */
2309 249384 : if (backing->type.inRegister())
2310 94889 : pinReg(backing->type.reg());
2311 249384 : RegisterID reg = tempRegForData(backing);
2312 249384 : if (backing->type.inRegister())
2313 94889 : unpinReg(backing->type.reg());
2314 249384 : target->data.setRegister(reg);
2315 249384 : regstate(reg).reassociate(target);
2316 :
2317 249384 : if (backing->isTypeKnown()) {
2318 135129 : target->setType(backing->getKnownType());
2319 : } else {
2320 114255 : pinReg(reg);
2321 114255 : RegisterID typeReg = tempRegForType(backing);
2322 114255 : unpinReg(reg);
2323 114255 : target->type.setRegister(typeReg);
2324 114255 : regstate(typeReg).reassociate(target);
2325 : }
2326 : }
2327 :
2328 252300 : backing->setCopyOf(target);
2329 252300 : JS_ASSERT(top->copyOf() == target);
2330 :
2331 252300 : if (trySyncType && target->isType(oldType))
2332 61032 : target->type.sync();
2333 : }
2334 :
2335 : void
2336 132213 : FrameState::shimmy(uint32_t n)
2337 : {
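      :     /*
      :      * Overwrite the entry |n| slots below the top with the top value, then pop
      :      * the top |n| entries, leaving the stored value as the new top of stack.
      :      */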
2338 132213 : JS_ASSERT(a->sp - n >= a->spBase);
2339 132213 : int32_t depth = 0 - int32_t(n);
2340 132213 : storeTop(peek(depth - 1));
2341 132213 : popn(n);
2342 132213 : }
2343 :
2344 : void
2345 184544 : FrameState::shift(int32_t n)
2346 : {
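      :     /*
      :      * Overwrite the entry at depth |n - 1| with the value on top of the stack,
      :      * then pop the top entry (n is negative).
      :      */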
2347 184544 : JS_ASSERT(n < 0);
2348 184544 : JS_ASSERT(a->sp + n - 1 >= a->spBase);
2349 184544 : storeTop(peek(n - 1));
2350 184544 : pop();
2351 184544 : }
2352 :
2353 : void
2354 0 : FrameState::swap()
2355 : {
2356 : // A B
2357 :
2358 0 : dupAt(-2);
2359 : // A B A
2360 :
2361 0 : dupAt(-2);
2362 : // A B A B
2363 :
2364 0 : shift(-3);
2365 : // B B A
2366 :
2367 0 : shimmy(1);
2368 : // B A
2369 0 : }
2370 :
2371 : void
2372 395 : FrameState::forgetKnownDouble(FrameEntry *fe)
2373 : {
2374 : /*
2375 : * Forget all information indicating fe is a double, so we can use GPRs for its
2376 : * contents. We currently need to do this in order to use the entry in MICs/PICs
2377 : * or to construct its ValueRemat. :FIXME: this needs to get fixed.
2378 : */
2379 395 : JS_ASSERT(!fe->isConstant() && fe->isType(JSVAL_TYPE_DOUBLE));
2380 :
2381 395 : RegisterID typeReg = allocReg();
2382 395 : RegisterID dataReg = allocReg();
2383 :
2384 : /* Copy into a different FP register, as breakDouble can modify fpreg. */
2385 395 : FPRegisterID fpreg = allocFPReg();
2386 395 : masm.moveDouble(tempFPRegForData(fe), fpreg);
2387 395 : masm.breakDouble(fpreg, typeReg, dataReg);
2388 :
2389 395 : forgetAllRegs(fe);
2390 395 : fe->resetUnsynced();
2391 395 : fe->clear();
2392 :
2393 395 : regstate(typeReg).associate(fe, RematInfo::TYPE);
2394 395 : regstate(dataReg).associate(fe, RematInfo::DATA);
2395 395 : fe->type.setRegister(typeReg);
2396 395 : fe->data.setRegister(dataReg);
2397 395 : freeReg(fpreg);
2398 395 : }
2399 :
2400 : void
2401 81564 : FrameState::pinEntry(FrameEntry *fe, ValueRemat &vr, bool breakDouble)
2402 : {
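      :     /*
      :      * Fill in |vr| with a rematerialization of |fe| and pin the registers it
      :      * uses so they cannot be evicted until unpinEntry(). If |breakDouble| is
      :      * set, a known double is first split into separate type/data registers.
      :      */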
2403 81564 : if (breakDouble && !fe->isConstant() && fe->isType(JSVAL_TYPE_DOUBLE))
2404 386 : forgetKnownDouble(fe);
2405 :
2406 81564 : if (fe->isConstant()) {
2407 16286 : vr = ValueRemat::FromConstant(fe->getValue());
2408 65278 : } else if (fe->isType(JSVAL_TYPE_DOUBLE)) {
2409 1385 : FPRegisterID fpreg = tempFPRegForData(fe);
2410 1385 : pinReg(fpreg);
2411 1385 : vr = ValueRemat::FromFPRegister(fpreg);
2412 : } else {
2413 : // Pin the type register so it can't spill.
2414 63893 : MaybeRegisterID maybePinnedType = maybePinType(fe);
2415 :
2416 : // Get and pin the data register.
2417 63893 : RegisterID dataReg = tempRegForData(fe);
2418 63893 : pinReg(dataReg);
2419 :
2420 63893 : if (fe->isTypeKnown()) {
2421 39260 : vr = ValueRemat::FromKnownType(fe->getKnownType(), dataReg);
2422 : } else {
2423 : // The type might not be loaded yet, so unpin for simplicity.
2424 24633 : maybeUnpinReg(maybePinnedType);
2425 :
2426 24633 : vr = ValueRemat::FromRegisters(tempRegForType(fe), dataReg);
2427 24633 : pinReg(vr.typeReg());
2428 : }
2429 : }
2430 :
2431 : // Set these bits last, since allocation could have caused a sync.
2432 81564 : vr.isDataSynced = fe->data.synced();
2433 81564 : vr.isTypeSynced = fe->type.synced();
2434 81564 : }
2435 :
2436 : void
2437 70852 : FrameState::unpinEntry(const ValueRemat &vr)
2438 : {
2439 70852 : if (vr.isFPRegister()) {
2440 1385 : unpinReg(vr.fpReg());
2441 69467 : } else if (!vr.isConstant()) {
2442 56061 : if (!vr.isTypeKnown())
2443 20011 : unpinReg(vr.typeReg());
2444 56061 : unpinReg(vr.dataReg());
2445 : }
2446 70852 : }
2447 :
2448 : void
2449 10712 : FrameState::ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat &vr)
2450 : {
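      :     /*
      :      * Make sure the value described by |vr| has been written to |fe|'s address
      :      * in memory, emitting only the stores for components not already synced.
      :      */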
2451 : #if defined JS_PUNBOX64
2452 : if (!vr.isDataSynced || !vr.isTypeSynced)
2453 : masm.storeValue(vr, addressOf(fe));
2454 : #elif defined JS_NUNBOX32
2455 10712 : if (vr.isConstant() || vr.isFPRegister()) {
2456 2880 : if (!vr.isDataSynced || !vr.isTypeSynced)
2457 2869 : masm.storeValue(vr.value(), addressOf(fe));
2458 : } else {
2459 7832 : if (!vr.isDataSynced)
2460 7603 : masm.storePayload(vr.dataReg(), addressOf(fe));
2461 7832 : if (!vr.isTypeSynced) {
2462 7554 : if (vr.isTypeKnown())
2463 3194 : masm.storeTypeTag(ImmType(vr.knownType()), addressOf(fe));
2464 : else
2465 4360 : masm.storeTypeTag(vr.typeReg(), addressOf(fe));
2466 : }
2467 : }
2468 : #endif
2469 10712 : }
2470 :
2471 : static inline bool
2472 1558320 : AllocHelper(RematInfo &info, MaybeRegisterID &maybe)
2473 : {
2474 1558320 : if (info.inRegister()) {
2475 1057833 : maybe = info.reg();
2476 1057833 : return true;
2477 : }
2478 500487 : return false;
2479 : }
2480 :
2481 : void
2482 82 : FrameState::allocForSameBinary(FrameEntry *fe, JSOp op, BinaryAlloc &alloc)
2483 : {
2484 82 : alloc.rhsNeedsRemat = false;
2485 :
2486 82 : if (!fe->isTypeKnown()) {
2487 54 : alloc.lhsType = tempRegForType(fe);
2488 54 : pinReg(alloc.lhsType.reg());
2489 : }
2490 :
2491 82 : alloc.lhsData = tempRegForData(fe);
2492 :
2493 82 : if (!freeRegs.empty(Registers::AvailRegs)) {
2494 62 : alloc.result = allocReg();
2495 62 : masm.move(alloc.lhsData.reg(), alloc.result);
2496 62 : alloc.lhsNeedsRemat = false;
2497 : } else {
2498 20 : alloc.result = alloc.lhsData.reg();
2499 20 : takeReg(alloc.result);
2500 20 : alloc.lhsNeedsRemat = true;
2501 : }
2502 :
2503 82 : if (alloc.lhsType.isSet())
2504 54 : unpinReg(alloc.lhsType.reg());
2505 :
2506 82 : alloc.lhsFP = alloc.rhsFP = allocFPReg();
2507 82 : }
2508 :
2509 : void
2510 93030 : FrameState::ensureFullRegs(FrameEntry *fe, MaybeRegisterID *type, MaybeRegisterID *data)
2511 : {
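      :     /*
      :      * Load the type and data components of |fe| into registers, returning them
      :      * through |type| and |data|. Components that are constants are skipped, and
      :      * components already in registers are reused rather than reloaded.
      :      */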
2512 93030 : fe = fe->isCopy() ? fe->copyOf() : fe;
2513 :
2514 93030 : JS_ASSERT(!data->isSet() && !type->isSet());
2515 93030 : if (!fe->type.inMemory()) {
2516 76013 : if (fe->type.inRegister())
2517 38058 : *type = fe->type.reg();
2518 76013 : if (fe->data.isConstant())
2519 0 : return;
2520 76013 : if (fe->data.inRegister()) {
2521 72870 : *data = fe->data.reg();
2522 72870 : return;
2523 : }
2524 3143 : if (fe->type.inRegister())
2525 296 : pinReg(fe->type.reg());
2526 3143 : *data = tempRegForData(fe);
2527 3143 : if (fe->type.inRegister())
2528 296 : unpinReg(fe->type.reg());
2529 17017 : } else if (!fe->data.inMemory()) {
2530 482 : if (fe->data.inRegister())
2531 482 : *data = fe->data.reg();
2532 482 : if (fe->type.isConstant())
2533 0 : return;
2534 482 : if (fe->type.inRegister()) {
2535 0 : *type = fe->type.reg();
2536 0 : return;
2537 : }
2538 482 : if (fe->data.inRegister())
2539 482 : pinReg(fe->data.reg());
2540 482 : *type = tempRegForType(fe);
2541 482 : if (fe->data.inRegister())
2542 482 : unpinReg(fe->data.reg());
2543 : } else {
2544 16535 : *data = tempRegForData(fe);
2545 16535 : pinReg(data->reg());
2546 16535 : *type = tempRegForType(fe);
2547 16535 : unpinReg(data->reg());
2548 : }
2549 : }
2550 :
2551 : inline bool
2552 53077 : FrameState::binaryEntryLive(FrameEntry *fe) const
2553 : {
2554 : /*
2555 : * Compute whether fe is live after the binary operation performed at the current
2556 : * bytecode. This is similar to variableLive except that it returns false for the
2557 : * top two stack entries and special cases LOCALINC/ARGINC and friends, which fuse
2558 : * a binary operation before writing over the local/arg.
2559 : */
2560 53077 : JS_ASSERT(cx->typeInferenceEnabled());
2561 :
2562 53077 : if (deadEntry(fe, 2))
2563 32403 : return false;
2564 :
2565 20674 : switch (JSOp(*a->PC)) {
2566 : case JSOP_INCLOCAL:
2567 : case JSOP_DECLOCAL:
2568 : case JSOP_LOCALINC:
2569 : case JSOP_LOCALDEC:
2570 11270 : if (fe - a->locals == (int) GET_SLOTNO(a->PC))
2571 10886 : return false;
2572 : case JSOP_INCARG:
2573 : case JSOP_DECARG:
2574 : case JSOP_ARGINC:
2575 : case JSOP_ARGDEC:
2576 581 : if (fe - a->args == (int) GET_SLOTNO(a->PC))
2577 129 : return false;
2578 : default:;
2579 : }
2580 :
2581 9659 : JS_ASSERT(fe != a->callee_);
2582 :
2583 : /* Arguments are always treated as live within inline frames, see bestEvictReg. */
2584 9659 : if (a->parent && fe < a->locals)
2585 89 : return true;
2586 :
2587 : /* Caller must check that no copies are invalidated by rewriting the entry. */
2588 9570 : return fe >= a->spBase || variableLive(fe, a->PC);
2589 : }
2590 :
2591 : void
2592 389580 : FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
2593 : bool needsResult)
2594 : {
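      :     /*
      :      * Allocate registers for a binary operation on |lhs| and |rhs|: pin the
      :      * registers the operands already occupy, load any missing type/data
      :      * components, and, if |needsResult|, choose a mutable result register,
      :      * reusing an operand register when no free registers remain.
      :      */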
2595 389580 : FrameEntry *backingLeft = lhs;
2596 389580 : FrameEntry *backingRight = rhs;
2597 :
2598 389580 : if (backingLeft->isCopy())
2599 30597 : backingLeft = backingLeft->copyOf();
2600 389580 : if (backingRight->isCopy())
2601 2229 : backingRight = backingRight->copyOf();
2602 :
2603 : /*
2604 : * For each remat piece of both FEs, if a register is assigned, get it now
2605 : * and pin it. This is safe - constants and known types will be avoided.
2606 : */
2607 389580 : if (AllocHelper(backingLeft->type, alloc.lhsType))
2608 53955 : pinReg(alloc.lhsType.reg());
2609 389580 : if (AllocHelper(backingLeft->data, alloc.lhsData))
2610 370255 : pinReg(alloc.lhsData.reg());
2611 389580 : if (AllocHelper(backingRight->type, alloc.rhsType))
2612 312105 : pinReg(alloc.rhsType.reg());
2613 389580 : if (AllocHelper(backingRight->data, alloc.rhsData))
2614 321518 : pinReg(alloc.rhsData.reg());
2615 :
2616 : /* For each type without a register, give it a register if needed. */
2617 389580 : if (!alloc.lhsType.isSet() && backingLeft->type.inMemory()) {
2618 307573 : alloc.lhsType = tempRegForType(lhs);
2619 307573 : pinReg(alloc.lhsType.reg());
2620 : }
2621 389580 : if (!alloc.rhsType.isSet() && backingRight->type.inMemory()) {
2622 2542 : alloc.rhsType = tempRegForType(rhs);
2623 2542 : pinReg(alloc.rhsType.reg());
2624 : }
2625 :
2626 : /*
2627 : * Allocate floating point registers. These are temporaries with no pre-existing data;
2628 : * floating point registers are only allocated for known doubles, and BinaryAlloc is not
2629 : * used for such operations.
2630 : */
2631 389580 : JS_ASSERT(!backingLeft->isType(JSVAL_TYPE_DOUBLE));
2632 389580 : JS_ASSERT(!backingRight->isType(JSVAL_TYPE_DOUBLE));
2633 389580 : alloc.lhsFP = allocFPReg();
2634 389580 : alloc.rhsFP = allocFPReg();
2635 :
2636 : bool commu;
2637 389580 : switch (op) {
2638 : case JSOP_EQ:
2639 : case JSOP_GT:
2640 : case JSOP_GE:
2641 : case JSOP_LT:
2642 : case JSOP_LE:
2643 : /* fall through */
2644 : case JSOP_ADD:
2645 : case JSOP_MUL:
2646 : case JSOP_SUB:
2647 389580 : commu = true;
2648 389580 : break;
2649 :
2650 : case JSOP_DIV:
2651 0 : commu = false;
2652 0 : break;
2653 :
2654 : default:
2655 0 : JS_NOT_REACHED("unknown op");
2656 : return;
2657 : }
2658 :
2659 : /*
2660 : * Allocate data registers. If the op is not commutative, the LHS
2661 : * _must_ be in a register.
2662 : */
2663 389580 : JS_ASSERT_IF(lhs->isConstant(), !rhs->isConstant());
2664 389580 : JS_ASSERT_IF(rhs->isConstant(), !lhs->isConstant());
2665 :
2666 389580 : if (!alloc.lhsData.isSet()) {
2667 19325 : if (backingLeft->data.inMemory()) {
2668 17776 : alloc.lhsData = tempRegForData(lhs);
2669 17776 : pinReg(alloc.lhsData.reg());
2670 1549 : } else if (!commu) {
2671 0 : JS_ASSERT(lhs->isConstant());
2672 0 : alloc.lhsData = allocReg();
2673 0 : alloc.extraFree = alloc.lhsData;
2674 0 : masm.move(Imm32(lhs->getValue().toInt32()), alloc.lhsData.reg());
2675 : }
2676 : }
2677 389580 : if (!alloc.rhsData.isSet() && backingRight->data.inMemory()) {
2678 2748 : alloc.rhsData = tempRegForData(rhs);
2679 2748 : pinReg(alloc.rhsData.reg());
2680 : }
2681 :
2682 389580 : alloc.lhsNeedsRemat = false;
2683 389580 : alloc.rhsNeedsRemat = false;
2684 389580 : alloc.resultHasRhs = false;
2685 389580 : alloc.undoResult = false;
2686 :
2687 389580 : if (!needsResult)
2688 24811 : goto skip;
2689 :
2690 : /*
2691 : * Now a result register is needed. It must contain a mutable copy of the
2692      :      * LHS. For commutative operations, we can opt to use the RHS instead. If
2693      :      * for some reason either operand must be in a register, that has already
2694      :      * been guaranteed at this point.
2695 : */
2696 :
2697 : /*
2698 : * Try to reuse operand registers without syncing for ADD and constant SUB,
2699 : * so long as the backing for the operand is dead.
2700 : */
2701 511517 : if (cx->typeInferenceEnabled() &&
2702 107327 : backingLeft->data.inRegister() && !binaryEntryLive(backingLeft) &&
2703 27460 : (op == JSOP_ADD || (op == JSOP_SUB && backingRight->isConstant())) &&
2704 11961 : (lhs == backingLeft || hasOnlyCopy(backingLeft, lhs))) {
2705 41001 : alloc.result = backingLeft->data.reg();
2706 41001 : alloc.undoResult = true;
2707 41001 : alloc.resultHasRhs = false;
2708 41001 : goto skip;
2709 : }
2710 :
2711 323768 : if (cx->typeInferenceEnabled())
2712 13249 : evictDeadEntries(true);
2713 :
2714 323768 : if (!freeRegs.empty(Registers::AvailRegs)) {
2715 : /* Free reg - just grab it. */
2716 320423 : alloc.result = allocReg();
2717 320423 : if (!alloc.lhsData.isSet()) {
2718 1344 : JS_ASSERT(alloc.rhsData.isSet());
2719 1344 : JS_ASSERT(commu);
2720 1344 : masm.move(alloc.rhsData.reg(), alloc.result);
2721 1344 : alloc.resultHasRhs = true;
2722 : } else {
2723 319079 : masm.move(alloc.lhsData.reg(), alloc.result);
2724 319079 : alloc.resultHasRhs = false;
2725 : }
2726 3345 : } else if (cx->typeInferenceEnabled()) {
2727 : /* No free regs. Evict a register or reuse one of the operands. */
2728 1150 : bool leftInReg = backingLeft->data.inRegister();
2729 1150 : bool rightInReg = backingRight->data.inRegister();
2730 :
2731 : /* If the LHS/RHS types are in registers, don't use them for the result. */
2732 1150 : uint32_t mask = Registers::AvailRegs;
2733 1150 : if (backingLeft->type.inRegister())
2734 781 : mask &= ~Registers::maskReg(backingLeft->type.reg());
2735 1150 : if (backingRight->type.inRegister())
2736 679 : mask &= ~Registers::maskReg(backingRight->type.reg());
2737 :
2738 1150 : RegisterID result = bestEvictReg(mask, true).reg();
2739 1150 : if (!commu && rightInReg && backingRight->data.reg() == result) {
2740 : /* Can't put the result in the RHS for non-commutative operations. */
2741 0 : alloc.result = allocReg();
2742 0 : masm.move(alloc.lhsData.reg(), alloc.result);
2743 : } else {
2744 1150 : alloc.result = result;
2745 1150 : if (leftInReg && result == backingLeft->data.reg()) {
2746 241 : alloc.lhsNeedsRemat = true;
2747 241 : unpinReg(result);
2748 241 : takeReg(result);
2749 909 : } else if (rightInReg && result == backingRight->data.reg()) {
2750 456 : alloc.rhsNeedsRemat = true;
2751 456 : alloc.resultHasRhs = true;
2752 456 : unpinReg(result);
2753 456 : takeReg(result);
2754 : } else {
2755 453 : JS_ASSERT(!regstate(result).isPinned());
2756 453 : takeReg(result);
2757 453 : if (leftInReg) {
2758 393 : masm.move(alloc.lhsData.reg(), result);
2759 : } else {
2760 60 : masm.move(alloc.rhsData.reg(), result);
2761 60 : alloc.resultHasRhs = true;
2762 : }
2763 : }
2764 : }
2765 : } else {
2766 : /*
2767 : * No free regs. Find a good candidate to re-use. Best candidates don't
2768 : * require syncs on the inline path.
2769 : */
2770 2195 : bool leftInReg = backingLeft->data.inRegister();
2771 2195 : bool rightInReg = backingRight->data.inRegister();
2772 2195 : bool leftSynced = backingLeft->data.synced();
2773 2195 : bool rightSynced = backingRight->data.synced();
2774 2195 : if (!commu || (leftInReg && (leftSynced || (!rightInReg || !rightSynced)))) {
2775 1965 : JS_ASSERT(backingLeft->data.inRegister() || !commu);
2776 5895 : JS_ASSERT_IF(backingLeft->data.inRegister(),
2777 5895 : backingLeft->data.reg() == alloc.lhsData.reg());
2778 1965 : if (backingLeft->data.inRegister()) {
2779 1965 : alloc.result = backingLeft->data.reg();
2780 1965 : unpinReg(alloc.result);
2781 1965 : takeReg(alloc.result);
2782 1965 : alloc.lhsNeedsRemat = true;
2783 : } else {
2784 : /* For now, just spill... */
2785 0 : alloc.result = allocReg();
2786 0 : masm.move(alloc.lhsData.reg(), alloc.result);
2787 : }
2788 1965 : alloc.resultHasRhs = false;
2789 : } else {
2790 230 : JS_ASSERT(commu);
2791 230 : JS_ASSERT(!leftInReg || (rightInReg && rightSynced));
2792 230 : alloc.result = backingRight->data.reg();
2793 230 : unpinReg(alloc.result);
2794 230 : takeReg(alloc.result);
2795 230 : alloc.resultHasRhs = true;
2796 230 : alloc.rhsNeedsRemat = true;
2797 : }
2798 : }
2799 :
2800 : skip:
2801 : /* Unpin everything that was pinned. */
2802 389580 : if (backingLeft->type.inRegister())
2803 361528 : unpinReg(backingLeft->type.reg());
2804 389580 : if (backingRight->type.inRegister())
2805 314647 : unpinReg(backingRight->type.reg());
2806 389580 : if (backingLeft->data.inRegister())
2807 385825 : unpinReg(backingLeft->data.reg());
2808 389580 : if (backingRight->data.inRegister())
2809 323580 : unpinReg(backingRight->data.reg());
2810 389580 : }
2811 :
2812 : void
2813 365240 : FrameState::rematBinary(FrameEntry *lhs, FrameEntry *rhs, const BinaryAlloc &alloc, Assembler &masm)
2814 : {
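      :     /* Reload operand payloads whose registers allocForBinary handed over to the result. */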
2815 365240 : if (alloc.rhsNeedsRemat)
2816 907 : masm.loadPayload(addressForDataRemat(rhs), alloc.rhsData.reg());
2817 365240 : if (alloc.lhsNeedsRemat)
2818 2603 : masm.loadPayload(addressForDataRemat(lhs), alloc.lhsData.reg());
2819 365240 : }
2820 :
2821 : MaybeRegisterID
2822 64243 : FrameState::maybePinData(FrameEntry *fe)
2823 : {
2824 64243 : fe = fe->isCopy() ? fe->copyOf() : fe;
2825 64243 : if (fe->data.inRegister()) {
2826 33475 : pinReg(fe->data.reg());
2827 33475 : return fe->data.reg();
2828 : }
2829 30768 : return MaybeRegisterID();
2830 : }
2831 :
2832 : MaybeRegisterID
2833 99209 : FrameState::maybePinType(FrameEntry *fe)
2834 : {
2835 99209 : fe = fe->isCopy() ? fe->copyOf() : fe;
2836 99209 : if (fe->type.inRegister()) {
2837 29936 : pinReg(fe->type.reg());
2838 29936 : return fe->type.reg();
2839 : }
2840 69273 : return MaybeRegisterID();
2841 : }
2842 :
2843 : void
2844 108391 : FrameState::maybeUnpinReg(MaybeRegisterID reg)
2845 : {
2846 108391 : if (reg.isSet())
2847 55326 : unpinReg(reg.reg());
2848 108391 : }
2849 :
2850 : uint32_t
2851 2443 : FrameState::allocTemporary()
2852 : {
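      :     /*
      :      * Reserve a new temporary frame entry and return its index, or UINT32_MAX
      :      * if the temporary limit has been reached.
      :      */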
2853 2443 : if (temporariesTop == temporaries + TEMPORARY_LIMIT)
2854 0 : return UINT32_MAX;
2855 2443 : FrameEntry *fe = temporariesTop++;
2856 2443 : fe->lastLoop = 0;
2857 2443 : fe->temporary = true;
2858 2443 : return fe - temporaries;
2859 : }
2860 :
2861 : void
2862 33560 : FrameState::clearTemporaries()
2863 : {
2864 33560 : JS_ASSERT(!a->parent);
2865 :
2866 36003 : for (FrameEntry *fe = temporaries; fe < temporariesTop; fe++) {
2867 2443 : if (!fe->isTracked())
2868 1557 : continue;
2869 886 : if (fe->isCopied())
2870 1 : uncopy(fe);
2871 886 : forgetAllRegs(fe);
2872 886 : fe->resetSynced();
2873 : }
2874 :
2875 33560 : temporariesTop = temporaries;
2876 33560 : }
2877 :
2878 : Vector<TemporaryCopy> *
2879 31932 : FrameState::getTemporaryCopies(Uses uses)
2880 : {
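      :     /*
      :      * Collect (copy, backing) address pairs for all live copies of temporaries,
      :      * allocating the result vector lazily; returns NULL if there are none.
      :      */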
2881 : /* :XXX: handle OOM */
2882 31932 : Vector<TemporaryCopy> *res = NULL;
2883 :
2884 46248 : for (FrameEntry *fe = temporaries; fe < temporariesTop; fe++) {
2885 14316 : if (!fe->isTracked())
2886 3691 : continue;
2887 10625 : if (fe->isCopied()) {
2888 555 : for (uint32_t i = fe->trackerIndex() + 1; i < tracker.nentries; i++) {
2889 446 : FrameEntry *nfe = tracker[i];
2890 446 : if (!deadEntry(nfe, uses.nuses) && nfe->isCopy() && nfe->copyOf() == fe) {
2891 110 : if (!res)
2892 90 : res = OffTheBooks::new_< Vector<TemporaryCopy> >(cx);
2893 110 : res->append(TemporaryCopy(addressOf(nfe), addressOf(fe)));
2894 : }
2895 : }
2896 : }
2897 : }
2898 :
2899 31932 : return res;
2900 : }
|