/trunk/js/jsemit.cpp (revision 585, Sun Sep 12 15:13:23 2010 UTC)
Update to SpiderMonkey from Firefox 3.6.9.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * JS bytecode generation.
 */
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <new>
#include <string.h>
#include "jstypes.h"
#include "jsstdint.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"

/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);

JSCodeGenerator::JSCodeGenerator(JSCompiler *jsc,
                                 JSArenaPool *cpool, JSArenaPool *npool,
                                 uintN lineno)
  : JSTreeContext(jsc),
    codePool(cpool), notePool(npool),
    codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
    stackDepth(0), maxStackDepth(0),
    ntrynotes(0), lastTryNode(NULL),
    spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
    numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
    arrayCompDepth(0),
    emitLevel(0)
{
    flags = TCF_COMPILING;
    memset(&prolog, 0, sizeof prolog);
    memset(&main, 0, sizeof main);
    current = &main;
    firstLine = prolog.currentLine = main.currentLine = lineno;
    prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
    memset(&upvarMap, 0, sizeof upvarMap);
}

JSCodeGenerator::~JSCodeGenerator()
{
    JS_ARENA_RELEASE(codePool, codeMark);
    JS_ARENA_RELEASE(notePool, noteMark);

    /* NB: non-null only after OOM. */
    if (spanDeps)
        compiler->context->free(spanDeps);

    if (upvarMap.vector)
        compiler->context->free(upvarMap.vector);
}

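/*
 * Ensure the bytecode vector has room for delta more bytes, growing the code
 * arena in power-of-two chunks as needed. Returns the offset at which the
 * caller may write, or -1 on allocation failure.
 */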
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = next - base;
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(limit - base);
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}

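/*
 * Update the modelled stack depth for the opcode just emitted at target,
 * tracking the maximum depth reached. JSOP_ENTERBLOCK is special: its def
 * count comes from the block object most recently indexed by the emitter.
 */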
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN extra, depth, nuses;
    intN ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
#ifdef JS_TRACER
    extern uint8 js_opcode2extra[];
    extra = js_opcode2extra[op];
#else
    extra = 0;
#endif
    if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
                extra;
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }

    nuses = js_GetStackUses(cs, op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        JSTokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->compiler->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->filename ? ts->filename : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastbox->object;
        JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}

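/*
 * js_Emit1/2/3/N append an opcode (plus 0, 1, 2, or extra immediate bytes)
 * to the current bytecode vector and update the modelled stack depth. Each
 * returns the offset of the emitted opcode, or -1 on error.
 */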
ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}

/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->topStmt)
        return js_script_str;
    return statementName[cg->topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}

/**
  Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
  and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
  into unconditional (gotos and gosubs), and conditional jumps or branches
  (which pop a value, test it, and jump depending on its value). Most jumps
  have just one immediate operand, a signed offset from the jump opcode's pc
  to the target bytecode. The lookup and table switch opcodes may contain
  many jump offsets.

  Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
  fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
  suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
  the extended form of the JSOP_OR branch opcode). The unextended or short
  formats have 16-bit signed immediate offset operands, the extended or long
  formats have 32-bit signed immediates. The span-dependency problem consists
  of selecting as few long instructions as possible, or about as few -- since
  jumps can span other jumps, extending one jump may cause another to need to
  be extended.

  Most JS scripts are short, so need no extended jumps. We optimize for this
  case by generating short jumps until we know a long jump is needed. After
  that point, we keep generating short jumps, but each jump's 16-bit immediate
  offset operand is actually an unsigned index into cg->spanDeps, an array of
  JSSpanDep structs. Each struct tells the top offset in the script of the
  opcode, the "before" offset of the jump (which will be the same as top for
  simplex jumps, but which will index further into the bytecode array for a
  non-initial jump offset in a lookup or table switch), the after "offset"
  adjusted during span-dependent instruction selection (initially the same
  value as the "before" offset), and the jump target (more below).

  Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
  ensure that all bytecode generated so far can be inspected to discover where
  the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
  that we generate span-dependency records sorted by their offsets, so we can
  binary-search when trying to find a JSSpanDep for a given bytecode offset,
  or the nearest JSSpanDep at or above a given pc.

  To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
  65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
  tells us that we need to binary-search for the cg->spanDeps entry by the
  jump opcode's bytecode offset (sd->before).

  Jump targets need to be maintained in a data structure that lets us look
  up an already-known target by its address (jumps may have a common target),
  and that also lets us update the addresses (script-relative, a.k.a. absolute
  offsets) of targets that come after a jump target (for when a jump below
  that target needs to be extended). We use an AVL tree, implemented using
  recursion, but with some tricky optimizations to its height-balancing code
  (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

  A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
  positive sign, even though they link "backward" (i.e., toward lower bytecode
  address). We don't want to waste space and search time in the AVL tree for
  such temporary backpatch deltas, so we use a single-bit wildcard scheme to
  tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
  in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
  target, or is still awaiting backpatching.

  Note that backpatch chains would present a problem for BuildSpanDepTable,
  which inspects bytecode to build cg->spanDeps on demand, when the first
  short jump offset overflows. To solve this temporary problem, we emit a
  proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
  nuses/ndefs counts help keep the stack balanced, but whose opcode format
  distinguishes its backpatch delta immediate operand from a normal jump
  offset.
 */
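/*
 * Rebalance the AVL subtree rooted at *jtp after an insertion, using single
 * or double rotations. Returns 1 if the subtree height changed, else 0.
 */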
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}

typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

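/*
 * Insert a jump target for args->offset into the AVL tree rooted at *jtp,
 * reusing an existing node for a duplicate offset. On success args->node
 * points at the target; the return value is the subtree height delta used
 * by the recursive rebalancing.
 */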
static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif

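/*
 * Resolve sd's target to the absolute offset sd->top + off, registering it
 * in the jump-target AVL tree. Fails if off exceeds the extended (32-bit)
 * jump offset range.
 */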
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)

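/*
 * Append a span-dependency record for the jump offset operand at pc2 within
 * the opcode at pc, doubling the cg->spanDeps array as needed, and stash the
 * record's index (or SPANDEP_INDEX_HUGE) in the 16-bit operand at pc2.
 */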
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = pc - CG_BASE(cg);
    sd->offset = sd->before = pc2 - CG_BASE(cg);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}

static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}

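/*
 * Scan the bytecode generated since the last span-dep reset and record a
 * span-dependency entry for every jump and switch offset operand found.
 */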
static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}

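/*
 * Fetch the span-dependency record for the jump operand at pc, either
 * directly via the index stored in the operand or, for SPANDEP_INDEX_HUGE,
 * by binary search on the record's "before" offset.
 */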
static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = pc - CG_BASE(cg);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}

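/*
 * Record the backpatch-chain delta for the jump at pc, either directly in
 * its 16-bit operand or, once a span-dep table exists (building it on first
 * overflow), in the jump's JSSpanDep record.
 */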
static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}

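/*
 * Add delta to the offset of every jump target above pivot. The recursion
 * skips a node's left subtree when the node itself is at or below the pivot,
 * since all offsets in that subtree are smaller still.
 */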
static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}

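/*
 * Binary-search cg->spanDeps (starting at index lo) for the record whose
 * "before" offset is nearest at or above offset; return guard if there is
 * no such record.
 */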
static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}

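/*
 * Return the whole jump-target tree to cg->jtFreeList, threading the free
 * list through the JT_LEFT kid pointers.
 */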
static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}

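/*
 * Perform span-dependent instruction selection: repeatedly extend any short
 * jump whose span no longer fits in 16 bits (extending one jump can push
 * others out of range), then grow the bytecode, move the affected code runs,
 * rewrite all jump offsets, and fix up source and try notes to match.
 */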
static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        JSTokenStream *ts = &cg->compiler->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(limit - base);
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    cx->free(cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}

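/*
 * Emit a short-form jump with offset off, recording a span dependency for
 * it once off overflows the 16-bit range (building the span-dep table on
 * first overflow) or whenever the table already exists.
 */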
static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}

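/*
 * Read back the jump offset at pc: straight from the operand when there is
 * no span-dep table, else from the jump's JSSpanDep record, which holds
 * either a backpatch delta or a resolved target offset.
 */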
static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}

JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}

bool
JSTreeContext::inStatement(JSStmtType type)
{
    for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return true;
    }
    return false;
}

void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    stmt->blockid = tc->blockid();
    SET_STATEMENT_TOP(stmt, top);
    stmt->label = NULL;
    JS_ASSERT(!stmt->blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}

void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    STOBJ_SET_PARENT(blockObj, tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->blockObj = blockObj;
}

/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}

/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO

static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}

/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;

#undef FLUSH_POPS
}

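/*
 * Emit a non-local goto from the current statement to toStmt, first fixing
 * up the stack (see EmitNonLocalJumpFixup), then emitting a backpatch op
 * chained through *lastp, with an optional source note for the decompiler.
 */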
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}

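/*
 * Walk the backpatch chain ending at offset last, rewriting each
 * JSOP_BACKPATCH* op as op and pointing its jump offset at target.
 */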
static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = target - pc;
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}

void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = STOBJ_GET_PARENT(stmt->blockObj);
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}

JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(cg);
    return JS_TRUE;
}

JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;
    JSAtomListElement *ale;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constList. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        ale = cg->constList.add(cg->compiler, atom);
        if (!ale)
            return JS_FALSE;
        ALE_SET_VALUE(ale, v);
    }
    return JS_TRUE;
}

JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    if (!stmt)
        stmt = tc->topScopeStmt;
    for (; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->blockObj;
        JS_ASSERT(obj->getClass() == &js_BlockClass);
        scope = OBJ_SCOPE(obj);
        sprop = scope->lookup(ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}

/*
 * Check if the attributes describe a property holding a compile-time constant
 * or a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))

/*
 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
 * name defining a constant.
 */
1560     static JSBool
1561     LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1562     jsval *vp)
1563     {
1564     JSBool ok;
1565     JSStmtInfo *stmt;
1566     JSAtomListElement *ale;
1567 siliconforks 460 JSObject *obj, *objbox;
1568 siliconforks 332 JSProperty *prop;
1569     uintN attrs;
1570    
1571     /*
1572     * Chase down the cg stack, but only until we reach the outermost cg.
1573     * This enables propagating consts from top-level into switch cases in a
1574     * function compiled along with the top-level script.
1575     */
1576     *vp = JSVAL_HOLE;
1577     do {
1578 siliconforks 460 if (cg->flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
1579     /* XXX this will need revising if 'const' becomes block-scoped. */
1580     stmt = js_LexicalLookup(cg, atom, NULL);
1581 siliconforks 332 if (stmt)
1582     return JS_TRUE;
1583    
1584 siliconforks 460 ale = cg->constList.lookup(atom);
1585 siliconforks 332 if (ale) {
1586     JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
1587     *vp = ALE_VALUE(ale);
1588     return JS_TRUE;
1589     }
1590    
1591     /*
1592     * Try looking in the variable object for a direct property that
1593     * is readonly and permanent. We know such a property can't be
1594     * shadowed by another property on obj's prototype chain, or a
1595     * with object or catch variable; nor can prop's value be changed,
1596     * nor can prop be deleted.
1597     */
1598 siliconforks 460 if (cg->flags & TCF_IN_FUNCTION) {
1599     if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
1600 siliconforks 332 break;
1601     } else {
1602 siliconforks 460 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
1603     obj = cg->scopeChain;
1604 siliconforks 507 ok = obj->lookupProperty(cx, ATOM_TO_JSID(atom), &objbox, &prop);
1605 siliconforks 332 if (!ok)
1606     return JS_FALSE;
1607 siliconforks 460 if (objbox == obj) {
1608 siliconforks 332 /*
1609     * We're compiling code that will be executed immediately,
1610     * not re-executed against a different scope chain and/or
1611     * variable object. Therefore we can get constant values
1612     * from our variable object here.
1613     */
1614 siliconforks 507 ok = obj->getAttributes(cx, ATOM_TO_JSID(atom), prop, &attrs);
1615 siliconforks 332 if (ok && IS_CONSTANT_PROPERTY(attrs)) {
1616 siliconforks 507 ok = obj->getProperty(cx, ATOM_TO_JSID(atom), vp);
1617 siliconforks 332 JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
1618     }
1619     }
1620     if (prop)
1621 siliconforks 507 objbox->dropProperty(cx, prop);
1622 siliconforks 332 if (!ok)
1623     return JS_FALSE;
1624     if (prop)
1625     break;
1626     }
1627     }
1628 siliconforks 460 } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
1629 siliconforks 332 return JS_TRUE;
1630     }

/*
 * Return JSOP_NOP to indicate that the index fits in 2 bytes and no index
 * segment reset instruction is necessary, JSOP_FALSE to indicate an error,
 * or either JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode
 * to issue after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have at most 3 bytes for an index, and we check for INDEX_LIMIT
     * overflow only for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}

/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.cpp). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
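
/*
 * Worked example (added sketch, not from the original source): emitting an
 * op with index 0x12345 gives index segment 0x12345 >> 16 == 1, so the
 * emitter should issue roughly
 *
 *   JSOP_INDEXBASE1                  prefix selecting segment 1
 *   op with 16-bit immediate 0x2345  low 16 bits of the index
 *   JSOP_RESETBASE0                  suffix restoring the zero segment
 *
 * whereas an index below 0x10000 needs neither prefix nor suffix.
 */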

/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO

static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = cg->atomList.add(cg->compiler, pn->pn_atom);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}
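
/*
 * Note (added illustration, not from the original source): the special case
 * above means that a property get such as 'a.length' compiles to the
 * dedicated JSOP_LENGTH opcode rather than JSOP_GETPROP with a "length"
 * atom index.
 */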

static JSBool
EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
}

/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}

/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * JSCompiler::compileScript; see comments there.
 *
 * The function returns -1 on failure.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->flags & TCF_IN_FUNCTION) {
        slot += cg->fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
                                        JSREPORT_ERROR,
                                        JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}
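
/*
 * Example (added sketch, assuming this engine's let blocks): in
 *
 *   function f() { var a, b, c; let (x = 1) { return x; } }
 *
 * a block local x at stack depth 0 would be adjusted by nvars == 3, so block
 * locals are addressed above the function's vars in the shared slot space.
 */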

static bool
EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
{
    JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
    if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
        return false;

    JSObject *blockObj = pn->pn_objbox->object;
    jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
    if (depth < 0)
        return false;

    for (uintN slot = JSSLOT_FREE(&js_BlockClass),
               limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
         slot < limit; slot++) {
        jsval v = STOBJ_GET_SLOT(blockObj, slot);

        /* Beware the empty destructuring dummy. */
        if (JSVAL_IS_VOID(v)) {
            JS_ASSERT(slot + 1 <= limit);
            continue;
        }

        JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
        JS_ASSERT(dn->pn_defn);
        JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
        dn->pn_cookie += depth;
#ifdef DEBUG
        for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
        }
#endif
    }

    OBJ_SCOPE(blockObj)->freeslot = JSSLOT_FREE(&js_BlockClass);
    return js_GrowSlots(cx, blockObj, JSSLOT_FREE(&js_BlockClass));
}
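
/*
 * Sketch (added, not from the original source): for a lexical scope such as
 *
 *   { let x = 1, y = 2; ... }
 *
 * the code above emits JSOP_ENTERBLOCK naming the block object, then rebases
 * each definition's pn_cookie by the block's adjusted depth so that later
 * JSOP_GETLOCAL/JSOP_SETLOCAL uses address the right frame slots.
 */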

/*
 * When eval is called from a function, the eval code or function code it
 * compiles may reference upvars that live in the eval-calling function. The
 * eval-invoked compiler does not have explicit definitions for these upvars
 * and we do not attempt to create them a priori (by inspecting the function's
 * args and vars) -- we could, but we'd take an avoidable penalty for each
 * function local not referenced by any upvar. Instead, we map such upvars
 * lazily, growing upvarMap.vector by powers of two.
 *
 * This function knows that it is called with pn pointing to a PN_NAME-arity
 * node, that cg->compiler->callerFrame has a non-null fun member, and that
 * the static level of cg is at least one greater than the eval-calling
 * function's static level.
 */
static bool
MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
{
    JSContext *cx = cg->compiler->context;
    JSFunction *fun = cg->compiler->callerFrame->fun;
    uintN upvarLevel = fun->u.i.script->staticLevel;

    JSFunctionBox *funbox = cg->funbox;
    if (funbox) {
        /*
         * Treat top-level function definitions as escaping (i.e., as
         * funargs), required since we compile each such top-level function or
         * statement and throw away the AST, so we can't yet see all funarg
         * uses of this function being compiled (cg->funbox->object). See bug
         * 493177.
         */
        if (funbox->level == fun->u.i.script->staticLevel + 1U &&
            !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
            JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
                         ((JSFunction *) funbox->object)->atom);
            return true;
        }

        while (funbox->level >= upvarLevel) {
            if (funbox->node->pn_dflags & PND_FUNARG)
                return true;
            funbox = funbox->parent;
            if (!funbox)
                break;
        }
    }

    JSAtom *atom = pn->pn_atom;

    uintN index;
    JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
    if (localKind == JSLOCAL_NONE)
        return true;

    JS_ASSERT(cg->staticLevel > upvarLevel);
    if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
        return true;

    JSAtomListElement *ale = cg->upvarList.lookup(atom);
    if (!ale) {
        if ((cg->flags & TCF_IN_FUNCTION) &&
            !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
            return false;
        }

        ale = cg->upvarList.add(cg->compiler, atom);
        if (!ale)
            return false;
        JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

        uint32 *vector = cg->upvarMap.vector;
        uint32 length = cg->upvarMap.length;

        JS_ASSERT(ALE_INDEX(ale) <= length);
        if (ALE_INDEX(ale) == length) {
            length = 2 * JS_MAX(2, length);
            vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
            if (!vector)
                return false;
            cg->upvarMap.vector = vector;
            cg->upvarMap.length = length;
        }

        if (localKind != JSLOCAL_ARG)
            index += fun->nargs;
        JS_ASSERT(index < JS_BIT(16));

        uintN skip = cg->staticLevel - upvarLevel;
        vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
    }

    pn->pn_op = JSOP_GETUPVAR;
    pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
    pn->pn_dflags |= PND_BOUND;
    return true;
}
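
/*
 * Example (added sketch, not from the original source): given
 *
 *   function outer() { var v = 42; return eval("v"); }
 *
 * the eval-compiled code has no definition for v, so the code above finds v
 * in outer via js_LookupLocal and records an upvar cookie whose skip count
 * is the static-level distance from the eval code down to outer's frame.
 */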

/*
 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
 * It returns false on error, true on success.
 *
 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
 * pn->pn_op was modified, if this function finds an argument or local variable
 * name, PND_CONST will be set in pn_dflags for read-only properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
 * in js_EmitTree.
 */
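
/*
 * Illustration (added, not from the original source): in
 *
 *   function g(a) { var b; return a + b; }
 *
 * the uses of a and b should bind to JSOP_GETARG (argument slot 0) and
 * JSOP_GETLOCAL (variable slot 0) respectively, with the slot stored in
 * pn_cookie and PND_BOUND set, instead of emitting generic JSOP_NAME lookups.
 */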
static JSBool
BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    JSDefinition *dn;
    JSOp op;
    JSAtom *atom;
    uint32 cookie;
    JSDefinition::Kind dn_kind;
    JSAtomListElement *ale;
    uintN index;

    JS_ASSERT(pn->pn_type == TOK_NAME);

    /* Idempotency tests come first, since we may be called more than once. */
    if (pn->pn_dflags & PND_BOUND)
        return JS_TRUE;

    /* No cookie is initialized for these two; they're pre-bound by definition. */
    JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);

    /*
     * The parser linked all uses (including forward references) to their
     * definitions, unless a with statement or direct eval intervened.
     */
    if (pn->pn_used) {
        JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
        dn = pn->pn_lexdef;
        JS_ASSERT(dn->pn_defn);
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else {
        if (!pn->pn_defn)
            return JS_TRUE;
        dn = (JSDefinition *) pn;
    }

    op = PN_OP(pn);
    if (op == JSOP_NOP)
        return JS_TRUE;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    atom = pn->pn_atom;
    cookie = dn->pn_cookie;
    dn_kind = dn->kind();

    /*
     * Turn attempts to mutate const-declared bindings into get ops (for
     * pre-increment and pre-decrement ops, our caller will have to emit
     * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
     *
     * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
     * bindings visible to the compiler are permanent in JS unless the
     * declaration originates in eval code. We detect eval code by testing
     * cg->compiler->callerFrame, which is set only by eval or a debugger
     * equivalent.
     *
     * Note that this callerFrame non-null test must be qualified by testing
     * !cg->funbox to exclude function code nested in eval code, which is not
     * subject to the deletable binding exception.
     */
    switch (op) {
      case JSOP_NAME:
      case JSOP_SETCONST:
        break;
      case JSOP_DELNAME:
        if (dn_kind != JSDefinition::UNKNOWN) {
            if (cg->compiler->callerFrame && !cg->funbox)
                JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
            else
                pn->pn_op = JSOP_FALSE;
            pn->pn_dflags |= PND_BOUND;
            return JS_TRUE;
        }
        break;
      default:
        if (pn->isConst())
            pn->pn_op = op = JSOP_NAME;
    }
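
    /*
     * Example (added sketch, not from the original source): for
     *
     *   const k = 1; k = 2;
     *
     * the assignment's JSOP_SETNAME is demoted to JSOP_NAME above, so the
     * attempted mutation should compile as a get, leaving k's value
     * unchanged; and 'delete k' on such a known binding becomes JSOP_FALSE
     * outside eval code because the binding is permanent.
     */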

    if (cookie == FREE_UPVAR_COOKIE) {
        JSStackFrame *caller = cg->compiler->callerFrame;
        if (caller) {
            JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (cg->flags & TCF_IN_FOR_INIT)
                return JS_TRUE;

            JS_ASSERT(caller->script);
            if (!caller->fun)
                return JS_TRUE;

            /*
             * Make sure the variable object used by the compiler to initialize
             * parent links matches the caller's varobj. Compile-n-go compiler-
             * created function objects have the top-level cg's scopeChain set
             * as their parent by JSCompiler::newFunction.
             */
            JSObject *scopeobj = (cg->flags & TCF_IN_FUNCTION)
                                 ? STOBJ_GET_PARENT(FUN_OBJECT(cg->fun))
                                 : cg->scopeChain;
            if (scopeobj != caller->varobj)
                return JS_TRUE;

            /*
             * We are compiling eval or debug script inside a function frame
             * and the scope chain matches the function's variable object.
             * Optimize access to the function's arguments, variables, and
             * arguments object.
             */
            if (op != JSOP_NAME)
                return JS_TRUE;

            /*
             * Generator functions may be resumed from any call stack, which
             * defeats the display optimization to static link searching used
             * by JSOP_{GET,CALL}UPVAR.
             */
            if (cg->flags & TCF_FUN_IS_GENERATOR)
                return JS_TRUE;

            return MakeUpvarForEval(pn, cg);
        }
        return JS_TRUE;
    }

    if (dn->pn_dflags & PND_GVAR) {
        /*
         * If this is a global reference from within a function, leave pn_op
         * as JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function
         * code if only we could depend on the global frame's slots being
         * valid for all calls to the function.
         */
        if (cg->flags & TCF_IN_FUNCTION)
            return JS_TRUE;

        /*
         * We are optimizing global variables and there may be no pre-existing
         * global property named atom when this global script runs. If atom
         * was declared via const or var, optimize pn to access fp->vars using
         * the appropriate JSOP_*GVAR op.
         *
         * FIXME: should be able to optimize global function access too.
         */
        JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);

        switch (op) {
          case JSOP_NAME:     op = JSOP_GETGVAR; break;
          case JSOP_SETNAME:  op = JSOP_SETGVAR; break;
          case JSOP_SETCONST: /* NB: no change */ break;
          case JSOP_INCNAME:  op = JSOP_INCGVAR; break;
          case JSOP_NAMEINC:  op = JSOP_GVARINC; break;
          case JSOP_DECNAME:  op = JSOP_DECGVAR; break;
          case JSOP_NAMEDEC:  op = JSOP_GVARDEC; break;
          case JSOP_FORNAME:  /* NB: no change */ break;
          case JSOP_DELNAME:  /* NB: no change */ break;
          default: JS_NOT_REACHED("gvar");
        }
        pn->pn_op = op;
        pn->pn_cookie = cookie;
        pn->pn_dflags |= PND_BOUND;
        return JS_TRUE;
    }
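
    /*
     * Note (added illustration, not from the original source): per the switch
     * above, a top-level 'var x; x = 1;' in compile-and-go code stores
     * through JSOP_SETGVAR instead of a generic JSOP_SETNAME lookup.
     */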

    uintN level = UPVAR_FRAME_SKIP(cookie);
    JS_ASSERT(cg->staticLevel >= level);

    /*
     * A JSDefinition witnessed as a declaration by the parser cannot be an
     * upvar, unless it is the degenerate kind of upvar selected above (in the
     * code before the PND_GVAR test) for the special case of compile-and-go
     * code generated from eval called from a function, where the eval code
     * uses local vars defined in the function. We detect this upvar-for-eval
     * case by checking dn's op.
     */
    if (PN_OP(dn) == JSOP_GETUPVAR) {
        JS_ASSERT(cg->staticLevel >= level);
        if (op != JSOP_NAME)
            return JS_TRUE;

        JSStackFrame *caller = cg->compiler->callerFrame;
        JS_ASSERT(caller);
        JS_ASSERT(caller->script);

        JSTreeContext *tc = cg;
        while (tc->staticLevel != level)
            tc = tc->parent;
        JS_ASSERT(tc->flags & TCF_COMPILING);

        JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
        JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
        JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);

        /*
         * Don't generate upvars on the left side of a for loop. See
         * bug 470758 and bug 520513.
         */
        if (evalcg->flags & TCF_IN_FOR_INIT)
            return JS_TRUE;

        if (cg->staticLevel == level) {
            pn->pn_op = JSOP_GETUPVAR;
            pn->pn_cookie = cookie;
            pn->pn_dflags |= PND_BOUND;
            return JS_TRUE;
        }

        return MakeUpvarForEval(pn, cg);
    }

    uintN skip = cg->staticLevel - level;
    if (skip != 0) {
        JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
        JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie) != CALLEE_UPVAR_SLOT,
                     cg->lexdeps.lookup(atom));
        JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
        JS_ASSERT(cg->fun->u.i.skipmin <= skip);

        /*
         * If op is a mutating opcode, if this upvar's static level is too big
         * to index into the display, or if the function is heavyweight, we
         * fall back on JSOP_*NAME*.
         */
        if (op != JSOP_NAME)
            return JS_TRUE;
        if (level >= JS_DISPLAY_SIZE)
            return JS_TRUE;
        if (cg->flags & TCF_FUN_HEAVYWEIGHT)
            return JS_TRUE;

        if (FUN_FLAT_CLOSURE(cg->fun)) {
            op = JSOP_GETDSLOT;
        } else {
            /*
             * The function we're compiling may not be heavyweight, but if it
             * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
             * JSCompiler::analyzeFunctions has arranged for this function's
             * enclosing functions to be heavyweight, so we can safely stick
             * with JSOP_NAME/JSOP_CALLNAME.
             */
            if (cg->funbox->node->pn_dflags & PND_FUNARG)
                return JS_TRUE;

            /*
             * Generator functions may be resumed from any call stack, which
             * defeats the display optimization to static link searching used
             * by JSOP_{GET,CALL}UPVAR.
             */
            if (cg->flags & TCF_FUN_IS_GENERATOR)
                return JS_TRUE;

            op = JSOP_GETUPVAR;
        }

        ale = cg->upvarList.lookup(atom);
        if (ale) {
            index = ALE_INDEX(ale);
        } else {
            if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
                return JS_FALSE;

            ale = cg->upvarList.add(cg->compiler, atom);
            if (!ale)
                return JS_FALSE;
            index = ALE_INDEX(ale);
            JS_ASSERT(index == cg->upvarList.count - 1);

            uint32 *vector = cg->upvarMap.vector;
            if (!vector) {
                uint32 length = cg->lexdeps.count;

                vector = (uint32 *) js_calloc(length * sizeof *vector);
                if (!vector) {
                    JS_ReportOutOfMemory(cx);
                    return JS_FALSE;
                }
                cg->upvarMap.vector = vector;
                cg->upvarMap.length = length;
            }

            uintN slot = UPVAR_FRAME_SLOT(cookie);
            if (slot != CALLEE_UPVAR_SLOT && dn_kind != JSDefinition::ARG) {
                JSTreeContext *tc = cg;
                do {
                    tc = tc->parent;
                } while (tc->staticLevel != level);
                if (tc->flags & TCF_IN_FUNCTION)
                    slot += tc->fun->nargs;
            }

            vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
        }

        pn->pn_op = op;
        pn->pn_cookie = index;
        pn->pn_dflags |= PND_BOUND;
        return JS_TRUE;
    }

    /*
     * We are compiling a function body and may be able to optimize a name to
     * a stack slot. Look for an argument or variable in the function, rewrite
     * pn_op, and update pn accordingly.
     */
    switch (dn_kind) {
      case JSDefinition::UNKNOWN:
        return JS_TRUE;

      case JSDefinition::LET:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          default: JS_NOT_REACHED("let");
        }
        break;

      case JSDefinition::ARG:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETARG; break;
          case JSOP_SETNAME:  op = JSOP_SETARG; break;
          case JSOP_INCNAME:  op = JSOP_INCARG; break;
          case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
          case JSOP_DECNAME:  op = JSOP_DECARG; break;
          case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORARG; break;
          default: JS_NOT_REACHED("arg");
        }
        JS_ASSERT(!pn->isConst());
        break;

      case JSDefinition::VAR:
        if (PN_OP(dn) == JSOP_CALLEE) {
            JS_ASSERT(op != JSOP_CALLEE);
            JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);

            switch (op) {
              default:
                /*
                 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as
                 * we cannot be sure cg->fun is not something of the form:
                 *
                 *   var ff = (function f(s) { eval(s); return f; });
                 *
                 * where a caller invokes ff("var f = 42"). The result returned
                 * for such an invocation must be 42, since the callee name is
                 * lexically bound in an outer declarative environment from the
                 * function's activation. See jsfun.cpp:call_resolve.
                 */
                JS_ASSERT(op != JSOP_DELNAME);
                if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
                    op = JSOP_CALLEE;
                    pn->pn_dflags |= PND_CONST;
                }
                break;
            }
            pn->pn_op = op;
            pn->pn_dflags |= PND_BOUND;
            return JS_TRUE;
        }
        /* FALL THROUGH */

      default:
        JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
                     dn_kind == JSDefinition::VAR ||
                     dn_kind == JSDefinition::CONST);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          default: JS_NOT_REACHED("local");
        }
        JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
        break;
    }

    JS_ASSERT(op != PN_OP(pn));
    pn->pn_op = op;
    pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
    pn->pn_dflags |= PND_BOUND;
    return JS_TRUE;
}

/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects. For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * pop bytecode.
 */
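
/*
 * Examples (added sketch, not from the original source): as expression
 * statements, a bare number '3;' or a bound local 'b;' leaves *answer false
 * (useless), while 'f();' or 'o.p;' sets *answer to true, since calls and
 * property gets may run arbitrary effectful code.
 */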
static JSBool
CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
                 JSBool *answer)
{
    JSBool ok;
    JSParseNode *pn2;

    ok = JS_TRUE;
    if (!pn || *answer)
        return ok;

    switch (pn->pn_arity) {
      case PN_FUNC:
        /*
         * A named function, contrary to ES3, is no longer useful, because we
         * bind its name lexically (using JSOP_CALLEE) instead of creating an
         * Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        *answer = JS_FALSE;
        break;

      case PN_LIST:
        if (pn->pn_op == JSOP_NOP ||
            pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
            pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, cg, pn2, answer);
        } else {
            /*
             * All invocation operations (construct: TOK_NEW, call: TOK_LP)
             * are presumed to be useful, because they may have side effects
             * even if their main effect (their return value) is discarded.
             *
             * TOK_LB binary trees of 3 or more nodes are flattened into lists
             * to avoid too much recursion. All such lists must be presumed
             * to be useful because each index operation could invoke a getter
             * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
             * does not apply here: arguments[i][j] might invoke a getter).
             *
             * Likewise, array and object initialisers may call prototype
             * setters (the __defineSetter__ built-in, and writable __proto__
             * on Array.prototype create this hazard). Initialiser list nodes
             * have JSOP_NEWINIT in their pn_op.
             */
            *answer = JS_TRUE;
        }
        break;

      case PN_TERNARY:
        ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid3, answer);
        break;

      case PN_BINARY:
        if (pn->pn_type == TOK_ASSIGN) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            pn2 = pn->pn_left;
            if (pn2->pn_type != TOK_NAME) {
                *answer = JS_TRUE;
            } else {
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
                    return JS_FALSE;
                if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
                    *answer = JS_TRUE;
            }
        } else {
            if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
                pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
                /*
                 * ||, &&, ===, and !== do not convert their operands via
                 * toString or valueOf method calls.
                 */
                ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
                     CheckSideEffects(cx, cg, pn->pn_right, answer);
            } else {
                /*
                 * We can't easily prove that neither operand ever denotes an
                 * object with a toString or valueOf method.
                 */
                *answer = JS_TRUE;
            }
        }
        break;

      case PN_UNARY:
        switch (pn->pn_type) {
          case TOK_DELETE:
            pn2 = pn->pn_kid;
            switch (pn2->pn_type) {
              case TOK_NAME:
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (pn2->isConst()) {
                    *answer = JS_FALSE;
                    break;
                }
                /* FALL THROUGH */
              case TOK_DOT:
#if JS_HAS_XML_SUPPORT
              case TOK_DBLDOT:
#endif
#if JS_HAS_LVALUE_RETURN
              case TOK_LP:
#endif
              case TOK_LB:
                /* All these delete addressing modes have effects too. */
                *answer = JS_TRUE;
                break;
              default:
                ok = CheckSideEffects(cx, cg, pn2, answer);
                break;
            }
            break;

          case TOK_UNARYOP:
            if (pn->pn_op == JSOP_NOT) {
                /* ! does not convert its operand via toString or valueOf. */
                ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
                break;
            }
            /* FALL THROUGH */

          default:
            /*
             * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
             * have direct effects. Of the remaining unary-arity node types,
             * we can't easily prove that the operand never denotes an object
             * with a toString or valueOf method.
             */
            *answer = JS_TRUE;
            break;
        }
        break;

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
            if (!BindNameToSlot(cx, cg, pn))
                return JS_FALSE;
            if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
                pn->pn_cookie == FREE_UPVAR_COOKIE) {
                /*
                 * Not an argument or local variable use, and not a use of an
                 * unshadowed named function expression's given name, so this
                 * expression could invoke a getter that has side effects.
                 */
                *answer = JS_TRUE;
            }
        }
        pn2 = pn->maybeExpr();
        if (pn->pn_type == TOK_DOT) {
            if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (!(pn2->pn_op == JSOP_ARGUMENTS &&
                  pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
                /*
                 * Any dotted property reference could call a getter, except
                 * for arguments.length where arguments is unambiguous.
                 */
                *answer = JS_TRUE;
            }
        }
        ok = CheckSideEffects(cx, cg, pn2, answer);
        break;

      case PN_NAMESET:
        ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
        break;

      case PN_NULLARY:
        if (pn->pn_type == TOK_DEBUGGER)
            *answer = JS_TRUE;
        break;
    }
    return ok;
}
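
/*
 * Note (added illustration, not from the original source): when a name is
 * the callee of a call, as in 'f()', callContext is true below and the get
 * op is upgraded to its JSOP_CALL* form (e.g. JSOP_GETLOCAL becomes
 * JSOP_CALLLOCAL), presumably so the interpreter can also set up the |this|
 * value the call needs.
 */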

static JSBool
EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
           JSBool callContext)
{
    JSOp op;

    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;
    op = PN_OP(pn);

    if (callContext) {
        switch (op) {
          case JSOP_NAME:
            op = JSOP_CALLNAME;
            break;
          case JSOP_GETGVAR:
            op = JSOP_CALLGVAR;
            break;
          case JSOP_GETARG:
            op = JSOP_CALLARG;
            break;
          case JSOP_GETLOCAL:
            op = JSOP_CALLLOCAL;
            break;
          case JSOP_GETUPVAR:
            op = JSOP_CALLUPVAR;
            break;
          case JSOP_GETDSLOT:
            op = JSOP_CALLDSLOT;
            break;
          default:
            JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
            break;
        }
    }

    if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {