Contents of /trunk/js/jsemit.cpp



Revision 332
Thu Oct 23 19:03:33 2008 UTC by siliconforks
File size: 242117 bytes
Add SpiderMonkey from Firefox 3.1b1.

The following directories and files were removed:
correct/, correct.js
liveconnect/
nanojit/
t/
v8/
vprof/
xpconnect/
all JavaScript files (Y.js, call.js, if.js, math-partial-sums.js, md5.js, perfect.js, trace-test.js, trace.js)


1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
3 *
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 *
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
11 *
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
16 *
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
19 *
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
24 *
25 * Contributor(s):
26 *
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
38 *
39 * ***** END LICENSE BLOCK ***** */
40
41 /*
42 * JS bytecode generation.
43 */
44 #include "jsstddef.h"
45 #ifdef HAVE_MEMORY_H
46 #include <memory.h>
47 #endif
48 #include <string.h>
49 #include "jstypes.h"
50 #include "jsarena.h" /* Added by JSIFY */
51 #include "jsutil.h" /* Added by JSIFY */
52 #include "jsbit.h"
53 #include "jsprf.h"
54 #include "jsapi.h"
55 #include "jsatom.h"
56 #include "jsbool.h"
57 #include "jscntxt.h"
58 #include "jsversion.h"
59 #include "jsemit.h"
60 #include "jsfun.h"
61 #include "jsnum.h"
62 #include "jsopcode.h"
63 #include "jsparse.h"
64 #include "jsregexp.h"
65 #include "jsscan.h"
66 #include "jsscope.h"
67 #include "jsscript.h"
68 #include "jsautooplen.h"
69 #include "jsstaticcheck.h"
70
71 /* Allocation chunk counts, must be powers of two in general. */
72 #define BYTECODE_CHUNK 256 /* code allocation increment */
73 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
74 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
75
76 /* Macros to compute byte sizes from typed element counts. */
77 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
78 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
79 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
80
81 static JSBool
82 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
83 uintN stackDepth, size_t start, size_t end);
84
85 JS_FRIEND_API(void)
86 js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg, JSParseContext *pc,
87 JSArenaPool *codePool, JSArenaPool *notePool,
88 uintN lineno)
89 {
90 memset(cg, 0, sizeof *cg);
91 TREE_CONTEXT_INIT(&cg->treeContext, pc);
92 cg->codePool = codePool;
93 cg->notePool = notePool;
94 cg->codeMark = JS_ARENA_MARK(codePool);
95 cg->noteMark = JS_ARENA_MARK(notePool);
96 cg->current = &cg->main;
97 cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
98 ATOM_LIST_INIT(&cg->atomList);
99 cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
100 ATOM_LIST_INIT(&cg->constList);
101 ATOM_LIST_INIT(&cg->upvarList);
102 }
103
104 JS_FRIEND_API(void)
105 js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
106 {
107 TREE_CONTEXT_FINISH(cx, &cg->treeContext);
108 JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
109 JS_ARENA_RELEASE(cg->notePool, cg->noteMark);
110
111 /* NB: non-null only after OOM. */
112 if (cg->spanDeps)
113 JS_free(cx, cg->spanDeps);
114
115 if (cg->upvarMap.vector)
116 JS_free(cx, cg->upvarMap.vector);
117 }
118
119 static ptrdiff_t
120 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
121 {
122 jsbytecode *base, *limit, *next;
123 ptrdiff_t offset, length;
124 size_t incr, size;
125
126 base = CG_BASE(cg);
127 next = CG_NEXT(cg);
128 limit = CG_LIMIT(cg);
129 offset = PTRDIFF(next, base, jsbytecode);
130 if (next + delta > limit) {
131 length = offset + delta;
132 length = (length <= BYTECODE_CHUNK)
133 ? BYTECODE_CHUNK
134 : JS_BIT(JS_CeilingLog2(length));
135 incr = BYTECODE_SIZE(length);
136 if (!base) {
137 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
138 } else {
139 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
140 incr -= size;
141 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
142 }
143 if (!base) {
144 js_ReportOutOfScriptQuota(cx);
145 return -1;
146 }
147 CG_BASE(cg) = base;
148 CG_LIMIT(cg) = base + length;
149 CG_NEXT(cg) = base + offset;
150 }
151 return offset;
152 }
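/*
 * A worked example of the growth policy above (a sketch): with 300
 * bytecodes already emitted and a 5-byte instruction arriving,
 * offset + delta == 305 exceeds BYTECODE_CHUNK (256), so the new capacity
 * becomes JS_BIT(JS_CeilingLog2(305)) == 512 bytecodes -- the least power
 * of two that fits the request. Until the first 256 slots fill, the buffer
 * stays at one BYTECODE_CHUNK.
 */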
153
154 static void
155 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
156 {
157 jsbytecode *pc;
158 JSOp op;
159 const JSCodeSpec *cs;
160 uintN depth;
161 intN nuses, ndefs;
162
163 pc = CG_CODE(cg, target);
164 op = (JSOp) *pc;
165 cs = &js_CodeSpec[op];
166 if (cs->format & JOF_TMPSLOT_MASK) {
167 depth = (uintN) cg->stackDepth +
168 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
169 if (depth > cg->maxStackDepth)
170 cg->maxStackDepth = depth;
171 }
172 nuses = cs->nuses;
173 if (nuses < 0)
174 nuses = js_GetVariableStackUseLength(op, pc);
175 cg->stackDepth -= nuses;
176 JS_ASSERT(cg->stackDepth >= 0);
177 if (cg->stackDepth < 0) {
178 char numBuf[12];
179 JSTokenStream *ts;
180
181 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
182 ts = &cg->treeContext.parseContext->tokenStream;
183 JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
184 js_GetErrorMessage, NULL,
185 JSMSG_STACK_UNDERFLOW,
186 ts->filename ? ts->filename : "stdin",
187 numBuf);
188 }
189 ndefs = cs->ndefs;
190 if (ndefs < 0) {
191 JSObject *blockObj;
192
193 /* We just executed IndexParsedObject */
194 JS_ASSERT(op == JSOP_ENTERBLOCK);
195 JS_ASSERT(nuses == 0);
196 blockObj = cg->objectList.lastPob->object;
197 JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
198 JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));
199
200 OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
201 ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
202 }
203 cg->stackDepth += ndefs;
204 if ((uintN)cg->stackDepth > cg->maxStackDepth)
205 cg->maxStackDepth = cg->stackDepth;
206 }
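/*
 * Example of the depth bookkeeping above: JSOP_ADD has nuses == 2 and
 * ndefs == 1, so emitting it lowers cg->stackDepth by one. JSOP_ENTERBLOCK
 * is the ndefs < 0 case: the number of block locals it pushes is known
 * only from the block object most recently indexed by IndexParsedObject,
 * hence the OBJ_BLOCK_COUNT lookup.
 */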
207
208 ptrdiff_t
209 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
210 {
211 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
212
213 if (offset >= 0) {
214 *CG_NEXT(cg)++ = (jsbytecode)op;
215 UpdateDepth(cx, cg, offset);
216 }
217 return offset;
218 }
219
220 ptrdiff_t
221 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
222 {
223 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
224
225 if (offset >= 0) {
226 jsbytecode *next = CG_NEXT(cg);
227 next[0] = (jsbytecode)op;
228 next[1] = op1;
229 CG_NEXT(cg) = next + 2;
230 UpdateDepth(cx, cg, offset);
231 }
232 return offset;
233 }
234
235 ptrdiff_t
236 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
237 jsbytecode op2)
238 {
239 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
240
241 if (offset >= 0) {
242 jsbytecode *next = CG_NEXT(cg);
243 next[0] = (jsbytecode)op;
244 next[1] = op1;
245 next[2] = op2;
246 CG_NEXT(cg) = next + 3;
247 UpdateDepth(cx, cg, offset);
248 }
249 return offset;
250 }
251
252 ptrdiff_t
253 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
254 {
255 ptrdiff_t length = 1 + (ptrdiff_t)extra;
256 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
257
258 if (offset >= 0) {
259 jsbytecode *next = CG_NEXT(cg);
260 *next = (jsbytecode)op;
261 memset(next + 1, 0, BYTECODE_SIZE(extra));
262 CG_NEXT(cg) = next + length;
263
264 /*
265 * Don't UpdateDepth if op's use-count comes from the immediate
266 * operand yet to be stored in the extra bytes after op.
267 */
268 if (js_CodeSpec[op].nuses >= 0)
269 UpdateDepth(cx, cg, offset);
270 }
271 return offset;
272 }
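/*
 * Usage sketch for the emitters above (a hypothetical caller, not part of
 * this file): emit a unary op, then a forward goto whose 16-bit offset is
 * split into hi/lo immediate bytes, just as EmitJump does further below.
 */
#if 0
static JSBool
EmitNotThenGoto(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t off)
{
    if (js_Emit1(cx, cg, JSOP_NOT) < 0)         /* one byte, pops 1 pushes 1 */
        return JS_FALSE;
    return js_Emit3(cx, cg, JSOP_GOTO,          /* op plus 2 immediate bytes */
                    JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off)) >= 0;
}
#endif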
273
274 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
275 const char js_with_statement_str[] = "with statement";
276 const char js_finally_block_str[] = "finally block";
277 const char js_script_str[] = "script";
278
279 static const char *statementName[] = {
280 "label statement", /* LABEL */
281 "if statement", /* IF */
282 "else statement", /* ELSE */
283 "destructuring body", /* BODY */
284 "switch statement", /* SWITCH */
285 "block", /* BLOCK */
286 js_with_statement_str, /* WITH */
287 "catch block", /* CATCH */
288 "try block", /* TRY */
289 js_finally_block_str, /* FINALLY */
290 js_finally_block_str, /* SUBROUTINE */
291 "do loop", /* DO_LOOP */
292 "for loop", /* FOR_LOOP */
293 "for/in loop", /* FOR_IN_LOOP */
294 "while loop", /* WHILE_LOOP */
295 };
296
297 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
298
299 static const char *
300 StatementName(JSCodeGenerator *cg)
301 {
302 if (!cg->treeContext.topStmt)
303 return js_script_str;
304 return statementName[cg->treeContext.topStmt->type];
305 }
306
307 static void
308 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
309 {
310 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
311 StatementName(cg));
312 }
313
314 /**
315 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
316 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
317 into unconditional (gotos and gosubs), and conditional jumps or branches
318 (which pop a value, test it, and jump depending on its value). Most jumps
319 have just one immediate operand, a signed offset from the jump opcode's pc
320 to the target bytecode. The lookup and table switch opcodes may contain
321 many jump offsets.
322
323 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
324 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
325 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
326 the extended form of the JSOP_OR branch opcode). The unextended or short
327 formats have 16-bit signed immediate offset operands, the extended or long
328 formats have 32-bit signed immediates. The span-dependency problem consists
329 of selecting as few long instructions as possible, or about as few -- since
330 jumps can span other jumps, extending one jump may cause another to need to
331 be extended.
332
333 Most JS scripts are short, so need no extended jumps. We optimize for this
334 case by generating short jumps until we know a long jump is needed. After
335 that point, we keep generating short jumps, but each jump's 16-bit immediate
336 offset operand is actually an unsigned index into cg->spanDeps, an array of
337 JSSpanDep structs. Each struct tells the top offset in the script of the
338 opcode, the "before" offset of the jump (which will be the same as top for
339 simplex jumps, but which will index further into the bytecode array for a
340 non-initial jump offset in a lookup or table switch), the "after" offset
341 adjusted during span-dependent instruction selection (initially the same
342 value as the "before" offset), and the jump target (more below).
343
344 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
345 ensure that all bytecode generated so far can be inspected to discover where
346 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
347 that we generate span-dependency records sorted by their offsets, so we can
348 binary-search when trying to find a JSSpanDep for a given bytecode offset,
349 or the nearest JSSpanDep at or above a given pc.
350
351 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
352 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
353 tells us that we need to binary-search for the cg->spanDeps entry by the
354 jump opcode's bytecode offset (sd->before).
355
356 Jump targets need to be maintained in a data structure that lets us look
357 up an already-known target by its address (jumps may have a common target),
358 and that also lets us update the addresses (script-relative, a.k.a. absolute
359 offsets) of targets that come after a jump target (for when a jump below
360 that target needs to be extended). We use an AVL tree, implemented using
361 recursion, but with some tricky optimizations to its height-balancing code
362 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
363
364 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
365 positive sign, even though they link "backward" (i.e., toward lower bytecode
366 address). We don't want to waste space and search time in the AVL tree for
367 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
368 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
369 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
370 target, or is still awaiting backpatching.
371
372 Note that backpatch chains would present a problem for BuildSpanDepTable,
373 which inspects bytecode to build cg->spanDeps on demand, when the first
374 short jump offset overflows. To solve this temporary problem, we emit a
375 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
376 nuses/ndefs counts help keep the stack balanced, but whose opcode format
377 distinguishes its backpatch delta immediate operand from a normal jump
378 offset.
379 */
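/*
 * Byte-level sketch of the two jump formats described above: a short jump
 * occupies 1 + JUMP_OFFSET_LEN (2) bytes, its extended counterpart
 * 1 + JUMPX_OFFSET_LEN (4), so extending one jump grows the script by two
 * bytes and may push other spans past JUMP_OFFSET_MAX in turn:
 *
 *   JSOP_GOTO  <hi8> <lo8>                        signed 16-bit span
 *   JSOP_GOTOX <b31-24> <b23-16> <b15-8> <b7-0>   signed 32-bit span
 *
 * Once cg->spanDeps exists, the two immediate bytes of a short jump hold
 * not a span but an index into cg->spanDeps (or SPANDEP_INDEX_HUGE), as
 * AddSpanDep and GetSpanDep below arrange.
 */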
380 static int
381 BalanceJumpTargets(JSJumpTarget **jtp)
382 {
383 JSJumpTarget *jt, *jt2, *root;
384 int dir, otherDir, heightChanged;
385 JSBool doubleRotate;
386
387 jt = *jtp;
388 JS_ASSERT(jt->balance != 0);
389
390 if (jt->balance < -1) {
391 dir = JT_RIGHT;
392 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
393 } else if (jt->balance > 1) {
394 dir = JT_LEFT;
395 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
396 } else {
397 return 0;
398 }
399
400 otherDir = JT_OTHER_DIR(dir);
401 if (doubleRotate) {
402 jt2 = jt->kids[otherDir];
403 *jtp = root = jt2->kids[dir];
404
405 jt->kids[otherDir] = root->kids[dir];
406 root->kids[dir] = jt;
407
408 jt2->kids[dir] = root->kids[otherDir];
409 root->kids[otherDir] = jt2;
410
411 heightChanged = 1;
412 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
413 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
414 root->balance = 0;
415 } else {
416 *jtp = root = jt->kids[otherDir];
417 jt->kids[otherDir] = root->kids[dir];
418 root->kids[dir] = jt;
419
420 heightChanged = (root->balance != 0);
421 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
422 }
423
424 return heightChanged;
425 }
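/*
 * Worked single-rotation example (a sketch): inserting offsets 10, 20, 30
 * in order leaves the node for 10 right-heavy with balance +2, so dir ==
 * JT_LEFT and doubleRotate is false; 20 rotates up to the root, 10 and 30
 * end with balance 0, and heightChanged reports that the subtree shrank by
 * one level.
 */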
426
427 typedef struct AddJumpTargetArgs {
428 JSContext *cx;
429 JSCodeGenerator *cg;
430 ptrdiff_t offset;
431 JSJumpTarget *node;
432 } AddJumpTargetArgs;
433
434 static int
435 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
436 {
437 JSJumpTarget *jt;
438 int balanceDelta;
439
440 jt = *jtp;
441 if (!jt) {
442 JSCodeGenerator *cg = args->cg;
443
444 jt = cg->jtFreeList;
445 if (jt) {
446 cg->jtFreeList = jt->kids[JT_LEFT];
447 } else {
448 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
449 sizeof *jt);
450 if (!jt) {
451 js_ReportOutOfScriptQuota(args->cx);
452 return 0;
453 }
454 }
455 jt->offset = args->offset;
456 jt->balance = 0;
457 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
458 cg->numJumpTargets++;
459 args->node = jt;
460 *jtp = jt;
461 return 1;
462 }
463
464 if (jt->offset == args->offset) {
465 args->node = jt;
466 return 0;
467 }
468
469 if (args->offset < jt->offset)
470 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
471 else
472 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
473 if (!args->node)
474 return 0;
475
476 jt->balance += balanceDelta;
477 return (balanceDelta && jt->balance)
478 ? 1 - BalanceJumpTargets(jtp)
479 : 0;
480 }
481
482 #ifdef DEBUG_brendan
483 static int AVLCheck(JSJumpTarget *jt)
484 {
485 int lh, rh;
486
487 if (!jt) return 0;
488 JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
489 lh = AVLCheck(jt->kids[JT_LEFT]);
490 rh = AVLCheck(jt->kids[JT_RIGHT]);
491 JS_ASSERT(jt->balance == rh - lh);
492 return 1 + JS_MAX(lh, rh);
493 }
494 #endif
495
496 static JSBool
497 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
498 ptrdiff_t off)
499 {
500 AddJumpTargetArgs args;
501
502 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
503 ReportStatementTooLarge(cx, cg);
504 return JS_FALSE;
505 }
506
507 args.cx = cx;
508 args.cg = cg;
509 args.offset = sd->top + off;
510 args.node = NULL;
511 AddJumpTarget(&args, &cg->jumpTargets);
512 if (!args.node)
513 return JS_FALSE;
514
515 #ifdef DEBUG_brendan
516 AVLCheck(cg->jumpTargets);
517 #endif
518
519 SD_SET_TARGET(sd, args.node);
520 return JS_TRUE;
521 }
522
523 #define SPANDEPS_MIN 256
524 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
525 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
526
527 static JSBool
528 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
529 ptrdiff_t off)
530 {
531 uintN index;
532 JSSpanDep *sdbase, *sd;
533 size_t size;
534
535 index = cg->numSpanDeps;
536 if (index + 1 == 0) {
537 ReportStatementTooLarge(cx, cg);
538 return JS_FALSE;
539 }
540
541 if ((index & (index - 1)) == 0 &&
542 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
543 size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
544 sdbase = (JSSpanDep *) JS_realloc(cx, sdbase, size + size);
545 if (!sdbase)
546 return JS_FALSE;
547 cg->spanDeps = sdbase;
548 }
549
550 cg->numSpanDeps = index + 1;
551 sd = cg->spanDeps + index;
552 sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
553 sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);
554
555 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
556 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
557 if (off != 0) {
558 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
559 if (off > BPDELTA_MAX) {
560 ReportStatementTooLarge(cx, cg);
561 return JS_FALSE;
562 }
563 }
564 SD_SET_BPDELTA(sd, off);
565 } else if (off == 0) {
566 /* Jump offset will be patched directly, without backpatch chaining. */
567 SD_SET_TARGET(sd, 0);
568 } else {
569 /* The jump offset in off is non-zero, therefore it's already known. */
570 if (!SetSpanDepTarget(cx, cg, sd, off))
571 return JS_FALSE;
572 }
573
574 if (index > SPANDEP_INDEX_MAX)
575 index = SPANDEP_INDEX_HUGE;
576 SET_SPANDEP_INDEX(pc2, index);
577 return JS_TRUE;
578 }
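/*
 * Growth arithmetic for the realloc above (a sketch): (index & (index - 1))
 * == 0 is the power-of-two test, so the table is first allocated with room
 * for SPANDEPS_MIN (256) entries and thereafter doubled exactly when it
 * fills, at index 256, 512, 1024, and so on -- size + size doubles the
 * byte size computed from the current entry count.
 */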
579
580 static jsbytecode *
581 AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
582 {
583 JSOp op;
584 jsbytecode *pc2;
585 ptrdiff_t off;
586 jsint low, high;
587 uintN njumps, indexlen;
588
589 op = (JSOp) *pc;
590 JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
591 pc2 = pc;
592 off = GET_JUMP_OFFSET(pc2);
593 if (!AddSpanDep(cx, cg, pc, pc2, off))
594 return NULL;
595 pc2 += JUMP_OFFSET_LEN;
596 if (op == JSOP_TABLESWITCH) {
597 low = GET_JUMP_OFFSET(pc2);
598 pc2 += JUMP_OFFSET_LEN;
599 high = GET_JUMP_OFFSET(pc2);
600 pc2 += JUMP_OFFSET_LEN;
601 njumps = (uintN) (high - low + 1);
602 indexlen = 0;
603 } else {
604 njumps = GET_UINT16(pc2);
605 pc2 += UINT16_LEN;
606 indexlen = INDEX_LEN;
607 }
608 while (njumps) {
609 --njumps;
610 pc2 += indexlen;
611 off = GET_JUMP_OFFSET(pc2);
612 if (!AddSpanDep(cx, cg, pc, pc2, off))
613 return NULL;
614 pc2 += JUMP_OFFSET_LEN;
615 }
616 return 1 + pc2;
617 }
618
619 static JSBool
620 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
621 {
622 jsbytecode *pc, *end;
623 JSOp op;
624 const JSCodeSpec *cs;
625 ptrdiff_t off;
626
627 pc = CG_BASE(cg) + cg->spanDepTodo;
628 end = CG_NEXT(cg);
629 while (pc != end) {
630 JS_ASSERT(pc < end);
631 op = (JSOp)*pc;
632 cs = &js_CodeSpec[op];
633
634 switch (JOF_TYPE(cs->format)) {
635 case JOF_TABLESWITCH:
636 case JOF_LOOKUPSWITCH:
637 pc = AddSwitchSpanDeps(cx, cg, pc);
638 if (!pc)
639 return JS_FALSE;
640 break;
641
642 case JOF_JUMP:
643 off = GET_JUMP_OFFSET(pc);
644 if (!AddSpanDep(cx, cg, pc, pc, off))
645 return JS_FALSE;
646 /* FALL THROUGH */
647 default:
648 pc += cs->length;
649 break;
650 }
651 }
652
653 return JS_TRUE;
654 }
655
656 static JSSpanDep *
657 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
658 {
659 uintN index;
660 ptrdiff_t offset;
661 int lo, hi, mid;
662 JSSpanDep *sd;
663
664 index = GET_SPANDEP_INDEX(pc);
665 if (index != SPANDEP_INDEX_HUGE)
666 return cg->spanDeps + index;
667
668 offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
669 lo = 0;
670 hi = cg->numSpanDeps - 1;
671 while (lo <= hi) {
672 mid = (lo + hi) / 2;
673 sd = cg->spanDeps + mid;
674 if (sd->before == offset)
675 return sd;
676 if (sd->before < offset)
677 lo = mid + 1;
678 else
679 hi = mid - 1;
680 }
681
682 JS_ASSERT(0);
683 return NULL;
684 }
685
686 static JSBool
687 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
688 ptrdiff_t delta)
689 {
690 JSSpanDep *sd;
691
692 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
693 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
694 SET_JUMP_OFFSET(pc, delta);
695 return JS_TRUE;
696 }
697
698 if (delta > BPDELTA_MAX) {
699 ReportStatementTooLarge(cx, cg);
700 return JS_FALSE;
701 }
702
703 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
704 return JS_FALSE;
705
706 sd = GetSpanDep(cg, pc);
707 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
708 SD_SET_BPDELTA(sd, delta);
709 return JS_TRUE;
710 }
711
712 static void
713 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
714 {
715 if (jt->offset > pivot) {
716 jt->offset += delta;
717 if (jt->kids[JT_LEFT])
718 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
719 }
720 if (jt->kids[JT_RIGHT])
721 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
722 }
723
724 static JSSpanDep *
725 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
726 JSSpanDep *guard)
727 {
728 int num, hi, mid;
729 JSSpanDep *sdbase, *sd;
730
731 num = cg->numSpanDeps;
732 JS_ASSERT(num > 0);
733 hi = num - 1;
734 sdbase = cg->spanDeps;
735 while (lo <= hi) {
736 mid = (lo + hi) / 2;
737 sd = sdbase + mid;
738 if (sd->before == offset)
739 return sd;
740 if (sd->before < offset)
741 lo = mid + 1;
742 else
743 hi = mid - 1;
744 }
745 if (lo == num)
746 return guard;
747 sd = sdbase + lo;
748 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
749 return sd;
750 }
751
752 static void
753 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
754 {
755 if (jt->kids[JT_LEFT])
756 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
757 if (jt->kids[JT_RIGHT])
758 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
759 jt->kids[JT_LEFT] = cg->jtFreeList;
760 cg->jtFreeList = jt;
761 }
762
763 static JSBool
764 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
765 {
766 jsbytecode *pc, *oldpc, *base, *limit, *next;
767 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
768 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
769 JSBool done;
770 JSOp op;
771 uint32 type;
772 size_t size, incr;
773 jssrcnote *sn, *snlimit;
774 JSSrcNoteSpec *spec;
775 uintN i, n, noteIndex;
776 JSTryNode *tryNode;
777 #ifdef DEBUG_brendan
778 int passes = 0;
779 #endif
780
781 base = CG_BASE(cg);
782 sdbase = cg->spanDeps;
783 sdlimit = sdbase + cg->numSpanDeps;
784 offset = CG_OFFSET(cg);
785 growth = 0;
786
787 do {
788 done = JS_TRUE;
789 delta = 0;
790 top = pivot = -1;
791 sdtop = NULL;
792 pc = NULL;
793 op = JSOP_NOP;
794 type = 0;
795 #ifdef DEBUG_brendan
796 passes++;
797 #endif
798
799 for (sd = sdbase; sd < sdlimit; sd++) {
800 JS_ASSERT(JT_HAS_TAG(sd->target));
801 sd->offset += delta;
802
803 if (sd->top != top) {
804 sdtop = sd;
805 top = sd->top;
806 JS_ASSERT(top == sd->before);
807 pivot = sd->offset;
808 pc = base + top;
809 op = (JSOp) *pc;
810 type = JOF_OPTYPE(op);
811 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
812 /*
813 * We already extended all the jump offset operands for
814 * the opcode at sd->top. Jumps and branches have only
815 * one jump offset operand, but switches have many, all
816 * of which are adjacent in cg->spanDeps.
817 */
818 continue;
819 }
820
821 JS_ASSERT(type == JOF_JUMP ||
822 type == JOF_TABLESWITCH ||
823 type == JOF_LOOKUPSWITCH);
824 }
825
826 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
827 span = SD_SPAN(sd, pivot);
828 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
829 ptrdiff_t deltaFromTop = 0;
830
831 done = JS_FALSE;
832
833 switch (op) {
834 case JSOP_GOTO: op = JSOP_GOTOX; break;
835 case JSOP_IFEQ: op = JSOP_IFEQX; break;
836 case JSOP_IFNE: op = JSOP_IFNEX; break;
837 case JSOP_OR: op = JSOP_ORX; break;
838 case JSOP_AND: op = JSOP_ANDX; break;
839 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
840 case JSOP_CASE: op = JSOP_CASEX; break;
841 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
842 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
843 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
844 default:
845 ReportStatementTooLarge(cx, cg);
846 return JS_FALSE;
847 }
848 *pc = (jsbytecode) op;
849
850 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
851 if (sd2 <= sd) {
852 /*
853 * sd2->offset already includes delta as it stood
854 * before we entered this loop, but it must also
855 * include the delta relative to top due to all the
856 * extended jump offset immediates for the opcode
857 * starting at top, which we extend in this loop.
858 *
859 * If there is only one extended jump offset, then
860 * sd2->offset won't change and this for loop will
861 * iterate once only.
862 */
863 sd2->offset += deltaFromTop;
864 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
865 } else {
866 /*
867 * sd2 comes after sd, and won't be revisited by
868 * the outer for loop, so we have to increase its
869 * offset by delta, not merely by deltaFromTop.
870 */
871 sd2->offset += delta;
872 }
873
874 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
875 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
876 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
877 }
878 sd = sd2 - 1;
879 }
880 }
881 }
882
883 growth += delta;
884 } while (!done);
885
886 if (growth) {
887 #ifdef DEBUG_brendan
888 JSTokenStream *ts = &cg->treeContext.parseContext->tokenStream;
889
890 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
891 ts->filename ? ts->filename : "stdin", cg->firstLine,
892 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
893 passes, offset + growth, offset, growth);
894 #endif
895
896 /*
897 * Ensure that we have room for the extended jumps, but don't round up
898 * to a power of two -- we're done generating code, so we cut to fit.
899 */
900 limit = CG_LIMIT(cg);
901 length = offset + growth;
902 next = base + length;
903 if (next > limit) {
904 JS_ASSERT(length > BYTECODE_CHUNK);
905 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
906 incr = BYTECODE_SIZE(length) - size;
907 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
908 if (!base) {
909 js_ReportOutOfScriptQuota(cx);
910 return JS_FALSE;
911 }
912 CG_BASE(cg) = base;
913 CG_LIMIT(cg) = next = base + length;
914 }
915 CG_NEXT(cg) = next;
916
917 /*
918 * Set up a fake span dependency record to guard the end of the code
919 * being generated. This guard record is returned as a fencepost by
920 * FindNearestSpanDep if there is no real spandep at or above a given
921 * unextended code offset.
922 */
923 guard.top = -1;
924 guard.offset = offset + growth;
925 guard.before = offset;
926 guard.target = NULL;
927 }
928
929 /*
930 * Now work backwards through the span dependencies, copying chunks of
931 * bytecode between each extended jump toward the end of the grown code
932 * space, and restoring immediate offset operands for all jump bytecodes.
933 * The first chunk of bytecodes, starting at base and ending at the first
934 * extended jump offset (NB: this chunk includes the operation bytecode
935 * just before that immediate jump offset), doesn't need to be copied.
936 */
937 JS_ASSERT(sd == sdlimit);
938 top = -1;
939 while (--sd >= sdbase) {
940 if (sd->top != top) {
941 top = sd->top;
942 op = (JSOp) base[top];
943 type = JOF_OPTYPE(op);
944
945 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
946 continue;
947 sd2++;
948 pivot = sd2->offset;
949 JS_ASSERT(top == sd2->before);
950 }
951
952 oldpc = base + sd->before;
953 span = SD_SPAN(sd, pivot);
954
955 /*
956 * If this jump didn't need to be extended, restore its span immediate
957 * offset operand now, overwriting the index of sd within cg->spanDeps
958 * that was stored temporarily after *pc when BuildSpanDepTable ran.
959 *
960 * Note that span might fit in 16 bits even for an extended jump op,
961 * if the op has multiple span operands, not all of which overflowed
962 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
963 * range for a short jump, but others are not).
964 */
965 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
966 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
967 SET_JUMP_OFFSET(oldpc, span);
968 continue;
969 }
970
971 /*
972 * Set up parameters needed to copy the next run of bytecode starting
973 * at offset (which is a cursor into the unextended, original bytecode
974 * vector), down to sd->before (a cursor of the same scale as offset,
975 * it's the index of the original jump pc). Reuse delta to count the
976 * nominal number of bytes to copy.
977 */
978 pc = base + sd->offset;
979 delta = offset - sd->before;
980 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
981
982 /*
983 * Don't bother copying the jump offset we're about to reset, but do
984 * copy the bytecode at oldpc (which comes just before its immediate
985 * jump offset operand), on the next iteration through the loop, by
986 * including it in offset's new value.
987 */
988 offset = sd->before + 1;
989 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
990 if (size) {
991 memmove(pc + 1 + JUMPX_OFFSET_LEN,
992 oldpc + 1 + JUMP_OFFSET_LEN,
993 size);
994 }
995
996 SET_JUMPX_OFFSET(pc, span);
997 }
998
999 if (growth) {
1000 /*
1001 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1002 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1003 * at each sd that moved. The future may bring different offset sizes
1004 * for span-dependent instruction operands. However, we fix only main
1005 * notes here, not prolog notes -- we know that prolog opcodes are not
1006 * span-dependent, and aren't likely ever to be.
1007 */
1008 offset = growth = 0;
1009 sd = sdbase;
1010 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
1011 sn < snlimit;
1012 sn = SN_NEXT(sn)) {
1013 /*
1014 * Recall that the offset of a given note includes its delta, and
1015 * tells the offset of the annotated bytecode from the main entry
1016 * point of the script.
1017 */
1018 offset += SN_DELTA(sn);
1019 while (sd < sdlimit && sd->before < offset) {
1020 /*
1021 * To compute the delta to add to sn, we need to look at the
1022 * spandep after sd, whose offset - (before + growth) tells by
1023 * how many bytes sd's instruction grew.
1024 */
1025 sd2 = sd + 1;
1026 if (sd2 == sdlimit)
1027 sd2 = &guard;
1028 delta = sd2->offset - (sd2->before + growth);
1029 if (delta > 0) {
1030 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
1031 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
1032 if (!sn)
1033 return JS_FALSE;
1034 snlimit = cg->main.notes + cg->main.noteCount;
1035 growth += delta;
1036 }
1037 sd++;
1038 }
1039
1040 /*
1041 * If sn has span-dependent offset operands, check whether each
1042 * covers further span-dependencies, and increase those operands
1043 * accordingly. Some source notes measure offset not from the
1044 * annotated pc, but from that pc plus some small bias. NB: we
1045 * assume that spec->offsetBias can't itself span span-dependent
1046 * instructions!
1047 */
1048 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1049 if (spec->isSpanDep) {
1050 pivot = offset + spec->offsetBias;
1051 n = spec->arity;
1052 for (i = 0; i < n; i++) {
1053 span = js_GetSrcNoteOffset(sn, i);
1054 if (span == 0)
1055 continue;
1056 target = pivot + span * spec->isSpanDep;
1057 sd2 = FindNearestSpanDep(cg, target,
1058 (target >= pivot)
1059 ? sd - sdbase
1060 : 0,
1061 &guard);
1062
1063 /*
1064 * Increase target by sd2's before-vs-after offset delta,
1065 * which is absolute (i.e., relative to start of script,
1066 * as is target). Recompute the span by subtracting its
1067 * adjusted pivot from target.
1068 */
1069 target += sd2->offset - sd2->before;
1070 span = target - (pivot + growth);
1071 span *= spec->isSpanDep;
1072 noteIndex = sn - cg->main.notes;
1073 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1074 return JS_FALSE;
1075 sn = cg->main.notes + noteIndex;
1076 snlimit = cg->main.notes + cg->main.noteCount;
1077 }
1078 }
1079 }
1080 cg->main.lastNoteOffset += growth;
1081
1082 /*
1083 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1084 * not clear how we can beat that).
1085 */
1086 for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
1087 /*
1088 * First, look for the nearest span dependency at/above tn->start.
1089 * There may not be any such spandep, in which case the guard will
1090 * be returned.
1091 */
1092 offset = tryNode->note.start;
1093 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1094 delta = sd->offset - sd->before;
1095 tryNode->note.start = offset + delta;
1096
1097 /*
1098 * Next, find the nearest spandep at/above tn->start + tn->length.
1099 * Use its delta minus tn->start's delta to increase tn->length.
1100 */
1101 length = tryNode->note.length;
1102 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1103 if (sd2 != sd) {
1104 tryNode->note.length =
1105 length + sd2->offset - sd2->before - delta;
1106 }
1107 }
1108 }
1109
1110 #ifdef DEBUG_brendan
1111 {
1112 uintN bigspans = 0;
1113 top = -1;
1114 for (sd = sdbase; sd < sdlimit; sd++) {
1115 offset = sd->offset;
1116
1117 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1118 if (sd->top != top) {
1119 JS_ASSERT(top == -1 ||
1120 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1121 bigspans != 0);
1122 bigspans = 0;
1123 top = sd->top;
1124 JS_ASSERT(top == sd->before);
1125 op = (JSOp) base[offset];
1126 type = JOF_OPTYPE(op);
1127 JS_ASSERT(type == JOF_JUMP ||
1128 type == JOF_JUMPX ||
1129 type == JOF_TABLESWITCH ||
1130 type == JOF_TABLESWITCHX ||
1131 type == JOF_LOOKUPSWITCH ||
1132 type == JOF_LOOKUPSWITCHX);
1133 pivot = offset;
1134 }
1135
1136 pc = base + offset;
1137 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1138 span = GET_JUMPX_OFFSET(pc);
1139 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1140 bigspans++;
1141 } else {
1142 JS_ASSERT(type == JOF_TABLESWITCHX ||
1143 type == JOF_LOOKUPSWITCHX);
1144 }
1145 } else {
1146 span = GET_JUMP_OFFSET(pc);
1147 }
1148 JS_ASSERT(SD_SPAN(sd, pivot) == span);
1149 }
1150 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1151 }
1152 #endif
1153
1154 /*
1155 * Reset so we optimize at most once -- cg may be used for further code
1156 * generation of successive, independent, top-level statements. No jump
1157 * can span top-level statements, because JS lacks goto.
1158 */
1159 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1160 JS_free(cx, cg->spanDeps);
1161 cg->spanDeps = NULL;
1162 FreeJumpTargets(cg, cg->jumpTargets);
1163 cg->jumpTargets = NULL;
1164 cg->numSpanDeps = cg->numJumpTargets = 0;
1165 cg->spanDepTodo = CG_OFFSET(cg);
1166 return JS_TRUE;
1167 }
1168
1169 static ptrdiff_t
1170 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1171 {
1172 JSBool extend;
1173 ptrdiff_t jmp;
1174 jsbytecode *pc;
1175
1176 extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1177 if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1178 return -1;
1179
1180 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1181 if (jmp >= 0 && (extend || cg->spanDeps)) {
1182 pc = CG_CODE(cg, jmp);
1183 if (!AddSpanDep(cx, cg, pc, pc, off))
1184 return -1;
1185 }
1186 return jmp;
1187 }
1188
1189 static ptrdiff_t
1190 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1191 {
1192 JSSpanDep *sd;
1193 JSJumpTarget *jt;
1194 ptrdiff_t top;
1195
1196 if (!cg->spanDeps)
1197 return GET_JUMP_OFFSET(pc);
1198
1199 sd = GetSpanDep(cg, pc);
1200 jt = sd->target;
1201 if (!JT_HAS_TAG(jt))
1202 return JT_TO_BPDELTA(jt);
1203
1204 top = sd->top;
1205 while (--sd >= cg->spanDeps && sd->top == top)
1206 continue;
1207 sd++;
1208 return JT_CLR_TAG(jt)->offset - sd->offset;
1209 }
1210
1211 JSBool
1212 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1213 ptrdiff_t off)
1214 {
1215 if (!cg->spanDeps) {
1216 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1217 SET_JUMP_OFFSET(pc, off);
1218 return JS_TRUE;
1219 }
1220
1221 if (!BuildSpanDepTable(cx, cg))
1222 return JS_FALSE;
1223 }
1224
1225 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1226 }
1227
1228 JSBool
1229 js_InStatement(JSTreeContext *tc, JSStmtType type)
1230 {
1231 JSStmtInfo *stmt;
1232
1233 for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1234 if (stmt->type == type)
1235 return JS_TRUE;
1236 }
1237 return JS_FALSE;
1238 }
1239
1240 void
1241 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1242 ptrdiff_t top)
1243 {
1244 stmt->type = type;
1245 stmt->flags = 0;
1246 SET_STATEMENT_TOP(stmt, top);
1247 stmt->u.label = NULL;
1248 JS_ASSERT(!stmt->u.blockObj);
1249 stmt->down = tc->topStmt;
1250 tc->topStmt = stmt;
1251 if (STMT_LINKS_SCOPE(stmt)) {
1252 stmt->downScope = tc->topScopeStmt;
1253 tc->topScopeStmt = stmt;
1254 } else {
1255 stmt->downScope = NULL;
1256 }
1257 }
1258
1259 void
1260 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
1261 ptrdiff_t top)
1262 {
1263
1264 js_PushStatement(tc, stmt, STMT_BLOCK, top);
1265 stmt->flags |= SIF_SCOPE;
1266 STOBJ_SET_PARENT(blockObj, tc->blockChain);
1267 stmt->downScope = tc->topScopeStmt;
1268 tc->topScopeStmt = stmt;
1269 tc->blockChain = blockObj;
1270 stmt->u.blockObj = blockObj;
1271 }
1272
1273 /*
1274 * Emit a backpatch op with offset pointing to the previous jump of this type,
1275 * so that we can walk back up the chain fixing up the op and jump offset.
1276 */
1277 static ptrdiff_t
1278 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1279 {
1280 ptrdiff_t offset, delta;
1281
1282 offset = CG_OFFSET(cg);
1283 delta = offset - *lastp;
1284 *lastp = offset;
1285 JS_ASSERT(delta > 0);
1286 return EmitJump(cx, cg, op, delta);
1287 }
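/*
 * Worked example of the chain above (a sketch): if backpatch ops are
 * emitted at offsets 10 and 25 with *lastp starting at -1, the first
 * stores delta 11 and the second delta 15. BackPatch below then walks
 * 25 -> 10 -> -1 (the CG_CODE(cg, -1) sentinel) by subtracting each delta
 * in turn.
 */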
1288
1289 /*
1290 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1291 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1292 * NB: We use cx and cg from our caller's lexical environment, and return
1293 * false on error.
1294 */
1295 #define EMIT_UINT16_IMM_OP(op, i) \
1296 JS_BEGIN_MACRO \
1297 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1298 return JS_FALSE; \
1299 JS_END_MACRO
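/*
 * For instance, EMIT_UINT16_IMM_OP(JSOP_POPN, 3) -- as FlushPops does next
 * -- emits the three bytes [JSOP_POPN, 0x00, 0x03]: the operand is stored
 * big-endian via UINT16_HI and UINT16_LO.
 */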
1300
1301 static JSBool
1302 FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
1303 {
1304 JS_ASSERT(*npops != 0);
1305 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1306 return JS_FALSE;
1307 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
1308 *npops = 0;
1309 return JS_TRUE;
1310 }
1311
1312 /*
1313 * Emit additional bytecode(s) for non-local jumps.
1314 */
1315 static JSBool
1316 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
1317 {
1318 intN depth, npops;
1319 JSStmtInfo *stmt;
1320
1321 /*
1322 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1323 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1324 * end of a with statement, so we save cg->stackDepth here and restore it
1325 * just before a successful return.
1326 */
1327 depth = cg->stackDepth;
1328 npops = 0;
1329
1330 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1331
1332 for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) {
1333 switch (stmt->type) {
1334 case STMT_FINALLY:
1335 FLUSH_POPS();
1336 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1337 return JS_FALSE;
1338 if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
1339 return JS_FALSE;
1340 break;
1341
1342 case STMT_WITH:
1343 /* There's a With object on the stack that we need to pop. */
1344 FLUSH_POPS();
1345 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1346 return JS_FALSE;
1347 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1348 return JS_FALSE;
1349 break;
1350
1351 case STMT_FOR_IN_LOOP:
1352 /*
1353 * The iterator and the object being iterated need to be popped.
1354 */
1355 FLUSH_POPS();
1356 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1357 return JS_FALSE;
1358 if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1359 return JS_FALSE;
1360 break;
1361
1362 case STMT_SUBROUTINE:
1363 /*
1364 * There's a [exception or hole, retsub pc-index] pair on the
1365 * stack that we need to pop.
1366 */
1367 npops += 2;
1368 break;
1369
1370 default:;
1371 }
1372
1373 if (stmt->flags & SIF_SCOPE) {
1374 uintN i;
1375
1376 /* There is a Block object with locals on the stack to pop. */
1377 FLUSH_POPS();
1378 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1379 return JS_FALSE;
1380 i = OBJ_BLOCK_COUNT(cx, stmt->u.blockObj);
1381 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1382 }
1383 }
1384
1385 FLUSH_POPS();
1386 cg->stackDepth = depth;
1387 return JS_TRUE;
1388
1389 #undef FLUSH_POPS
1390 }
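/*
 * Sketch of what this function emits for a labeled break such as
 *
 *   out: with (o) { for (var p in q) break out; }
 *
 * Walking from the innermost statement out to the label, we emit a hidden
 * JSOP_ENDITER for the for-in loop and a hidden JSOP_LEAVEWITH for the
 * with block before the backpatched goto, keeping the stack balanced.
 */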
1391
1392 static ptrdiff_t
1393 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1394 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1395 {
1396 intN index;
1397
1398 if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
1399 return -1;
1400
1401 if (label)
1402 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1403 else if (noteType != SRC_NULL)
1404 index = js_NewSrcNote(cx, cg, noteType);
1405 else
1406 index = 0;
1407 if (index < 0)
1408 return -1;
1409
1410 return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1411 }
1412
1413 static JSBool
1414 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1415 jsbytecode *target, jsbytecode op)
1416 {
1417 jsbytecode *pc, *stop;
1418 ptrdiff_t delta, span;
1419
1420 pc = CG_CODE(cg, last);
1421 stop = CG_CODE(cg, -1);
1422 while (pc != stop) {
1423 delta = GetJumpOffset(cg, pc);
1424 span = PTRDIFF(target, pc, jsbytecode);
1425 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1426
1427 /*
1428 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1429 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1430 * and need to see the JSOP_BACKPATCH* op at *pc).
1431 */
1432 *pc = op;
1433 pc -= delta;
1434 }
1435 return JS_TRUE;
1436 }
1437
1438 void
1439 js_PopStatement(JSTreeContext *tc)
1440 {
1441 JSStmtInfo *stmt;
1442
1443 stmt = tc->topStmt;
1444 tc->topStmt = stmt->down;
1445 if (STMT_LINKS_SCOPE(stmt)) {
1446 tc->topScopeStmt = stmt->downScope;
1447 if (stmt->flags & SIF_SCOPE) {
1448 tc->blockChain = STOBJ_GET_PARENT(stmt->u.blockObj);
1449 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
1450 }
1451 }
1452 }
1453
1454 JSBool
1455 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1456 {
1457 JSStmtInfo *stmt;
1458
1459 stmt = cg->treeContext.topStmt;
1460 if (!STMT_IS_TRYING(stmt) &&
1461 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1462 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1463 JSOP_GOTO))) {
1464 return JS_FALSE;
1465 }
1466 js_PopStatement(&cg->treeContext);
1467 return JS_TRUE;
1468 }
1469
1470 JSBool
1471 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1472 JSParseNode *pn)
1473 {
1474 jsdouble dval;
1475 jsint ival;
1476 JSAtom *valueAtom;
1477 jsval v;
1478 JSAtomListElement *ale;
1479
1480 /* XXX just do numbers for now */
1481 if (pn->pn_type == TOK_NUMBER) {
1482 dval = pn->pn_dval;
1483 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
1484 v = INT_TO_JSVAL(ival);
1485 } else {
1486 /*
1487 * We atomize double to root a jsdouble instance that we wrap as
1488 * jsval and store in cg->constList. This works because atoms are
1489 * protected from GC during compilation.
1490 */
1491 valueAtom = js_AtomizeDouble(cx, dval);
1492 if (!valueAtom)
1493 return JS_FALSE;
1494 v = ATOM_KEY(valueAtom);
1495 }
1496 ale = js_IndexAtom(cx, atom, &cg->constList);
1497 if (!ale)
1498 return JS_FALSE;
1499 ALE_SET_VALUE(ale, v);
1500 }
1501 return JS_TRUE;
1502 }
1503
1504 JSStmtInfo *
1505 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp)
1506 {
1507 JSStmtInfo *stmt;
1508 JSObject *obj;
1509 JSScope *scope;
1510 JSScopeProperty *sprop;
1511
1512 for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
1513 if (stmt->type == STMT_WITH)
1514 break;
1515
1516 /* Skip "maybe scope" statements that don't contain let bindings. */
1517 if (!(stmt->flags & SIF_SCOPE))
1518 continue;
1519
1520 obj = stmt->u.blockObj;
1521 JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1522 scope = OBJ_SCOPE(obj);
1523 sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
1524 if (sprop) {
1525 JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1526
1527 if (slotp) {
1528 JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
1529 *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
1530 sprop->shortid;
1531 }
1532 return stmt;
1533 }
1534 }
1535
1536 if (slotp)
1537 *slotp = -1;
1538 return stmt;
1539 }
1540
1541 /*
1542 * Check if the attributes describe a property holding a compile-time constant
1543 * or a permanent, read-only property without a getter.
1544 */
1545 #define IS_CONSTANT_PROPERTY(attrs) \
1546 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1547 (JSPROP_READONLY | JSPROP_PERMANENT))
1548
1549 /*
1550 * The function sets *vp to JSVAL_HOLE when the atom does not correspond to a
1551 * name defining a compile-time constant.
1552 */
1553 static JSBool
1554 LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1555 jsval *vp)
1556 {
1557 JSBool ok;
1558 JSStmtInfo *stmt;
1559 JSAtomListElement *ale;
1560 JSObject *obj, *pobj;
1561 JSProperty *prop;
1562 uintN attrs;
1563
1564 /*
1565 * Chase down the cg stack, but only until we reach the outermost cg.
1566 * This enables propagating consts from top-level into switch cases in a
1567 * function compiled along with the top-level script.
1568 */
1569 *vp = JSVAL_HOLE;
1570 do {
1571 if (cg->treeContext.flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
1572 /* XXX this will need revising when 'let const' is added. */
1573 stmt = js_LexicalLookup(&cg->treeContext, atom, NULL);
1574 if (stmt)
1575 return JS_TRUE;
1576
1577 ATOM_LIST_SEARCH(ale, &cg->constList, atom);
1578 if (ale) {
1579 JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
1580 *vp = ALE_VALUE(ale);
1581 return JS_TRUE;
1582 }
1583
1584 /*
1585 * Try looking in the variable object for a direct property that
1586 * is readonly and permanent. We know such a property can't be
1587 * shadowed by another property on obj's prototype chain, or a
1588 * with object or catch variable; nor can prop's value be changed,
1589 * nor can prop be deleted.
1590 */
1591 if (cg->treeContext.flags & TCF_IN_FUNCTION) {
1592 if (js_LookupLocal(cx, cg->treeContext.u.fun, atom, NULL) !=
1593 JSLOCAL_NONE) {
1594 break;
1595 }
1596 } else {
1597 JS_ASSERT(cg->treeContext.flags & TCF_COMPILE_N_GO);
1598 obj = cg->treeContext.u.scopeChain;
1599 ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &pobj,
1600 &prop);
1601 if (!ok)
1602 return JS_FALSE;
1603 if (pobj == obj) {
1604 /*
1605 * We're compiling code that will be executed immediately,
1606 * not re-executed against a different scope chain and/or
1607 * variable object. Therefore we can get constant values
1608 * from our variable object here.
1609 */
1610 ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
1611 &attrs);
1612 if (ok && IS_CONSTANT_PROPERTY(attrs)) {
1613 ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
1614 JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
1615 }
1616 }
1617 if (prop)
1618 OBJ_DROP_PROPERTY(cx, pobj, prop);
1619 if (!ok)
1620 return JS_FALSE;
1621 if (prop)
1622 break;
1623 }
1624 }
1625 } while ((cg = cg->parent) != NULL);
1626 return JS_TRUE;
1627 }
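/*
 * Example of the propagation this enables (a sketch): when
 *
 *   const TWO = 2;
 *   function f(x) { switch (x) { case TWO: return 1; } return 0; }
 *
 * is compiled in one go, the case TWO: label inside f can fold to the
 * constant 2 by chasing cg->parent to the outermost code generator, whose
 * constList holds TWO.
 */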
1628
1629 /*
1630 * Return JSOP_NOP to indicate that the index fits in 2 bytes and no index
1631 * segment reset instruction is necessary, JSOP_FALSE to indicate an error, or
1632 * either JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to
1633 * issue after the main bytecode sequence.
1634 */
1635 static JSOp
1636 EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
1637 {
1638 uintN indexBase;
1639
1640 /*
1641 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1642 * for big indexes.
1643 */
1644 JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
1645 JS_STATIC_ASSERT(INDEX_LIMIT >=
1646 (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
1647
1648 if (index < JS_BIT(16))
1649 return JSOP_NOP;
1650 indexBase = index >> 16;
1651 if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
1652 if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
1653 return JSOP_FALSE;
1654 return JSOP_RESETBASE0;
1655 }
1656
1657 if (index >= INDEX_LIMIT) {
1658 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1659 JSMSG_TOO_MANY_LITERALS);
1660 return JSOP_FALSE;
1661 }
1662
1663 if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
1664 return JSOP_FALSE;
1665 return JSOP_RESETBASE;
1666 }
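/*
 * Worked example of the segmenting above (a sketch): for index 0x25000,
 * indexBase == 0x25000 >> 16 == 2, which the 1..3 fast range covers, so
 * JSOP_INDEXBASE2 is emitted here and JSOP_RESETBASE0 is returned for the
 * caller to append after the main op; the main op itself carries only the
 * low 16 bits, 0x5000.
 */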
1667
1668 /*
1669 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1670 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1671 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1672 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1673 * with single-byte JSOP_INDEXBASE[123] codes.
1674 *
1675 * Such prefixing currently requires a suffix to restore the "zero segment"
1676 * register setting, but this could be optimized further.
1677 */
1678 static JSBool
1679 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
1680 {
1681 JSOp bigSuffix;
1682
1683 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1684 if (bigSuffix == JSOP_FALSE)
1685 return JS_FALSE;
1686 EMIT_UINT16_IMM_OP(op, index);
1687 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1688 }
1689
1690 /*
1691 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1692 * caller's lexical environment, and embedding a false return on error.
1693 */
1694 #define EMIT_INDEX_OP(op, index) \
1695 JS_BEGIN_MACRO \
1696 if (!EmitIndexOp(cx, op, index, cg)) \
1697 return JS_FALSE; \
1698 JS_END_MACRO
1699
1700
1701 static JSBool
1702 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1703 {
1704 JSAtomListElement *ale;
1705
1706 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1707 if (op == JSOP_GETPROP &&
1708 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1709 return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
1710 }
1711 ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
1712 if (!ale)
1713 return JS_FALSE;
1714 return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
1715 }
1716
1717 static uintN
1718 IndexParsedObject(JSParsedObjectBox *pob, JSEmittedObjectList *list);
1719
1720 static JSBool
1721 EmitObjectOp(JSContext *cx, JSParsedObjectBox *pob, JSOp op,
1722 JSCodeGenerator *cg)
1723 {
1724 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1725 return EmitIndexOp(cx, op, IndexParsedObject(pob, &cg->objectList), cg);
1726 }
1727
1728 /*
1729 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1730 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1731 * other parts of the code there's no necessary relationship between the two.
1732 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1733 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1734 */
1735 JS_STATIC_ASSERT(ARGNO_LEN == 2);
1736 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
1737
1738 static JSBool
1739 EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
1740 JSCodeGenerator *cg)
1741 {
1742 JSOp bigSuffix;
1743 ptrdiff_t off;
1744 jsbytecode *pc;
1745
1746 JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
1747 JOF_OPTYPE(op) == JOF_SLOTOBJECT);
1748 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1749 if (bigSuffix == JSOP_FALSE)
1750 return JS_FALSE;
1751
1752 /* Emit [op, slot, index]. */
1753 off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
1754 if (off < 0)
1755 return JS_FALSE;
1756 pc = CG_CODE(cg, off);
1757 SET_UINT16(pc, slot);
1758 pc += 2;
1759 SET_INDEX(pc, index);
1760 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1761 }
1762
1763 /*
1764 * Adjust the slot for a block local to account for the number of variables
1765 * that share the same index space with locals. Due to the incremental code
1766 * generation for top-level script, we do the adjustment via code patching in
1767 * js_CompileScript; see comments there.
1768 *
1769 * The function returns -1 on failures.
1770 */
1771 static jsint
1772 AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
1773 {
1774 JS_ASSERT((jsuint) slot < cg->maxStackDepth);
1775 if (cg->treeContext.flags & TCF_IN_FUNCTION) {
1776 slot += cg->treeContext.u.fun->u.i.nvars;
1777 if ((uintN) slot >= SLOTNO_LIMIT) {
1778 js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
1779 JSREPORT_ERROR,
1780 JSMSG_TOO_MANY_LOCALS);
1781 slot = -1;
1782 }
1783 }
1784 return slot;
1785 }
1786
1787 /*
1788 * This routine tries to optimize name gets and sets to stack slot loads and
1789 * stores, given the variables object and scope chain in cx's top frame, the
1790 * compile-time context in tc, and a TOK_NAME node pn. It returns false on
1791 * error, true on success.
1792 *
1793 * The caller can inspect pn->pn_slot for a non-negative slot number to tell
1794 * whether optimization occurred, in which case BindNameToSlot also updated
1795 * pn->pn_op. If pn->pn_slot is still -1 on return, pn->pn_op nevertheless
1796 * may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS. Whether
1797 * or not pn->pn_op was modified, if this function finds an argument or local
1798 * variable name, pn->pn_const will be true for const properties after a
1799 * successful return.
1800 *
1801 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1802 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1803 * in js_EmitTree.
1804 */
1805 static JSBool
1806 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1807 {
1808 JSTreeContext *tc;
1809 JSAtom *atom;
1810 JSStmtInfo *stmt;
1811 jsint slot;
1812 JSOp op;
1813 JSLocalKind localKind;
1814 uintN index;
1815 JSAtomListElement *ale;
1816 JSBool constOp;
1817
1818 JS_ASSERT(pn->pn_type == TOK_NAME);
1819 if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
1820 return JS_TRUE;
1821
1822 /* QNAME references can never be optimized to use arg/var storage. */
1823 if (pn->pn_op == JSOP_QNAMEPART)
1824 return JS_TRUE;
1825
1826 /*
1827 * We can't optimize if we are compiling a with statement and its body,
1828 * or we're in a catch block whose exception variable has the same name
1829 * as this node. FIXME: we should be able to optimize catch vars to be
1830 * block-locals.
1831 */
1832 tc = &cg->treeContext;
1833 atom = pn->pn_atom;
1834 stmt = js_LexicalLookup(tc, atom, &slot);
1835 if (stmt) {
1836 if (stmt->type == STMT_WITH)
1837 return JS_TRUE;
1838
1839 JS_ASSERT(stmt->flags & SIF_SCOPE);
1840 JS_ASSERT(slot >= 0);
1841 op = PN_OP(pn);
1842 switch (op) {
1843 case JSOP_NAME: op = JSOP_GETLOCAL; break;
1844 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
1845 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
1846 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
1847 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
1848 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
1849 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
1850 case JSOP_DELNAME: op = JSOP_FALSE; break;
1851 default: JS_ASSERT(0);
1852 }
1853 if (op != pn->pn_op) {
1854 slot = AdjustBlockSlot(cx, cg, slot);
1855 if (slot < 0)
1856 return JS_FALSE;
1857 pn->pn_op = op;
1858 pn->pn_slot = slot;
1859 }
1860 return JS_TRUE;
1861 }
1862
1863 /*
1864  * We can't optimize if a var and a closure (a local function not in a
1865  * larger expression and not at top level within another function's body)
1866  * collide. XXX suboptimal: track colliding names and deoptimize only those.
1867 */
1868 if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
1869 return JS_TRUE;
1870
1871 if (!(tc->flags & TCF_IN_FUNCTION)) {
1872 JSStackFrame *caller;
1873
1874 caller = tc->parseContext->callerFrame;
1875 if (caller) {
1876 JS_ASSERT(tc->flags & TCF_COMPILE_N_GO);
1877 JS_ASSERT(caller->script);
1878 if (!caller->fun || caller->varobj != tc->u.scopeChain)
1879 return JS_TRUE;
1880
1881 /*
1882  * We are compiling an eval or debug script inside a function frame
1883  * and the scope chain matches the function's variable object.
1884  * Optimize access to the function's arguments, variables, and the
1885  * arguments object.
1886 */
1887 if (PN_OP(pn) != JSOP_NAME || cg->staticDepth > JS_DISPLAY_SIZE)
1888 goto arguments_check;
1889 localKind = js_LookupLocal(cx, caller->fun, atom, &index);
1890 if (localKind == JSLOCAL_NONE)
1891 goto arguments_check;
1892
1893 ATOM_LIST_SEARCH(ale, &cg->upvarList, atom);
1894 if (!ale) {
1895 uint32 cookie, length, *vector;
1896
1897 ale = js_IndexAtom(cx, atom, &cg->upvarList);
1898 if (!ale)
1899 return JS_FALSE;
1900 JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);
1901
1902 length = cg->upvarMap.length;
1903 JS_ASSERT(ALE_INDEX(ale) <= length);
1904 if (ALE_INDEX(ale) == length) {
1905 length = 2 * JS_MAX(2, length);
1906 vector = (uint32 *)
1907 JS_realloc(cx, cg->upvarMap.vector,
1908 length * sizeof *vector);
1909 if (!vector)
1910 return JS_FALSE;
1911 cg->upvarMap.vector = vector;
1912 cg->upvarMap.length = length;
1913 }
1914
1915 if (localKind != JSLOCAL_ARG)
1916 index += caller->fun->nargs;
1917 if (index >= JS_BIT(16)) {
1918 cg->treeContext.flags |= TCF_FUN_USES_NONLOCALS;
1919 return JS_TRUE;
1920 }
1921
1922 cookie = MAKE_UPVAR_COOKIE(1, index);
1923 cg->upvarMap.vector[ALE_INDEX(ale)] = cookie;
1924 }
1925
1926 pn->pn_op = JSOP_GETUPVAR;
1927 pn->pn_slot = ALE_INDEX(ale);
1928 return JS_TRUE;
1929 }
1930
1931 /*
1932 * We are optimizing global variables and there may be no pre-existing
1933 * global property named atom. If atom was declared via const or var,
1934 * optimize pn to access fp->vars using the appropriate JSOP_*GVAR op.
1935 */
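    /*
     * E.g. (rough sketch): in a top-level script, |var g; g = 2;| indexes
     * g in tc->decls, so the assignment's JSOP_SETNAME is rewritten below
     * to JSOP_SETGVAR with pn_slot set to g's fast global index.
     */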
1936 ATOM_LIST_SEARCH(ale, &tc->decls, atom);
1937 if (!ale) {
1938 /* Use precedes declaration, or name is never declared. */
1939 return JS_TRUE;
1940 }
1941 constOp = (ALE_JSOP(ale) == JSOP_DEFCONST);
1942
1943 /* Index atom so we can map fast global number to name. */
1944 ale = js_IndexAtom(cx, atom, &cg->atomList);
1945 if (!ale)
1946 return JS_FALSE;
1947
1948 /* Defend against tc->ngvars 16-bit overflow. */
1949 slot = ALE_INDEX(ale);
1950 if ((slot + 1) >> 16)
1951 return JS_TRUE;
1952
1953 if ((uint16)(slot + 1) > tc->ngvars)
1954 tc->ngvars = (uint16)(slot + 1);
1955
1956 op = PN_OP(pn);
1957 switch (op) {
1958 case JSOP_NAME: op = JSOP_GETGVAR; break;
1959 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
1960 case JSOP_SETCONST: /* NB: no change */ break;
1961 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
1962 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
1963 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
1964 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
1965 case JSOP_FORNAME: /* NB: no change */ break;
1966 case JSOP_DELNAME: /* NB: no change */ break;
1967 default: JS_NOT_REACHED("gvar");
1968 }
1969 pn->pn_const = constOp;
1970 if (op != pn->pn_op) {
1971 pn->pn_op = op;
1972 pn->pn_slot = slot;
1973 }
1974 return JS_TRUE;
1975 }
1976
1977 if (tc->flags & TCF_IN_FUNCTION) {
1978 /*
1979 * We are compiling a function body and may be able to optimize name
1980 * to stack slot. Look for an argument or variable in the function and
1981 * rewrite pn_op and update pn accordingly.
1982 */
1983 localKind = js_LookupLocal(cx, tc->u.fun, atom, &index);
1984 if (localKind != JSLOCAL_NONE) {
1985 op = PN_OP(pn);
1986 if (localKind == JSLOCAL_ARG) {
1987 switch (op) {
1988 case JSOP_NAME: op = JSOP_GETARG; break;
1989 case JSOP_SETNAME: op = JSOP_SETARG; break;
1990 case JSOP_INCNAME: op = JSOP_INCARG; break;
1991 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
1992 case JSOP_DECNAME: op = JSOP_DECARG; break;
1993 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
1994 case JSOP_FORNAME: op = JSOP_FORARG; break;
1995 case JSOP_DELNAME: op = JSOP_FALSE; break;
1996 default: JS_NOT_REACHED("arg");
1997 }
1998 pn->pn_const = JS_FALSE;
1999 } else {
2000 JS_ASSERT(localKind == JSLOCAL_VAR ||
2001 localKind == JSLOCAL_CONST);
2002 switch (op) {
2003 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2004 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2005 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2006 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2007 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2008 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2009 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2010 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2011 case JSOP_DELNAME: op = JSOP_FALSE; break;
2012 default: JS_NOT_REACHED("local");
2013 }
2014 pn->pn_const = (localKind == JSLOCAL_CONST);
2015 }
2016 pn->pn_op = op;
2017 pn->pn_slot = index;
2018 return JS_TRUE;
2019 }
2020 tc->flags |= TCF_FUN_USES_NONLOCALS;
2021 }
2022
2023 arguments_check:
2024 /*
2025  * Here we are either compiling a function body, or compiling an eval or
2026  * debug script inside a function, and we couldn't optimize pn, so it's
2027  * not a global or local slot name. We are also outside of any with
2028  * blocks. Check if we can optimize the predefined arguments variable.
2029 */
2030 JS_ASSERT((tc->flags & TCF_IN_FUNCTION) ||
2031 (tc->parseContext->callerFrame &&
2032 tc->parseContext->callerFrame->fun &&
2033 tc->parseContext->callerFrame->varobj == tc->u.scopeChain));
2034 if (pn->pn_op == JSOP_NAME &&
2035 atom == cx->runtime->atomState.argumentsAtom) {
2036 pn->pn_op = JSOP_ARGUMENTS;
2037 return JS_TRUE;
2038 }
2039 return JS_TRUE;
2040 }
2041
2042 /*
2043 * If pn contains a useful expression, return true with *answer set to true.
2044 * If pn contains a useless expression, return true with *answer set to false.
2045 * Return false on error.
2046 *
2047 * The caller should initialize *answer to false and invoke this function on
2048 * an expression statement or similar subtree to decide whether the tree could
2049 * produce code that has any side effects. For an expression statement, we
2050 * define useless code as code with no side effects, because the main effect,
2051 * the value left on the stack after the code executes, will be discarded by a
2052 * pop bytecode.
2053 */
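/*
 * Illustrative sketch: for the expression statement |v;| where v binds to
 * an argument or local slot, nothing below sets *answer, so the statement
 * is deemed useless; |f();| and |o.p;| set *answer to true because calls
 * and property gets may have side effects, and so does |x + y;|, since an
 * operand may denote an object with a toString or valueOf method.
 */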
2054 static JSBool
2055 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2056 JSBool *answer)
2057 {
2058 JSBool ok;
2059 JSFunction *fun;
2060 JSParseNode *pn2;
2061
2062 ok = JS_TRUE;
2063 if (!pn || *answer)
2064 return ok;
2065
2066 switch (pn->pn_arity) {
2067 case PN_FUNC:
2068 /*
2069 * A named function is presumed useful: we can't yet know that it is
2070 * not called. The side effects are the creation of a scope object
2071 * to parent this function object, and the binding of the function's
2072 * name in that scope object. See comments at case JSOP_NAMEDFUNOBJ:
2073 * in jsinterp.c.
2074 */
2075 fun = (JSFunction *) pn->pn_funpob->object;
2076 if (fun->atom)
2077 *answer = JS_TRUE;
2078 break;
2079
2080 case PN_LIST:
2081 if (pn->pn_type == TOK_NEW ||
2082 pn->pn_type == TOK_LP ||
2083 pn->pn_type == TOK_LB ||
2084 pn->pn_type == TOK_RB ||
2085 pn->pn_type == TOK_RC) {
2086 /*
2087 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2088 * are presumed to be useful, because they may have side effects
2089 * even if their main effect (their return value) is discarded.
2090 *
2091 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2092 * to avoid too much recursion. All such lists must be presumed
2093 * to be useful because each index operation could invoke a getter
2094 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2095 * does not apply here: arguments[i][j] might invoke a getter).
2096 *
2097 * Array and object initializers (TOK_RB and TOK_RC lists) must be
2098 * considered useful, because they are sugar for constructor calls
2099 * (to Array and Object, respectively).
2100 */
2101 *answer = JS_TRUE;
2102 } else {
2103 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2104 ok &= CheckSideEffects(cx, cg, pn2, answer);
2105 }
2106 break;
2107
2108 case PN_TERNARY:
2109 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2110 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2111 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2112 break;
2113
2114 case PN_BINARY:
2115 if (pn->pn_type == TOK_ASSIGN) {
2116 /*
2117 * Assignment is presumed to be useful, even if the next operation
2118 * is another assignment overwriting this one's ostensible effect,
2119 * because the left operand may be a property with a setter that
2120 * has side effects.
2121 *
2122 * The only exception is assignment of a useless value to a const
2123 * declared in the function currently being compiled.
2124 */
2125 pn2 = pn->pn_left;
2126 if (pn2->pn_type != TOK_NAME) {
2127 *answer = JS_TRUE;
2128 } else {
2129 if (!BindNameToSlot(cx, cg, pn2))
2130 return JS_FALSE;
2131 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2132 return JS_FALSE;
2133 if (!*answer &&
2134 (pn->pn_op != JSOP_NOP ||
2135 pn2->pn_slot < 0 ||
2136 !pn2->pn_const)) {
2137 *answer = JS_TRUE;
2138 }
2139 }
2140 } else {
2141 /*
2142 * We can't easily prove that neither operand ever denotes an
2143 * object with a toString or valueOf method.
2144 */
2145 *answer = JS_TRUE;
2146 }
2147 break;
2148
2149 case PN_UNARY:
2150 switch (pn->pn_type) {
2151 case TOK_RP:
2152 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2153 break;
2154
2155 case TOK_DELETE:
2156 pn2 = pn->pn_kid;
2157 switch (pn2->pn_type) {
2158 case TOK_NAME:
2159 case TOK_DOT:
2160 #if JS_HAS_XML_SUPPORT
2161 case TOK_DBLDOT:
2162 #endif
2163 #if JS_HAS_LVALUE_RETURN
2164 case TOK_LP:
2165 #endif
2166 case TOK_LB:
2167 /* All these delete addressing modes have effects too. */
2168 *answer = JS_TRUE;
2169 break;
2170 default:
2171 ok = CheckSideEffects(cx, cg, pn2, answer);
2172 break;
2173 }
2174 break;
2175
2176 default:
2177 /*
2178 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2179 * have direct effects. Of the remaining unary-arity node types,
2180 * we can't easily prove that the operand never denotes an object
2181 * with a toString or valueOf method.
2182 */
2183 *answer = JS_TRUE;
2184 break;
2185 }
2186 break;
2187
2188 case PN_NAME:
2189 /*
2190 * Take care to avoid trying to bind a label name (labels, both for
2191 * statements and property values in object initialisers, have pn_op
2192 * defaulted to JSOP_NOP).
2193 */
2194 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2195 if (!BindNameToSlot(cx, cg, pn))
2196 return JS_FALSE;
2197 if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
2198 /*
2199 * Not an argument or local variable use, so this expression
2200 * could invoke a getter that has side effects.
2201 */
2202 *answer = JS_TRUE;
2203 }
2204 }
2205 pn2 = pn->pn_expr;
2206 if (pn->pn_type == TOK_DOT) {
2207 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2208 return JS_FALSE;
2209 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2210 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2211 /*
2212 * Any dotted property reference could call a getter, except
2213 * for arguments.length where arguments is unambiguous.
2214 */
2215 *answer = JS_TRUE;
2216 }
2217 }
2218 ok = CheckSideEffects(cx, cg, pn2, answer);
2219 break;
2220
2221 case PN_NULLARY:
2222 if (pn->pn_type == TOK_DEBUGGER)
2223 *answer = JS_TRUE;
2224 break;
2225 }
2226 return ok;
2227 }
2228
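/*
 * Sketch of the call-context specialization below (illustrative): for
 * |function f() { var g; ...; g(); }|, the callee g, already bound to a
 * local slot, is emitted as JSOP_CALLLOCAL rather than JSOP_GETLOCAL.
 * The JSOP_CALL* forms also supply the |this| value for the call, which
 * is why the JSOP_ARGUMENTS case below pushes an explicit JSOP_NULL.
 */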
2229 static JSBool
2230 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2231 JSBool callContext)
2232 {
2233 JSOp op;
2234
2235 if (!BindNameToSlot(cx, cg, pn))
2236 return JS_FALSE;
2237 op = PN_OP(pn);
2238
2239 if (callContext) {
2240 switch (op) {
2241 case JSOP_NAME:
2242 op = JSOP_CALLNAME;
2243 break;
2244 case JSOP_GETGVAR:
2245 op = JSOP_CALLGVAR;
2246 break;
2247 case JSOP_GETARG:
2248 op = JSOP_CALLARG;
2249 break;
2250 case JSOP_GETLOCAL:
2251 op = JSOP_CALLLOCAL;
2252 break;
2253 case JSOP_GETUPVAR:
2254 op = JSOP_CALLUPVAR;
2255 break;
2256 default:
2257 JS_ASSERT(op == JSOP_ARGUMENTS);
2258 break;
2259 }
2260 }
2261
2262 if (op == JSOP_ARGUMENTS) {
2263 if (js_Emit1(cx, cg, op) < 0)
2264 return JS_FALSE;
2265 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2266 return JS_FALSE;
2267 } else {
2268 if (pn->pn_slot >= 0) {
2269 EMIT_UINT16_IMM_OP(op, pn->pn_slot);
2270 } else {
2271 if (!EmitAtomOp(cx, pn, op, cg))
2272 return JS_FALSE;
2273 }
2274 }
2275
2276 return JS_TRUE;
2277 }
2278
2279 #if JS_HAS_XML_SUPPORT
2280 static JSBool
2281 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2282 {
2283 JSParseNode *pn2;
2284 uintN oldflags;
2285
2286 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2287 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2288 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2289
2290 pn2 = pn->pn_kid;
2291 oldflags = cg->treeContext.flags;
2292 cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
2293 if (!js_EmitTree(cx, cg, pn2))
2294 return JS_FALSE;
2295 cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
2296 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2297 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2298 return JS_FALSE;
2299 }
2300
2301 return js_Emit1(cx, cg, op) >= 0;
2302 }
2303 #endif
2304
2305 static JSBool
2306 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2307 JSBool callContext)
2308 {
2309 JSParseNode *pn2, *pndot, *pnup, *pndown;
2310 ptrdiff_t top;
2311
2312 pn2 = pn->pn_expr;
2313 if (callContext) {
2314 JS_ASSERT(pn->pn_type == TOK_DOT);
2315 JS_ASSERT(op == JSOP_GETPROP);
2316 op = JSOP_CALLPROP;
2317 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2318 if (pn2->pn_op == JSOP_THIS) {
2319 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2320 /* Fast path for gets of |this.foo|. */
2321 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2322 }
2323 } else if (pn2->pn_type == TOK_NAME) {
2324 /*
2325 * Try to optimize:
2326 * - arguments.length into JSOP_ARGCNT
2327 * - argname.prop into JSOP_GETARGPROP
2328 * - localname.prop into JSOP_GETLOCALPROP
2329 * but don't do this if the property is 'length' -- prefer to emit
2330 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2331 */
2332 if (!BindNameToSlot(cx, cg, pn2))
2333 return JS_FALSE;
2334 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2335 if (pn2->pn_op == JSOP_ARGUMENTS)
2336 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2337 } else {
2338 switch (pn2->pn_op) {
2339 case JSOP_GETARG:
2340 op = JSOP_GETARGPROP;
2341 goto do_indexconst;
2342 case JSOP_GETLOCAL:
2343 op = JSOP_GETLOCALPROP;
2344 do_indexconst: {
2345 JSAtomListElement *ale;
2346 jsatomid atomIndex;
2347
2348 ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
2349 if (!ale)
2350 return JS_FALSE;
2351 atomIndex = ALE_INDEX(ale);
2352 return EmitSlotIndexOp(cx, op, pn2->pn_slot, atomIndex, cg);
2353 }
2354
2355 default:;
2356 }
2357 }
2358 }
2359 }
2360
2361 /*
2362 * If the object operand is also a dotted property reference, reverse the
2363 * list linked via pn_expr temporarily so we can iterate over it from the
2364 * bottom up (reversing again as we go), to avoid excessive recursion.
2365 */
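    /*
     * E.g. (sketch): for a.b.c.d the pn_expr links run d -> c -> b -> a.
     * The loop below reverses the links under pn so we can emit the
     * primary expression a first, then walk back down emitting getprop b
     * and getprop c (restoring each link as we go), leaving the final
     * getprop d to the EmitAtomOp call at the bottom of this function.
     */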
2366 if (pn2->pn_type == TOK_DOT) {
2367 pndot = pn2;
2368 pnup = NULL;
2369 top = CG_OFFSET(cg);
2370 for (;;) {
2371 /* Reverse pndot->pn_expr to point up, not down. */
2372 pndot->pn_offset = top;
2373 pndown = pndot->pn_expr;
2374 pndot->pn_expr = pnup;
2375 if (pndown->pn_type != TOK_DOT)
2376 break;
2377 pnup = pndot;
2378 pndot = pndown;
2379 }
2380
2381 /* pndown is a primary expression, not a dotted property reference. */
2382 if (!js_EmitTree(cx, cg, pndown))
2383 return JS_FALSE;
2384
2385 do {
2386 /* Walk back up the list, emitting annotated name ops. */
2387 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2388 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2389 return JS_FALSE;
2390 }
2391 if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg))
2392 return JS_FALSE;
2393
2394 /* Reverse the pn_expr link again. */
2395 pnup = pndot->pn_expr;
2396 pndot->pn_expr = pndown;
2397 pndown = pndot;
2398 } while ((pndot = pnup) != NULL);
2399 } else {
2400 if (!js_EmitTree(cx, cg, pn2))
2401 return JS_FALSE;
2402 }
2403
2404 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2405 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2406 return JS_FALSE;
2407 }
2408
2409 return EmitAtomOp(cx, pn, op, cg);
2410 }
2411
2412 static JSBool
2413 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2414 {
2415 ptrdiff_t top;
2416 JSParseNode *left, *right, *next, ltmp, rtmp;
2417 jsint slot;
2418
2419 top = CG_OFFSET(cg);
2420 if (pn->pn_arity == PN_LIST) {
2421 /* Left-associative operator chain to avoid too much recursion. */
2422 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2423 JS_ASSERT(pn->pn_count >= 3);
2424 left = pn->pn_head;
2425 right = PN_LAST(pn);
2426 next = left->pn_next;
2427 JS_ASSERT(next != right);
2428
2429 /*
2430  * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2431  * one or more index-expression/JSOP_GETELEM op pairs.
2432 */
2433 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2434 if (!BindNameToSlot(cx, cg, left))
2435 return JS_FALSE;
2436 if (left->pn_op == JSOP_ARGUMENTS &&
2437 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2438 (jsuint)slot < JS_BIT(16)) {
2439 /*
2440  * arguments[i]() requires the arguments object as "this".
2441  * Check that we never generate a list for that usage.
2442 */
2443 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2444 left->pn_offset = next->pn_offset = top;
2445 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2446 left = next;
2447 next = left->pn_next;
2448 }
2449 }
2450
2451 /*
2452 * Check whether we generated JSOP_ARGSUB, just above, and have only
2453 * one more index expression to emit. Given arguments[0][j], we must
2454 * skip the while loop altogether, falling through to emit code for j
2455 * (in the subtree referenced by right), followed by the annotated op,
2456 * at the bottom of this function.
2457 */
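        /*
         * E.g. (sketch): arguments[0][j][k] emits JSOP_ARGSUB<0> above,
         * then j and JSOP_GETELEM in the loop below, and finally k (the
         * right subtree) plus the annotated op at the bottom.
         */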
2458 JS_ASSERT(next != right || pn->pn_count == 3);
2459 if (left == pn->pn_head) {
2460 if (!js_EmitTree(cx, cg, left))
2461 return JS_FALSE;
2462 }
2463 while (next != right) {
2464 if (!js_EmitTree(cx, cg, next))
2465 return JS_FALSE;
2466 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2467 return JS_FALSE;
2468 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2469 return JS_FALSE;
2470 next = next->pn_next;
2471 }
2472 } else {
2473 if (pn->pn_arity == PN_NAME) {
2474 /*
2475 * Set left and right so pn appears to be a TOK_LB node, instead
2476 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2477  * EmitDestructuringOps just below. In the destructuring case,
2478 * the base expression (pn_expr) of the name may be null, which
2479 * means we have to emit a JSOP_BINDNAME.
2480 */
2481 left = pn->pn_expr;
2482 if (!left) {
2483 left = &ltmp;
2484 left->pn_type = TOK_STRING;
2485 left->pn_op = JSOP_BINDNAME;
2486 left->pn_arity = PN_NULLARY;
2487 left->pn_pos = pn->pn_pos;
2488 left->pn_atom = pn->pn_atom;
2489 }
2490 right = &rtmp;
2491 right->pn_type = TOK_STRING;
2492 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2493 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2494 ? JSOP_QNAMEPART
2495 : JSOP_STRING;
2496 right->pn_arity = PN_NULLARY;
2497 right->pn_pos = pn->pn_pos;
2498 right->pn_atom = pn->pn_atom;
2499 } else {
2500 JS_ASSERT(pn->pn_arity == PN_BINARY);
2501 left = pn->pn_left;
2502 right = pn->pn_right;
2503 }
2504
2505 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2506 if (op == JSOP_GETELEM &&
2507 left->pn_type == TOK_NAME &&
2508 right->pn_type == TOK_NUMBER) {
2509 if (!BindNameToSlot(cx, cg, left))
2510 return JS_FALSE;
2511 if (left->pn_op == JSOP_ARGUMENTS &&
2512 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2513 (jsuint)slot < JS_BIT(16)) {
2514 left->pn_offset = right->pn_offset = top;
2515 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2516 return JS_TRUE;
2517 }
2518 }
2519
2520 if (!js_EmitTree(cx, cg, left))
2521 return JS_FALSE;
2522 }
2523
2524 /* The right side of the descendant operator is implicitly quoted. */
2525 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2526 right->pn_op == JSOP_QNAMEPART);
2527 if (!js_EmitTree(cx, cg, right))
2528 return JS_FALSE;
2529 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2530 return JS_FALSE;
2531 return js_Emit1(cx, cg, op) >= 0;
2532 }
2533
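/*
 * Rough sketch of the encodings chosen below: 0 and 1 get JSOP_ZERO and
 * JSOP_ONE; -5 fits JSOP_INT8; 1000 fits JSOP_UINT16; 100000 fits
 * JSOP_UINT24; -1000 (negative but not int8) takes JSOP_INT32; and a
 * non-integer value such as 3.25 is atomized and emitted via JSOP_DOUBLE.
 */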
2534 static JSBool
2535 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2536 {
2537 jsint ival;
2538 uint32 u;
2539 ptrdiff_t off;
2540 jsbytecode *pc;
2541 JSAtom *atom;
2542 JSAtomListElement *ale;
2543
2544 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2545 if (ival == 0)
2546 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2547 if (ival == 1)
2548 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2549 if ((jsint)(int8)ival == ival)
2550 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2551
2552 u = (uint32)ival;
2553 if (u < JS_BIT(16)) {
2554 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2555 } else if (u < JS_BIT(24)) {
2556 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2557 if (off < 0)
2558 return JS_FALSE;
2559 pc = CG_CODE(cg, off);
2560 SET_UINT24(pc, u);
2561 } else {
2562 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2563 if (off < 0)
2564 return JS_FALSE;
2565 pc = CG_CODE(cg, off);
2566 SET_INT32(pc, ival);
2567 }
2568 return JS_TRUE;
2569 }
2570
2571 atom = js_AtomizeDouble(cx, dval);
2572 if (!atom)
2573 return JS_FALSE;
2574
2575 ale = js_IndexAtom(cx, atom, &cg->atomList);
2576 if (!ale)
2577 return JS_FALSE;
2578 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
2579 }
2580
2581 static JSBool
2582 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2583 JSStmtInfo *stmtInfo)
2584 {
2585 JSOp switchOp;
2586 JSBool ok, hasDefault, constPropagated;
2587 ptrdiff_t top, off, defaultOffset;
2588 JSParseNode *pn2, *pn3, *pn4;
2589 uint32 caseCount, tableLength;
2590 JSParseNode **table;
2591 jsdouble d;
2592 jsint i, low, high;
2593 jsval v;
2594 JSAtom *atom;
2595 JSAtomListElement *ale;
2596 intN noteIndex;
2597 size_t switchSize, tableSize;
2598 jsbytecode *pc, *savepc;
2599 #if JS_HAS_BLOCK_SCOPE
2600 jsint count;
2601 #endif
2602
2603     /* Try for the most optimal switch op; fall back if the cases are not dense ints, ultimately to the ECMAv2 condswitch. */
2604 switchOp = JSOP_TABLESWITCH;
2605 ok = JS_TRUE;
2606 hasDefault = constPropagated = JS_FALSE;
2607 defaultOffset = -1;
2608
2609 /*
2610 * If the switch contains let variables scoped by its body, model the
2611 * resulting block on the stack first, before emitting the discriminant's
2612 * bytecode (in case the discriminant contains a stack-model dependency
2613 * such as a let expression).
2614 */
2615 pn2 = pn->pn_right;
2616 #if JS_HAS_BLOCK_SCOPE
2617 if (pn2->pn_type == TOK_LEXICALSCOPE) {
2618 /*
2619 * Push the body's block scope before discriminant code-gen for proper
2620 * static block scope linkage in case the discriminant contains a let
2621 * expression. The block's locals must lie under the discriminant on
2622 * the stack so that case-dispatch bytecodes can find the discriminant
2623 * on top of stack.
2624 */
2625 count = OBJ_BLOCK_COUNT(cx, pn2->pn_pob->object);
2626 js_PushBlockScope(&cg->treeContext, stmtInfo, pn2->pn_pob->object, -1);
2627 stmtInfo->type = STMT_SWITCH;
2628
2629 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2630 if (!EmitObjectOp(cx, pn2->pn_pob, JSOP_ENTERBLOCK, cg))
2631 return JS_FALSE;
2632
2633 /*
2634 * Pop the switch's statement info around discriminant code-gen. Note
2635 * how this leaves cg->treeContext.blockChain referencing the switch's
2636 * block scope object, which is necessary for correct block parenting
2637 * in the case where the discriminant contains a let expression.
2638 */
2639 cg->treeContext.topStmt = stmtInfo->down;
2640 cg->treeContext.topScopeStmt = stmtInfo->downScope;
2641 }
2642 #ifdef __GNUC__
2643 else {
2644 count = 0;
2645 }
2646 #endif
2647 #endif
2648
2649 /*
2650 * Emit code for the discriminant first (or nearly first, in the case of a
2651 * switch whose body is a block scope).
2652 */
2653 if (!js_EmitTree(cx, cg, pn->pn_left))
2654 return JS_FALSE;
2655
2656 /* Switch bytecodes run from here till end of final case. */
2657 top = CG_OFFSET(cg);
2658 #if !JS_HAS_BLOCK_SCOPE
2659 js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2660 #else
2661 if (pn2->pn_type == TOK_LC) {
2662 js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2663 } else {
2664 /* Re-push the switch's statement info record. */
2665 cg->treeContext.topStmt = cg->treeContext.topScopeStmt = stmtInfo;
2666
2667 /* Set the statement info record's idea of top. */
2668 stmtInfo->update = top;
2669
2670 /* Advance pn2 to refer to the switch case list. */
2671 pn2 = pn2->pn_expr;
2672 }
2673 #endif
2674
2675 caseCount = pn2->pn_count;
2676 tableLength = 0;
2677 table = NULL;
2678
2679 if (caseCount == 0 ||
2680 (caseCount == 1 &&
2681 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
2682 caseCount = 0;
2683 low = 0;
2684 high = -1;
2685 } else {
2686 #define INTMAP_LENGTH 256
2687 jsbitmap intmap_space[INTMAP_LENGTH];
2688 jsbitmap *intmap = NULL;
2689 int32 intmap_bitlen = 0;
2690
2691 low = JSVAL_INT_MAX;
2692 high = JSVAL_INT_MIN;
2693
2694 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2695 if (pn3->pn_type == TOK_DEFAULT) {
2696 hasDefault = JS_TRUE;
2697 caseCount--; /* one of the "cases" was the default */
2698 continue;
2699 }
2700
2701 JS_ASSERT(pn3->pn_type == TOK_CASE);
2702 if (switchOp == JSOP_CONDSWITCH)
2703 continue;
2704
2705 pn4 = pn3->pn_left;
2706 switch (pn4->pn_type) {
2707 case TOK_NUMBER:
2708 d = pn4->pn_dval;
2709 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
2710 pn3->pn_val = INT_TO_JSVAL(i);
2711 } else {
2712 atom = js_AtomizeDouble(cx, d);
2713 if (!atom) {
2714 ok = JS_FALSE;
2715 goto release;
2716 }
2717 pn3->pn_val = ATOM_KEY(atom);
2718 }
2719 break;
2720 case TOK_STRING:
2721 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
2722 break;
2723 case TOK_NAME:
2724 if (!pn4->pn_expr) {
2725 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
2726 if (!ok)
2727 goto release;
2728 if (v != JSVAL_HOLE) {
2729 if (!JSVAL_IS_PRIMITIVE(v)) {
2730 /*
2731 * XXX JSOP_LOOKUPSWITCH does not support const-
2732 * propagated object values, see bug 407186.
2733 */
2734 switchOp = JSOP_CONDSWITCH;
2735 continue;
2736 }
2737 pn3->pn_val = v;
2738 constPropagated = JS_TRUE;
2739 break;
2740 }
2741 }
2742 /* FALL THROUGH */
2743 case TOK_PRIMARY:
2744 if (pn4->pn_op == JSOP_TRUE) {
2745 pn3->pn_val = JSVAL_TRUE;
2746 break;
2747 }
2748 if (pn4->pn_op == JSOP_FALSE) {
2749 pn3->pn_val = JSVAL_FALSE;
2750 break;
2751 }
2752 /* FALL THROUGH */
2753 default:
2754 switchOp = JSOP_CONDSWITCH;
2755 continue;
2756 }
2757
2758 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
2759
2760 if (switchOp != JSOP_TABLESWITCH)
2761 continue;
2762 if (!JSVAL_IS_INT(pn3->pn_val)) {
2763 switchOp = JSOP_LOOKUPSWITCH;
2764 continue;
2765 }
2766 i = JSVAL_TO_INT(pn3->pn_val);
2767 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
2768 switchOp = JSOP_LOOKUPSWITCH;
2769 continue;
2770 }
2771 if (i < low)
2772 low = i;
2773 if (high < i)
2774 high = i;
2775
2776 /*
2777 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
2778 * We bias i by 65536 if it's negative, and hope that's a rare
2779 * case (because it requires a malloc'd bitmap).
2780 */
2781 if (i < 0)
2782 i += JS_BIT(16);
2783 if (i >= intmap_bitlen) {
2784 if (!intmap &&
2785 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
2786 intmap = intmap_space;
2787 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
2788 } else {
2789 /* Just grab 8K for the worst-case bitmap. */
2790 intmap_bitlen = JS_BIT(16);
2791 intmap = (jsbitmap *)
2792 JS_malloc(cx,
2793 (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
2794 * sizeof(jsbitmap));
2795 if (!intmap) {
2796 JS_ReportOutOfMemory(cx);
2797 return JS_FALSE;
2798 }
2799 }
2800 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
2801 }
2802 if (JS_TEST_BIT(intmap, i)) {
2803 switchOp = JSOP_LOOKUPSWITCH;
2804 continue;
2805 }
2806 JS_SET_BIT(intmap, i);
2807 }
2808
2809 release:
2810 if (intmap && intmap != intmap_space)
2811 JS_free(cx, intmap);
2812 if (!ok)
2813 return JS_FALSE;
2814
2815 /*
2816  * Compute the table length, and select a lookup switch instead if the
2817  * table would be overlarge or more than half-sparse.
2818 */
2819 if (switchOp == JSOP_TABLESWITCH) {
2820 tableLength = (uint32)(high - low + 1);
2821 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
2822 switchOp = JSOP_LOOKUPSWITCH;
2823 } else if (switchOp == JSOP_LOOKUPSWITCH) {
2824 /*
2825  * A lookup switch supports only atom indexes below the 64K limit.
2826 * Conservatively estimate the maximum possible index during
2827 * switch generation and use conditional switch if it exceeds
2828 * the limit.
2829 */
2830 if (caseCount + cg->atomList.count > JS_BIT(16))
2831 switchOp = JSOP_CONDSWITCH;
2832 }
2833 }
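    /*
     * E.g. (sketch): cases 1, 2, 3 keep JSOP_TABLESWITCH (tableLength 3);
     * cases 1, 2, 1000 fall back to JSOP_LOOKUPSWITCH because tableLength
     * 1000 exceeds 2 * caseCount; and any non-constant case expression
     * already forced JSOP_CONDSWITCH in the loop above.
     */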
2834
2835 /*
2836  * Emit a note with two offsets: the first gives the total switch code
2837  * length; the second, the offset to the first JSOP_CASE if condswitch.
2838 */
2839 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
2840 if (noteIndex < 0)
2841 return JS_FALSE;
2842
2843 if (switchOp == JSOP_CONDSWITCH) {
2844 /*
2845 * 0 bytes of immediate for unoptimized ECMAv2 switch.
2846 */
2847 switchSize = 0;
2848 } else if (switchOp == JSOP_TABLESWITCH) {
2849 /*
2850 * 3 offsets (len, low, high) before the table, 1 per entry.
2851 */
2852 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
2853 } else {
2854 /*
2855 * JSOP_LOOKUPSWITCH:
2856 * 1 offset (len) and 1 atom index (npairs) before the table,
2857 * 1 atom index and 1 jump offset per entry.
2858 */
2859 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
2860 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
2861 }
2862
2863 /*
2864 * Emit switchOp followed by switchSize bytes of jump or lookup table.
2865 *
2866 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
2867 * to emit the immediate operand(s) by which bytecode readers such as
2868 * BuildSpanDepTable discover the length of the switch opcode *before*
2869 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
2870 * also important to zero all unknown jump offset immediate operands,
2871 * so they can be converted to span dependencies with null targets to
2872 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
2873 */
2874 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
2875 return JS_FALSE;
2876
2877 off = -1;
2878 if (switchOp == JSOP_CONDSWITCH) {
2879 intN caseNoteIndex = -1;
2880 JSBool beforeCases = JS_TRUE;
2881
2882 /* Emit code for evaluating cases and jumping to case statements. */
2883 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2884 pn4 = pn3->pn_left;
2885 if (pn4 && !js_EmitTree(cx, cg, pn4))
2886 return JS_FALSE;
2887 if (caseNoteIndex >= 0) {
2888 /* off is the previous JSOP_CASE's bytecode offset. */
2889 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2890 CG_OFFSET(cg) - off)) {
2891 return JS_FALSE;
2892 }
2893 }
2894 if (!pn4) {
2895 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
2896 continue;
2897 }
2898 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
2899 if (caseNoteIndex < 0)
2900 return JS_FALSE;
2901 off = EmitJump(cx, cg, JSOP_CASE, 0);
2902 if (off < 0)
2903 return JS_FALSE;
2904 pn3->pn_offset = off;
2905 if (beforeCases) {
2906 uintN noteCount, noteCountDelta;
2907
2908 /* Switch note's second offset is to first JSOP_CASE. */
2909 noteCount = CG_NOTE_COUNT(cg);
2910 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
2911 off - top)) {
2912 return JS_FALSE;
2913 }
2914 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
2915 if (noteCountDelta != 0)
2916 caseNoteIndex += noteCountDelta;
2917 beforeCases = JS_FALSE;
2918 }
2919 }
2920
2921 /*
2922 * If we didn't have an explicit default (which could fall in between
2923 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
2924 * call in the loop above), link the last case to the implicit default
2925 * for the decompiler.
2926 */
2927 if (!hasDefault &&
2928 caseNoteIndex >= 0 &&
2929 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2930 CG_OFFSET(cg) - off)) {
2931 return JS_FALSE;
2932 }
2933
2934 /* Emit default even if no explicit default statement. */
2935 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
2936 if (defaultOffset < 0)
2937 return JS_FALSE;
2938 } else {
2939 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
2940
2941 if (switchOp == JSOP_TABLESWITCH) {
2942 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2943 SET_JUMP_OFFSET(pc, low);
2944 pc += JUMP_OFFSET_LEN;
2945 SET_JUMP_OFFSET(pc, high);
2946 pc += JUMP_OFFSET_LEN;
2947
2948 /*
2949 * Use malloc to avoid arena bloat for programs with many switches.
2950  * We free the table, if non-null, at label out, so all control flow
2951  * must exit this function through goto out or goto bad.
2952 */
2953 if (tableLength != 0) {
2954 tableSize = (size_t)tableLength * sizeof *table;
2955 table = (JSParseNode **) JS_malloc(cx, tableSize);
2956 if (!table)
2957 return JS_FALSE;
2958 memset(table, 0, tableSize);
2959 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2960 if (pn3->pn_type == TOK_DEFAULT)
2961 continue;
2962 i = JSVAL_TO_INT(pn3->pn_val);
2963 i -= low;
2964 JS_ASSERT((uint32)i < tableLength);
2965 table[i] = pn3;
2966 }
2967 }
2968 } else {
2969 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
2970
2971 /* Fill in the number of cases. */
2972 SET_INDEX(pc, caseCount);
2973 pc += INDEX_LEN;
2974 }
2975
2976 /*
2977 * After this point, all control flow involving JSOP_TABLESWITCH
2978 * must set ok and goto out to exit this function. To keep things
2979 * simple, all switchOp cases exit that way.
2980 */
2981 MUST_FLOW_THROUGH("out");
2982 if (cg->spanDeps) {
2983 /*
2984 * We have already generated at least one big jump so we must
2985 * explicitly add span dependencies for the switch jumps. When
2986 * called below, js_SetJumpOffset can only do it when patching
2987 * the first big jump or when cg->spanDeps is null.
2988 */
2989 if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
2990 goto bad;
2991 }
2992
2993 if (constPropagated) {
2994 /*
2995 * Skip switchOp, as we are not setting jump offsets in the two
2996 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
2997 * unless there was an error.
2998 */
2999 savepc = CG_NEXT(cg);
3000 CG_NEXT(cg) = pc + 1;
3001 if (switchOp == JSOP_TABLESWITCH) {
3002 for (i = 0; i < (jsint)tableLength; i++) {
3003 pn3 = table[i];
3004 if (pn3 &&
3005 (pn4 = pn3->pn_left) != NULL &&
3006 pn4->pn_type == TOK_NAME) {
3007 /* Note a propagated constant with the const's name. */
3008 JS_ASSERT(!pn4->pn_expr);
3009 ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
3010 if (!ale)
3011 goto bad;
3012 CG_NEXT(cg) = pc;
3013 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3014 ALE_INDEX(ale)) < 0) {
3015 goto bad;
3016 }
3017 }
3018 pc += JUMP_OFFSET_LEN;
3019 }
3020 } else {
3021 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3022 pn4 = pn3->pn_left;
3023 if (pn4 && pn4->pn_type == TOK_NAME) {
3024 /* Note a propagated constant with the const's name. */
3025 JS_ASSERT(!pn4->pn_expr);
3026 ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
3027 if (!ale)
3028 goto bad;
3029 CG_NEXT(cg) = pc;
3030 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3031 ALE_INDEX(ale)) < 0) {
3032 goto bad;
3033 }
3034 }
3035 pc += INDEX_LEN + JUMP_OFFSET_LEN;
3036 }
3037 }
3038 CG_NEXT(cg) = savepc;
3039 }
3040 }
3041
3042 /* Emit code for each case's statements, copying pn_offset up to pn3. */
3043 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3044 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3045 CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
3046 pn4 = pn3->pn_right;
3047 ok = js_EmitTree(cx, cg, pn4);
3048 if (!ok)
3049 goto out;
3050 pn3->pn_offset = pn4->pn_offset;
3051 if (pn3->pn_type == TOK_DEFAULT)
3052 off = pn3->pn_offset - top;
3053 }
3054
3055 if (!hasDefault) {
3056 /* If no default case, offset for default is to end of switch. */
3057 off = CG_OFFSET(cg) - top;
3058 }
3059
3060     /* We had better have set "off" by now. */
3061 JS_ASSERT(off != -1);
3062
3063 /* Set the default offset (to end of switch if no default). */
3064 if (switchOp == JSOP_CONDSWITCH) {
3065 pc = NULL;
3066 JS_ASSERT(defaultOffset != -1);
3067 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3068 off - (defaultOffset - top));
3069 if (!ok)
3070 goto out;
3071 } else {
3072 pc = CG_CODE(cg, top);
3073 ok = js_SetJumpOffset(cx, cg, pc, off);
3074 if (!ok)
3075 goto out;
3076 pc += JUMP_OFFSET_LEN;
3077 }
3078
3079 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3080 off = CG_OFFSET(cg) - top;
3081 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3082 if (!ok)
3083 goto out;
3084
3085 if (switchOp == JSOP_TABLESWITCH) {
3086 /* Skip over the already-initialized switch bounds. */
3087 pc += 2 * JUMP_OFFSET_LEN;
3088
3089 /* Fill in the jump table, if there is one. */
3090 for (i = 0; i < (jsint)tableLength; i++) {
3091 pn3 = table[i];
3092 off = pn3 ? pn3->pn_offset - top : 0;
3093 ok = js_SetJumpOffset(cx, cg, pc, off);
3094 if (!ok)
3095 goto out;
3096 pc += JUMP_OFFSET_LEN;
3097 }
3098 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3099 /* Skip over the already-initialized number of cases. */
3100 pc += INDEX_LEN;
3101
3102 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3103 if (pn3->pn_type == TOK_DEFAULT)
3104 continue;
3105 if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
3106 goto bad;
3107 ale = js_IndexAtom(cx, atom, &cg->atomList);
3108 if (!ale)
3109 goto bad;
3110 SET_INDEX(pc, ALE_INDEX(ale));
3111 pc += INDEX_LEN;
3112
3113 off = pn3->pn_offset - top;
3114 ok = js_SetJumpOffset(cx, cg, pc, off);
3115 if (!ok)
3116 goto out;
3117 pc += JUMP_OFFSET_LEN;
3118 }
3119 }
3120
3121 out:
3122 if (table)
3123 JS_free(cx, table);
3124 if (ok) {
3125 ok = js_PopStatementCG(cx, cg);
3126
3127 #if JS_HAS_BLOCK_SCOPE
3128 if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
3129 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3130 #endif
3131 }
3132 return ok;
3133
3134 bad:
3135 ok = JS_FALSE;
3136 goto out;
3137 }
3138
3139 JSBool
3140 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3141 {
3142 if (cg->treeContext.flags & TCF_FUN_IS_GENERATOR) {
3143 /* JSOP_GENERATOR must be the first instruction. */
3144 CG_SWITCH_TO_PROLOG(cg);
3145 JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3146 if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3147 return JS_FALSE;
3148 CG_SWITCH_TO_MAIN(cg);
3149 }
3150
3151 return js_EmitTree(cx, cg, body) &&
3152 js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
3153 js_NewScriptFromCG(cx, cg);
3154 }
3155
3156 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3157 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn) \
3158 JS_BEGIN_MACRO \
3159 uintN line_ = (pn)->pn_pos.begin.lineno; \
3160 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3161 if (delta_ != 0) { \
3162 /* \
3163 * Encode any change in the current source line number by using \
3164 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3165 * whichever consumes less space. \
3166 * \
3167 * NB: We handle backward line number deltas (possible with for \
3168 * loops where the update part is emitted after the body, but its \
3169 * line number is <= any line number in the body) here by letting \
3170 * unsigned delta_ wrap to a very large number, which triggers a \
3171 * SRC_SETLINE. \
3172 */ \
3173 CG_CURRENT_LINE(cg) = line_; \
3174 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3175 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3176 return JS_FALSE; \
3177 } else { \
3178 do { \
3179 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3180 return JS_FALSE; \
3181 } while (--delta_ != 0); \
3182 } \
3183 } \
3184 JS_END_MACRO
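/*
 * Sketch of the trade-off encoded above: a delta of one line emits a
 * single SRC_NEWLINE; once delta_ reaches the size cost of a SRC_SETLINE
 * note (2, or 4 when line_ needs a 3-byte offset), one SRC_SETLINE is
 * cheaper and is emitted instead; a backward delta wraps to a huge
 * unsigned value and therefore also takes the SRC_SETLINE path.
 */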
3185
3186 /* A function, so that we avoid macro-bloating all the other callsites. */
3187 static JSBool
3188 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3189 {
3190 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
3191 return JS_TRUE;
3192 }
3193
3194 static JSBool
3195 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3196 JSParseNode *pn, jsatomid *result)
3197 {
3198 jsatomid atomIndex;
3199 JSAtomListElement *ale;
3200
3201 if (pn->pn_slot >= 0) {
3202 atomIndex = (jsatomid) pn->pn_slot;
3203 } else {
3204 ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
3205 if (!ale)
3206 return JS_FALSE;
3207 atomIndex = ALE_INDEX(ale);
3208 }
3209
3210 if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
3211 (!(cg->treeContext.flags & TCF_IN_FUNCTION) ||
3212 (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) {
3213 /* Emit a prolog bytecode to predefine the variable. */
3214 CG_SWITCH_TO_PROLOG(cg);
3215 if (!UpdateLineNumberNotes(cx, cg, pn))
3216 return JS_FALSE;
3217 EMIT_INDEX_OP(prologOp, atomIndex);
3218 CG_SWITCH_TO_MAIN(cg);
3219 }
3220
3221 if (result)
3222 *result = atomIndex;
3223 return JS_TRUE;
3224 }
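/*
 * E.g. (sketch): for top-level |var x;|, or for a var in a heavyweight
 * function, the caller's prologOp (such as JSOP_DEFVAR) is emitted into
 * the prolog here so that x exists before the main bytecode runs; a var
 * already bound to a slot in a lightweight function needs no prolog op.
 */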
3225
3226 #if JS_HAS_DESTRUCTURING
3227
3228 typedef JSBool
3229 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3230 JSParseNode *pn);
3231
3232 static JSBool
3233 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3234 JSParseNode *pn)
3235 {
3236 JS_ASSERT(pn->pn_type == TOK_NAME);
3237 if (!BindNameToSlot(cx, cg, pn))
3238 return JS_FALSE;
3239
3240 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS);
3241 return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3242 }
3243
3244 static JSBool
3245 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3246 JSParseNode *pn)
3247 {
3248 JSParseNode *pn2, *pn3;
3249 DestructuringDeclEmitter emitter;
3250
3251 if (pn->pn_type == TOK_RB) {
3252 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3253 if (pn2->pn_type == TOK_COMMA)
3254 continue;
3255 emitter = (pn2->pn_type == TOK_NAME)
3256 ? EmitDestructuringDecl
3257 : EmitDestructuringDecls;
3258 if (!emitter(cx, cg, prologOp, pn2))
3259 return JS_FALSE;
3260 }
3261 } else {
3262 JS_ASSERT(pn->pn_type == TOK_RC);
3263 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3264 pn3 = pn2->pn_right;
3265 emitter = (pn3->pn_type == TOK_NAME)
3266 ? EmitDestructuringDecl
3267 : EmitDestructuringDecls;
3268 if (!emitter(cx, cg, prologOp, pn3))
3269 return JS_FALSE;
3270 }
3271 }
3272 return JS_TRUE;
3273 }
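/*
 * E.g. (sketch): for |var [a, {b: c}] = expr|, the array pattern recurses
 * below: a is a TOK_NAME and gets EmitDestructuringDecl directly, while
 * the object pattern recurses again and declares c, the value side of the
 * b: c property node.
 */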
3274
3275 static JSBool
3276 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3277
3278 static JSBool
3279 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3280 {
3281