Contents of /trunk/js/jsemit.cpp

Revision 460
Sat Sep 26 23:15:22 2009 UTC by siliconforks
File size: 255503 bytes
Upgrade to SpiderMonkey from Firefox 3.5.3.

1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
3 *
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 *
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
11 *
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
16 *
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
19 *
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
24 *
25 * Contributor(s):
26 *
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
38 *
39 * ***** END LICENSE BLOCK ***** */
40
41 /*
42 * JS bytecode generation.
43 */
44 #include "jsstddef.h"
45 #ifdef HAVE_MEMORY_H
46 #include <memory.h>
47 #endif
48 #include <new>
49 #include <string.h>
50 #include "jstypes.h"
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
53 #include "jsbit.h"
54 #include "jsprf.h"
55 #include "jsapi.h"
56 #include "jsatom.h"
57 #include "jsbool.h"
58 #include "jscntxt.h"
59 #include "jsversion.h"
60 #include "jsemit.h"
61 #include "jsfun.h"
62 #include "jsnum.h"
63 #include "jsopcode.h"
64 #include "jsparse.h"
65 #include "jsregexp.h"
66 #include "jsscan.h"
67 #include "jsscope.h"
68 #include "jsscript.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
71
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
76
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
81
82 static JSBool
83 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
84 uintN stackDepth, size_t start, size_t end);
85
86 JSCodeGenerator::JSCodeGenerator(JSCompiler *jsc,
87 JSArenaPool *cpool, JSArenaPool *npool,
88 uintN lineno)
89 : JSTreeContext(jsc),
90 codePool(cpool), notePool(npool),
91 codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
92 stackDepth(0), maxStackDepth(0),
93 ntrynotes(0), lastTryNode(NULL),
94 spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
95 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
96 arrayCompDepth(0),
97 emitLevel(0)
98 {
99 flags = TCF_COMPILING;
100 memset(&prolog, 0, sizeof prolog);
101 memset(&main, 0, sizeof main);
102 current = &main;
103 firstLine = prolog.currentLine = main.currentLine = lineno;
104 prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
105 memset(&upvarMap, 0, sizeof upvarMap);
106 }
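/*
 * NB: a code generator maintains two bytecode streams, prolog and main.
 * The prolog stream typically receives hoisted declarations (e.g., top-level
 * function definitions), the main stream the evaluated body; current points
 * at main by default, and the CG_BASE/CG_NEXT/CG_LIMIT macros used below all
 * operate through cg->current.
 */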
107
108 JSCodeGenerator::~JSCodeGenerator()
109 {
110 JS_ARENA_RELEASE(codePool, codeMark);
111 JS_ARENA_RELEASE(notePool, noteMark);
112
113 /* NB: non-null only after OOM. */
114 if (spanDeps)
115 JS_free(compiler->context, spanDeps);
116
117 if (upvarMap.vector)
118 JS_free(compiler->context, upvarMap.vector);
119 }
120
121 static ptrdiff_t
122 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
123 {
124 jsbytecode *base, *limit, *next;
125 ptrdiff_t offset, length;
126 size_t incr, size;
127
128 base = CG_BASE(cg);
129 next = CG_NEXT(cg);
130 limit = CG_LIMIT(cg);
131 offset = PTRDIFF(next, base, jsbytecode);
132 if (next + delta > limit) {
133 length = offset + delta;
134 length = (length <= BYTECODE_CHUNK)
135 ? BYTECODE_CHUNK
136 : JS_BIT(JS_CeilingLog2(length));
137 incr = BYTECODE_SIZE(length);
138 if (!base) {
139 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
140 } else {
141 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
142 incr -= size;
143 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
144 }
145 if (!base) {
146 js_ReportOutOfScriptQuota(cx);
147 return -1;
148 }
149 CG_BASE(cg) = base;
150 CG_LIMIT(cg) = base + length;
151 CG_NEXT(cg) = base + offset;
152 }
153 return offset;
154 }
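/*
 * Growth arithmetic, for instance: with offset 250 and delta 10 in a full
 * first chunk, length = 260 exceeds BYTECODE_CHUNK, so it rounds up to the
 * next power of two, JS_BIT(JS_CeilingLog2(260)) == 512, and the arena grows
 * by the difference. On allocation failure we call js_ReportOutOfScriptQuota
 * and return -1, which every js_Emit* caller below checks.
 */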
155
156 static void
157 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
158 {
159 jsbytecode *pc;
160 JSOp op;
161 const JSCodeSpec *cs;
162 uintN extra, depth, nuses;
163 intN ndefs;
164
165 pc = CG_CODE(cg, target);
166 op = (JSOp) *pc;
167 cs = &js_CodeSpec[op];
168 #ifdef JS_TRACER
169 extern uint8 js_opcode2extra[];
170 extra = js_opcode2extra[op];
171 #else
172 extra = 0;
173 #endif
174 if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
175 depth = (uintN) cg->stackDepth +
176 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
177 extra;
178 if (depth > cg->maxStackDepth)
179 cg->maxStackDepth = depth;
180 }
181
182 nuses = js_GetStackUses(cs, op, pc);
183 cg->stackDepth -= nuses;
184 JS_ASSERT(cg->stackDepth >= 0);
185 if (cg->stackDepth < 0) {
186 char numBuf[12];
187 JSTokenStream *ts;
188
189 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
190 ts = &cg->compiler->tokenStream;
191 JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
192 js_GetErrorMessage, NULL,
193 JSMSG_STACK_UNDERFLOW,
194 ts->filename ? ts->filename : "stdin",
195 numBuf);
196 }
197 ndefs = cs->ndefs;
198 if (ndefs < 0) {
199 JSObject *blockObj;
200
201 /* We just executed IndexParsedObject */
202 JS_ASSERT(op == JSOP_ENTERBLOCK);
203 JS_ASSERT(nuses == 0);
204 blockObj = cg->objectList.lastbox->object;
205 JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
206 JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));
207
208 OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
209 ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
210 }
211 cg->stackDepth += ndefs;
212 if ((uintN)cg->stackDepth > cg->maxStackDepth)
213 cg->maxStackDepth = cg->stackDepth;
214 }
215
216 ptrdiff_t
217 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
218 {
219 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
220
221 if (offset >= 0) {
222 *CG_NEXT(cg)++ = (jsbytecode)op;
223 UpdateDepth(cx, cg, offset);
224 }
225 return offset;
226 }
227
228 ptrdiff_t
229 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
230 {
231 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
232
233 if (offset >= 0) {
234 jsbytecode *next = CG_NEXT(cg);
235 next[0] = (jsbytecode)op;
236 next[1] = op1;
237 CG_NEXT(cg) = next + 2;
238 UpdateDepth(cx, cg, offset);
239 }
240 return offset;
241 }
242
243 ptrdiff_t
244 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
245 jsbytecode op2)
246 {
247 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
248
249 if (offset >= 0) {
250 jsbytecode *next = CG_NEXT(cg);
251 next[0] = (jsbytecode)op;
252 next[1] = op1;
253 next[2] = op2;
254 CG_NEXT(cg) = next + 3;
255 UpdateDepth(cx, cg, offset);
256 }
257 return offset;
258 }
259
260 ptrdiff_t
261 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
262 {
263 ptrdiff_t length = 1 + (ptrdiff_t)extra;
264 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
265
266 if (offset >= 0) {
267 jsbytecode *next = CG_NEXT(cg);
268 *next = (jsbytecode)op;
269 memset(next + 1, 0, BYTECODE_SIZE(extra));
270 CG_NEXT(cg) = next + length;
271
272 /*
273 * Don't UpdateDepth if op's use-count comes from the immediate
274 * operand yet to be stored in the extra bytes after op.
275 */
276 if (js_CodeSpec[op].nuses >= 0)
277 UpdateDepth(cx, cg, offset);
278 }
279 return offset;
280 }
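/*
 * Sketch of the intended js_EmitN pattern (illustrative operand names): the
 * caller reserves zeroed space for immediates it will patch once known, as
 * the switch emitter does for its jump table:
 *
 *   ptrdiff_t off = js_EmitN(cx, cg, JSOP_TABLESWITCH, switchSize);
 *   if (off < 0)
 *       return JS_FALSE;
 *   pc = CG_CODE(cg, off);
 *   SET_JUMP_OFFSET(pc, offsetToDefault);   // patch immediates in place
 */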
281
282 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
283 const char js_with_statement_str[] = "with statement";
284 const char js_finally_block_str[] = "finally block";
285 const char js_script_str[] = "script";
286
287 static const char *statementName[] = {
288 "label statement", /* LABEL */
289 "if statement", /* IF */
290 "else statement", /* ELSE */
291 "destructuring body", /* BODY */
292 "switch statement", /* SWITCH */
293 "block", /* BLOCK */
294 js_with_statement_str, /* WITH */
295 "catch block", /* CATCH */
296 "try block", /* TRY */
297 js_finally_block_str, /* FINALLY */
298 js_finally_block_str, /* SUBROUTINE */
299 "do loop", /* DO_LOOP */
300 "for loop", /* FOR_LOOP */
301 "for/in loop", /* FOR_IN_LOOP */
302 "while loop", /* WHILE_LOOP */
303 };
304
305 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
306
307 static const char *
308 StatementName(JSCodeGenerator *cg)
309 {
310 if (!cg->topStmt)
311 return js_script_str;
312 return statementName[cg->topStmt->type];
313 }
314
315 static void
316 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
317 {
318 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
319 StatementName(cg));
320 }
321
322 /**
323 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
324 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
325 into unconditional (gotos and gosubs), and conditional jumps or branches
326 (which pop a value, test it, and jump depending on its value). Most jumps
327 have just one immediate operand, a signed offset from the jump opcode's pc
328 to the target bytecode. The lookup and table switch opcodes may contain
329 many jump offsets.
330
331 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
332 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
333 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
334 the extended form of the JSOP_OR branch opcode). The unextended or short
335 formats have 16-bit signed immediate offset operands, the extended or long
336 formats have 32-bit signed immediates. The span-dependency problem consists
337 of selecting as few long instructions as possible, or about as few -- since
338 jumps can span other jumps, extending one jump may cause another to need to
339 be extended.
340
341 Most JS scripts are short, so need no extended jumps. We optimize for this
342 case by generating short jumps until we know a long jump is needed. After
343 that point, we keep generating short jumps, but each jump's 16-bit immediate
344 offset operand is actually an unsigned index into cg->spanDeps, an array of
345 JSSpanDep structs. Each struct tells the top offset in the script of the
346 opcode, the "before" offset of the jump (which will be the same as top for
347 simplex jumps, but which will index further into the bytecode array for a
348 non-initial jump offset in a lookup or table switch), the after "offset"
349 adjusted during span-dependent instruction selection (initially the same
350 value as the "before" offset), and the jump target (more below).
351
352 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
353 ensure that all bytecode generated so far can be inspected to discover where
354 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
355 that we generate span-dependency records sorted by their offsets, so we can
356 binary-search when trying to find a JSSpanDep for a given bytecode offset,
357 or the nearest JSSpanDep at or above a given pc.
358
359 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
360 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
361 tells us that we need to binary-search for the cg->spanDeps entry by the
362 jump opcode's bytecode offset (sd->before).
363
364 Jump targets need to be maintained in a data structure that lets us look
365 up an already-known target by its address (jumps may have a common target),
366 and that also lets us update the addresses (script-relative, a.k.a. absolute
367 offsets) of targets that come after a jump target (for when a jump below
368 that target needs to be extended). We use an AVL tree, implemented using
369 recursion, but with some tricky optimizations to its height-balancing code
370 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
371
372 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
373 positive sign, even though they link "backward" (i.e., toward lower bytecode
374 address). We don't want to waste space and search time in the AVL tree for
375 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
376 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
377 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
378 target, or is still awaiting backpatching.
379
380 Note that backpatch chains would present a problem for BuildSpanDepTable,
381 which inspects bytecode to build cg->spanDeps on demand, when the first
382 short jump offset overflows. To solve this temporary problem, we emit a
383 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
384 nuses/ndefs counts help keep the stack balanced, but whose opcode format
385 distinguishes its backpatch delta immediate operand from a normal jump
386 offset.
387 */
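/*
 * Concrete encoding example: suppose a JSOP_GOTO at script offset 100 must
 * jump forward 70000 bytes. 70000 overflows the 16-bit short form, so once
 * cg->spanDeps exists the two immediate bytes after the opcode hold not a
 * jump offset but an index into cg->spanDeps (or SPANDEP_INDEX_HUGE past
 * 65534 entries, forcing a binary search by sd->before). Only when
 * OptimizeSpanDeps runs is the opcode rewritten to JSOP_GOTOX with a 32-bit
 * immediate and the surrounding bytecode shifted to make room.
 */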
388 static int
389 BalanceJumpTargets(JSJumpTarget **jtp)
390 {
391 JSJumpTarget *jt, *jt2, *root;
392 int dir, otherDir, heightChanged;
393 JSBool doubleRotate;
394
395 jt = *jtp;
396 JS_ASSERT(jt->balance != 0);
397
398 if (jt->balance < -1) {
399 dir = JT_RIGHT;
400 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
401 } else if (jt->balance > 1) {
402 dir = JT_LEFT;
403 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
404 } else {
405 return 0;
406 }
407
408 otherDir = JT_OTHER_DIR(dir);
409 if (doubleRotate) {
410 jt2 = jt->kids[otherDir];
411 *jtp = root = jt2->kids[dir];
412
413 jt->kids[otherDir] = root->kids[dir];
414 root->kids[dir] = jt;
415
416 jt2->kids[dir] = root->kids[otherDir];
417 root->kids[otherDir] = jt2;
418
419 heightChanged = 1;
420 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
421 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
422 root->balance = 0;
423 } else {
424 *jtp = root = jt->kids[otherDir];
425 jt->kids[otherDir] = root->kids[dir];
426 root->kids[dir] = jt;
427
428 heightChanged = (root->balance != 0);
429 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
430 }
431
432 return heightChanged;
433 }
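/*
 * Worked rotation, e.g. inserting offsets 10, 20, 30 in order: the node for
 * 10 ends up with balance +2 (right height minus left height, per AVLCheck
 * below), so dir is JT_LEFT and doubleRotate is false; the single rotation
 * promotes 20 to subtree root and resets both balances to 0.
 */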
434
435 typedef struct AddJumpTargetArgs {
436 JSContext *cx;
437 JSCodeGenerator *cg;
438 ptrdiff_t offset;
439 JSJumpTarget *node;
440 } AddJumpTargetArgs;
441
442 static int
443 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
444 {
445 JSJumpTarget *jt;
446 int balanceDelta;
447
448 jt = *jtp;
449 if (!jt) {
450 JSCodeGenerator *cg = args->cg;
451
452 jt = cg->jtFreeList;
453 if (jt) {
454 cg->jtFreeList = jt->kids[JT_LEFT];
455 } else {
456 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
457 sizeof *jt);
458 if (!jt) {
459 js_ReportOutOfScriptQuota(args->cx);
460 return 0;
461 }
462 }
463 jt->offset = args->offset;
464 jt->balance = 0;
465 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
466 cg->numJumpTargets++;
467 args->node = jt;
468 *jtp = jt;
469 return 1;
470 }
471
472 if (jt->offset == args->offset) {
473 args->node = jt;
474 return 0;
475 }
476
477 if (args->offset < jt->offset)
478 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
479 else
480 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
481 if (!args->node)
482 return 0;
483
484 jt->balance += balanceDelta;
485 return (balanceDelta && jt->balance)
486 ? 1 - BalanceJumpTargets(jtp)
487 : 0;
488 }
489
490 #ifdef DEBUG_brendan
491 static int AVLCheck(JSJumpTarget *jt)
492 {
493 int lh, rh;
494
495 if (!jt) return 0;
496 JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
497 lh = AVLCheck(jt->kids[JT_LEFT]);
498 rh = AVLCheck(jt->kids[JT_RIGHT]);
499 JS_ASSERT(jt->balance == rh - lh);
500 return 1 + JS_MAX(lh, rh);
501 }
502 #endif
503
504 static JSBool
505 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
506 ptrdiff_t off)
507 {
508 AddJumpTargetArgs args;
509
510 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
511 ReportStatementTooLarge(cx, cg);
512 return JS_FALSE;
513 }
514
515 args.cx = cx;
516 args.cg = cg;
517 args.offset = sd->top + off;
518 args.node = NULL;
519 AddJumpTarget(&args, &cg->jumpTargets);
520 if (!args.node)
521 return JS_FALSE;
522
523 #ifdef DEBUG_brendan
524 AVLCheck(cg->jumpTargets);
525 #endif
526
527 SD_SET_TARGET(sd, args.node);
528 return JS_TRUE;
529 }
530
531 #define SPANDEPS_MIN 256
532 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
533 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
534
535 static JSBool
536 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
537 ptrdiff_t off)
538 {
539 uintN index;
540 JSSpanDep *sdbase, *sd;
541 size_t size;
542
543 index = cg->numSpanDeps;
544 if (index + 1 == 0) {
545 ReportStatementTooLarge(cx, cg);
546 return JS_FALSE;
547 }
548
549 if ((index & (index - 1)) == 0 &&
550 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
551 size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
552 sdbase = (JSSpanDep *) JS_realloc(cx, sdbase, size + size);
553 if (!sdbase)
554 return JS_FALSE;
555 cg->spanDeps = sdbase;
556 }
557
558 cg->numSpanDeps = index + 1;
559 sd = cg->spanDeps + index;
560 sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
561 sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);
562
563 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
564 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
565 if (off != 0) {
566 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
567 if (off > BPDELTA_MAX) {
568 ReportStatementTooLarge(cx, cg);
569 return JS_FALSE;
570 }
571 }
572 SD_SET_BPDELTA(sd, off);
573 } else if (off == 0) {
574 /* Jump offset will be patched directly, without backpatch chaining. */
575 SD_SET_TARGET(sd, 0);
576 } else {
577 /* The jump offset in off is non-zero, therefore it's already known. */
578 if (!SetSpanDepTarget(cx, cg, sd, off))
579 return JS_FALSE;
580 }
581
582 if (index > SPANDEP_INDEX_MAX)
583 index = SPANDEP_INDEX_HUGE;
584 SET_SPANDEP_INDEX(pc2, index);
585 return JS_TRUE;
586 }
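/*
 * Capacity note: the (index & (index - 1)) == 0 test fires only when index
 * is a power of two, so cg->spanDeps doubles geometrically -- the first
 * allocation holds SPANDEPS_MIN (256) entries, the next 512, then 1024, and
 * so on, keeping appends amortized O(1).
 */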
587
588 static jsbytecode *
589 AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
590 {
591 JSOp op;
592 jsbytecode *pc2;
593 ptrdiff_t off;
594 jsint low, high;
595 uintN njumps, indexlen;
596
597 op = (JSOp) *pc;
598 JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
599 pc2 = pc;
600 off = GET_JUMP_OFFSET(pc2);
601 if (!AddSpanDep(cx, cg, pc, pc2, off))
602 return NULL;
603 pc2 += JUMP_OFFSET_LEN;
604 if (op == JSOP_TABLESWITCH) {
605 low = GET_JUMP_OFFSET(pc2);
606 pc2 += JUMP_OFFSET_LEN;
607 high = GET_JUMP_OFFSET(pc2);
608 pc2 += JUMP_OFFSET_LEN;
609 njumps = (uintN) (high - low + 1);
610 indexlen = 0;
611 } else {
612 njumps = GET_UINT16(pc2);
613 pc2 += UINT16_LEN;
614 indexlen = INDEX_LEN;
615 }
616 while (njumps) {
617 --njumps;
618 pc2 += indexlen;
619 off = GET_JUMP_OFFSET(pc2);
620 if (!AddSpanDep(cx, cg, pc, pc2, off))
621 return NULL;
622 pc2 += JUMP_OFFSET_LEN;
623 }
624 return 1 + pc2;
625 }
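/*
 * Operand layouts walked above, after the initial default-target offset:
 *
 *   JSOP_TABLESWITCH: low, high, then high - low + 1 jump offsets, one per
 *   consecutive case value (indexlen == 0);
 *
 *   JSOP_LOOKUPSWITCH: a uint16 case count, then that many pairs of a
 *   constant index (INDEX_LEN bytes, skipped here) and a jump offset.
 *
 * Every jump offset gets its own JSSpanDep sharing the same sd->top.
 */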
626
627 static JSBool
628 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
629 {
630 jsbytecode *pc, *end;
631 JSOp op;
632 const JSCodeSpec *cs;
633 ptrdiff_t off;
634
635 pc = CG_BASE(cg) + cg->spanDepTodo;
636 end = CG_NEXT(cg);
637 while (pc != end) {
638 JS_ASSERT(pc < end);
639 op = (JSOp)*pc;
640 cs = &js_CodeSpec[op];
641
642 switch (JOF_TYPE(cs->format)) {
643 case JOF_TABLESWITCH:
644 case JOF_LOOKUPSWITCH:
645 pc = AddSwitchSpanDeps(cx, cg, pc);
646 if (!pc)
647 return JS_FALSE;
648 break;
649
650 case JOF_JUMP:
651 off = GET_JUMP_OFFSET(pc);
652 if (!AddSpanDep(cx, cg, pc, pc, off))
653 return JS_FALSE;
654 /* FALL THROUGH */
655 default:
656 pc += cs->length;
657 break;
658 }
659 }
660
661 return JS_TRUE;
662 }
663
664 static JSSpanDep *
665 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
666 {
667 uintN index;
668 ptrdiff_t offset;
669 int lo, hi, mid;
670 JSSpanDep *sd;
671
672 index = GET_SPANDEP_INDEX(pc);
673 if (index != SPANDEP_INDEX_HUGE)
674 return cg->spanDeps + index;
675
676 offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
677 lo = 0;
678 hi = cg->numSpanDeps - 1;
679 while (lo <= hi) {
680 mid = (lo + hi) / 2;
681 sd = cg->spanDeps + mid;
682 if (sd->before == offset)
683 return sd;
684 if (sd->before < offset)
685 lo = mid + 1;
686 else
687 hi = mid - 1;
688 }
689
690 JS_ASSERT(0);
691 return NULL;
692 }
693
694 static JSBool
695 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
696 ptrdiff_t delta)
697 {
698 JSSpanDep *sd;
699
700 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
701 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
702 SET_JUMP_OFFSET(pc, delta);
703 return JS_TRUE;
704 }
705
706 if (delta > BPDELTA_MAX) {
707 ReportStatementTooLarge(cx, cg);
708 return JS_FALSE;
709 }
710
711 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
712 return JS_FALSE;
713
714 sd = GetSpanDep(cg, pc);
715 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
716 SD_SET_BPDELTA(sd, delta);
717 return JS_TRUE;
718 }
719
720 static void
721 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
722 {
723 if (jt->offset > pivot) {
724 jt->offset += delta;
725 if (jt->kids[JT_LEFT])
726 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
727 }
728 if (jt->kids[JT_RIGHT])
729 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
730 }
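/*
 * The traversal above prunes on BST order: when a node's offset is at or
 * below pivot, everything in its left subtree is too, so only its right
 * subtree can contain targets that moved. Targets exactly at pivot stay
 * put; only offsets strictly beyond the grown instruction shift by delta.
 */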
731
732 static JSSpanDep *
733 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
734 JSSpanDep *guard)
735 {
736 int num, hi, mid;
737 JSSpanDep *sdbase, *sd;
738
739 num = cg->numSpanDeps;
740 JS_ASSERT(num > 0);
741 hi = num - 1;
742 sdbase = cg->spanDeps;
743 while (lo <= hi) {
744 mid = (lo + hi) / 2;
745 sd = sdbase + mid;
746 if (sd->before == offset)
747 return sd;
748 if (sd->before < offset)
749 lo = mid + 1;
750 else
751 hi = mid - 1;
752 }
753 if (lo == num)
754 return guard;
755 sd = sdbase + lo;
756 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
757 return sd;
758 }
759
760 static void
761 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
762 {
763 if (jt->kids[JT_LEFT])
764 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
765 if (jt->kids[JT_RIGHT])
766 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
767 jt->kids[JT_LEFT] = cg->jtFreeList;
768 cg->jtFreeList = jt;
769 }
770
771 static JSBool
772 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
773 {
774 jsbytecode *pc, *oldpc, *base, *limit, *next;
775 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
776 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
777 JSBool done;
778 JSOp op;
779 uint32 type;
780 size_t size, incr;
781 jssrcnote *sn, *snlimit;
782 JSSrcNoteSpec *spec;
783 uintN i, n, noteIndex;
784 JSTryNode *tryNode;
785 #ifdef DEBUG_brendan
786 int passes = 0;
787 #endif
788
789 base = CG_BASE(cg);
790 sdbase = cg->spanDeps;
791 sdlimit = sdbase + cg->numSpanDeps;
792 offset = CG_OFFSET(cg);
793 growth = 0;
794
795 do {
796 done = JS_TRUE;
797 delta = 0;
798 top = pivot = -1;
799 sdtop = NULL;
800 pc = NULL;
801 op = JSOP_NOP;
802 type = 0;
803 #ifdef DEBUG_brendan
804 passes++;
805 #endif
806
807 for (sd = sdbase; sd < sdlimit; sd++) {
808 JS_ASSERT(JT_HAS_TAG(sd->target));
809 sd->offset += delta;
810
811 if (sd->top != top) {
812 sdtop = sd;
813 top = sd->top;
814 JS_ASSERT(top == sd->before);
815 pivot = sd->offset;
816 pc = base + top;
817 op = (JSOp) *pc;
818 type = JOF_OPTYPE(op);
819 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
820 /*
821 * We already extended all the jump offset operands for
822 * the opcode at sd->top. Jumps and branches have only
823 * one jump offset operand, but switches have many, all
824 * of which are adjacent in cg->spanDeps.
825 */
826 continue;
827 }
828
829 JS_ASSERT(type == JOF_JUMP ||
830 type == JOF_TABLESWITCH ||
831 type == JOF_LOOKUPSWITCH);
832 }
833
834 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
835 span = SD_SPAN(sd, pivot);
836 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
837 ptrdiff_t deltaFromTop = 0;
838
839 done = JS_FALSE;
840
841 switch (op) {
842 case JSOP_GOTO: op = JSOP_GOTOX; break;
843 case JSOP_IFEQ: op = JSOP_IFEQX; break;
844 case JSOP_IFNE: op = JSOP_IFNEX; break;
845 case JSOP_OR: op = JSOP_ORX; break;
846 case JSOP_AND: op = JSOP_ANDX; break;
847 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
848 case JSOP_CASE: op = JSOP_CASEX; break;
849 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
850 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
851 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
852 default:
853 ReportStatementTooLarge(cx, cg);
854 return JS_FALSE;
855 }
856 *pc = (jsbytecode) op;
857
858 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
859 if (sd2 <= sd) {
860 /*
861 * sd2->offset already includes delta as it stood
862 * before we entered this loop, but it must also
863 * include the delta relative to top due to all the
864 * extended jump offset immediates for the opcode
865 * starting at top, which we extend in this loop.
866 *
867 * If there is only one extended jump offset, then
868 * sd2->offset won't change and this for loop will
869 * iterate once only.
870 */
871 sd2->offset += deltaFromTop;
872 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
873 } else {
874 /*
875 * sd2 comes after sd, and won't be revisited by
876 * the outer for loop, so we have to increase its
877 * offset by delta, not merely by deltaFromTop.
878 */
879 sd2->offset += delta;
880 }
881
882 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
883 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
884 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
885 }
886 sd = sd2 - 1;
887 }
888 }
889 }
890
891 growth += delta;
892 } while (!done);
893
894 if (growth) {
895 #ifdef DEBUG_brendan
896 JSTokenStream *ts = &cg->compiler->tokenStream;
897
898 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
899 ts->filename ? ts->filename : "stdin", cg->firstLine,
900 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
901 passes, offset + growth, offset, growth);
902 #endif
903
904 /*
905 * Ensure that we have room for the extended jumps, but don't round up
906 * to a power of two -- we're done generating code, so we cut to fit.
907 */
908 limit = CG_LIMIT(cg);
909 length = offset + growth;
910 next = base + length;
911 if (next > limit) {
912 JS_ASSERT(length > BYTECODE_CHUNK);
913 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
914 incr = BYTECODE_SIZE(length) - size;
915 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
916 if (!base) {
917 js_ReportOutOfScriptQuota(cx);
918 return JS_FALSE;
919 }
920 CG_BASE(cg) = base;
921 CG_LIMIT(cg) = next = base + length;
922 }
923 CG_NEXT(cg) = next;
924
925 /*
926 * Set up a fake span dependency record to guard the end of the code
927 * being generated. This guard record is returned as a fencepost by
928 * FindNearestSpanDep if there is no real spandep at or above a given
929 * unextended code offset.
930 */
931 guard.top = -1;
932 guard.offset = offset + growth;
933 guard.before = offset;
934 guard.target = NULL;
935 }
936
937 /*
938 * Now work backwards through the span dependencies, copying chunks of
939 * bytecode between each extended jump toward the end of the grown code
940 * space, and restoring immediate offset operands for all jump bytecodes.
941 * The first chunk of bytecodes, starting at base and ending at the first
942 * extended jump offset (NB: this chunk includes the operation bytecode
943 * just before that immediate jump offset), doesn't need to be copied.
944 */
945 JS_ASSERT(sd == sdlimit);
946 top = -1;
947 while (--sd >= sdbase) {
948 if (sd->top != top) {
949 top = sd->top;
950 op = (JSOp) base[top];
951 type = JOF_OPTYPE(op);
952
953 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
954 continue;
955 sd2++;
956 pivot = sd2->offset;
957 JS_ASSERT(top == sd2->before);
958 }
959
960 oldpc = base + sd->before;
961 span = SD_SPAN(sd, pivot);
962
963 /*
964 * If this jump didn't need to be extended, restore its span immediate
965 * offset operand now, overwriting the index of sd within cg->spanDeps
966 * that was stored temporarily after *pc when BuildSpanDepTable ran.
967 *
968 * Note that span might fit in 16 bits even for an extended jump op,
969 * if the op has multiple span operands, not all of which overflowed
970 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
971 * range for a short jump, but others are not).
972 */
973 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
974 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
975 SET_JUMP_OFFSET(oldpc, span);
976 continue;
977 }
978
979 /*
980 * Set up parameters needed to copy the next run of bytecode starting
981 * at offset (which is a cursor into the unextended, original bytecode
982 * vector), down to sd->before (a cursor of the same scale as offset,
983 * it's the index of the original jump pc). Reuse delta to count the
984 * nominal number of bytes to copy.
985 */
986 pc = base + sd->offset;
987 delta = offset - sd->before;
988 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
989
990 /*
991 * Don't bother copying the jump offset we're about to reset, but do
992 * copy the bytecode at oldpc (which comes just before its immediate
993 * jump offset operand), on the next iteration through the loop, by
994 * including it in offset's new value.
995 */
996 offset = sd->before + 1;
997 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
998 if (size) {
999 memmove(pc + 1 + JUMPX_OFFSET_LEN,
1000 oldpc + 1 + JUMP_OFFSET_LEN,
1001 size);
1002 }
1003
1004 SET_JUMPX_OFFSET(pc, span);
1005 }
1006
1007 if (growth) {
1008 /*
1009 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1010 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1011 * at each sd that moved. The future may bring different offset sizes
1012 * for span-dependent instruction operands. However, we fix only main
1013 * notes here, not prolog notes -- we know that prolog opcodes are not
1014 * span-dependent, and aren't likely ever to be.
1015 */
1016 offset = growth = 0;
1017 sd = sdbase;
1018 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
1019 sn < snlimit;
1020 sn = SN_NEXT(sn)) {
1021 /*
1022 * Recall that the offset of a given note includes its delta, and
1023 * tells the offset of the annotated bytecode from the main entry
1024 * point of the script.
1025 */
1026 offset += SN_DELTA(sn);
1027 while (sd < sdlimit && sd->before < offset) {
1028 /*
1029 * To compute the delta to add to sn, we need to look at the
1030 * spandep after sd, whose offset - (before + growth) tells by
1031 * how many bytes sd's instruction grew.
1032 */
1033 sd2 = sd + 1;
1034 if (sd2 == sdlimit)
1035 sd2 = &guard;
1036 delta = sd2->offset - (sd2->before + growth);
1037 if (delta > 0) {
1038 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
1039 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
1040 if (!sn)
1041 return JS_FALSE;
1042 snlimit = cg->main.notes + cg->main.noteCount;
1043 growth += delta;
1044 }
1045 sd++;
1046 }
1047
1048 /*
1049 * If sn has span-dependent offset operands, check whether each
1050 * covers further span-dependencies, and increase those operands
1051 * accordingly. Some source notes measure offset not from the
1052 * annotated pc, but from that pc plus some small bias. NB: we
1053 * assume that spec->offsetBias can't itself span span-dependent
1054 * instructions!
1055 */
1056 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1057 if (spec->isSpanDep) {
1058 pivot = offset + spec->offsetBias;
1059 n = spec->arity;
1060 for (i = 0; i < n; i++) {
1061 span = js_GetSrcNoteOffset(sn, i);
1062 if (span == 0)
1063 continue;
1064 target = pivot + span * spec->isSpanDep;
1065 sd2 = FindNearestSpanDep(cg, target,
1066 (target >= pivot)
1067 ? sd - sdbase
1068 : 0,
1069 &guard);
1070
1071 /*
1072 * Increase target by sd2's before-vs-after offset delta,
1073 * which is absolute (i.e., relative to start of script,
1074 * as is target). Recompute the span by subtracting its
1075 * adjusted pivot from target.
1076 */
1077 target += sd2->offset - sd2->before;
1078 span = target - (pivot + growth);
1079 span *= spec->isSpanDep;
1080 noteIndex = sn - cg->main.notes;
1081 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1082 return JS_FALSE;
1083 sn = cg->main.notes + noteIndex;
1084 snlimit = cg->main.notes + cg->main.noteCount;
1085 }
1086 }
1087 }
1088 cg->main.lastNoteOffset += growth;
1089
1090 /*
1091 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1092 * not clear how we can beat that).
1093 */
1094 for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
1095 /*
1096 * First, look for the nearest span dependency at/above tn->start.
1097 * There may not be any such spandep, in which case the guard will
1098 * be returned.
1099 */
1100 offset = tryNode->note.start;
1101 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1102 delta = sd->offset - sd->before;
1103 tryNode->note.start = offset + delta;
1104
1105 /*
1106 * Next, find the nearest spandep at/above tn->start + tn->length.
1107 * Use its delta minus tn->start's delta to increase tn->length.
1108 */
1109 length = tryNode->note.length;
1110 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1111 if (sd2 != sd) {
1112 tryNode->note.length =
1113 length + sd2->offset - sd2->before - delta;
1114 }
1115 }
1116 }
1117
1118 #ifdef DEBUG_brendan
1119 {
1120 uintN bigspans = 0;
1121 top = -1;
1122 for (sd = sdbase; sd < sdlimit; sd++) {
1123 offset = sd->offset;
1124
1125 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1126 if (sd->top != top) {
1127 JS_ASSERT(top == -1 ||
1128 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1129 bigspans != 0);
1130 bigspans = 0;
1131 top = sd->top;
1132 JS_ASSERT(top == sd->before);
1133 op = (JSOp) base[offset];
1134 type = JOF_OPTYPE(op);
1135 JS_ASSERT(type == JOF_JUMP ||
1136 type == JOF_JUMPX ||
1137 type == JOF_TABLESWITCH ||
1138 type == JOF_TABLESWITCHX ||
1139 type == JOF_LOOKUPSWITCH ||
1140 type == JOF_LOOKUPSWITCHX);
1141 pivot = offset;
1142 }
1143
1144 pc = base + offset;
1145 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1146 span = GET_JUMPX_OFFSET(pc);
1147 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1148 bigspans++;
1149 } else {
1150 JS_ASSERT(type == JOF_TABLESWITCHX ||
1151 type == JOF_LOOKUPSWITCHX);
1152 }
1153 } else {
1154 span = GET_JUMP_OFFSET(pc);
1155 }
1156 JS_ASSERT(SD_SPAN(sd, pivot) == span);
1157 }
1158 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1159 }
1160 #endif
1161
1162 /*
1163 * Reset so we optimize at most once -- cg may be used for further code
1164 * generation of successive, independent, top-level statements. No jump
1165 * can span top-level statements, because JS lacks goto.
1166 */
1167 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1168 JS_free(cx, cg->spanDeps);
1169 cg->spanDeps = NULL;
1170 FreeJumpTargets(cg, cg->jumpTargets);
1171 cg->jumpTargets = NULL;
1172 cg->numSpanDeps = cg->numJumpTargets = 0;
1173 cg->spanDepTodo = CG_OFFSET(cg);
1174 return JS_TRUE;
1175 }
1176
1177 static ptrdiff_t
1178 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1179 {
1180 JSBool extend;
1181 ptrdiff_t jmp;
1182 jsbytecode *pc;
1183
1184 extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1185 if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1186 return -1;
1187
1188 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1189 if (jmp >= 0 && (extend || cg->spanDeps)) {
1190 pc = CG_CODE(cg, jmp);
1191 if (!AddSpanDep(cx, cg, pc, pc, off))
1192 return -1;
1193 }
1194 return jmp;
1195 }
1196
1197 static ptrdiff_t
1198 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1199 {
1200 JSSpanDep *sd;
1201 JSJumpTarget *jt;
1202 ptrdiff_t top;
1203
1204 if (!cg->spanDeps)
1205 return GET_JUMP_OFFSET(pc);
1206
1207 sd = GetSpanDep(cg, pc);
1208 jt = sd->target;
1209 if (!JT_HAS_TAG(jt))
1210 return JT_TO_BPDELTA(jt);
1211
1212 top = sd->top;
1213 while (--sd >= cg->spanDeps && sd->top == top)
1214 continue;
1215 sd++;
1216 return JT_CLR_TAG(jt)->offset - sd->offset;
1217 }
1218
1219 JSBool
1220 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1221 ptrdiff_t off)
1222 {
1223 if (!cg->spanDeps) {
1224 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1225 SET_JUMP_OFFSET(pc, off);
1226 return JS_TRUE;
1227 }
1228
1229 if (!BuildSpanDepTable(cx, cg))
1230 return JS_FALSE;
1231 }
1232
1233 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1234 }
1235
1236 bool
1237 JSTreeContext::inStatement(JSStmtType type)
1238 {
1239 for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
1240 if (stmt->type == type)
1241 return true;
1242 }
1243 return false;
1244 }
1245
1246 void
1247 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1248 ptrdiff_t top)
1249 {
1250 stmt->type = type;
1251 stmt->flags = 0;
1252 stmt->blockid = tc->blockid();
1253 SET_STATEMENT_TOP(stmt, top);
1254 stmt->label = NULL;
1255 JS_ASSERT(!stmt->blockObj);
1256 stmt->down = tc->topStmt;
1257 tc->topStmt = stmt;
1258 if (STMT_LINKS_SCOPE(stmt)) {
1259 stmt->downScope = tc->topScopeStmt;
1260 tc->topScopeStmt = stmt;
1261 } else {
1262 stmt->downScope = NULL;
1263 }
1264 }
1265
1266 void
1267 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
1268 ptrdiff_t top)
1269 {
1270 js_PushStatement(tc, stmt, STMT_BLOCK, top);
1271 stmt->flags |= SIF_SCOPE;
1272 STOBJ_SET_PARENT(blockObj, tc->blockChain);
1273 stmt->downScope = tc->topScopeStmt;
1274 tc->topScopeStmt = stmt;
1275 tc->blockChain = blockObj;
1276 stmt->blockObj = blockObj;
1277 }
1278
1279 /*
1280 * Emit a backpatch op with offset pointing to the previous jump of this type,
1281 * so that we can walk back up the chain fixing up the op and jump offset.
1282 */
1283 static ptrdiff_t
1284 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1285 {
1286 ptrdiff_t offset, delta;
1287
1288 offset = CG_OFFSET(cg);
1289 delta = offset - *lastp;
1290 *lastp = offset;
1291 JS_ASSERT(delta > 0);
1292 return EmitJump(cx, cg, op, delta);
1293 }
1294
1295 /*
1296 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1297 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1298 * NB: We use cx and cg from our caller's lexical environment, and return
1299 * false on error.
1300 */
1301 #define EMIT_UINT16_IMM_OP(op, i) \
1302 JS_BEGIN_MACRO \
1303 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1304 return JS_FALSE; \
1305 JS_END_MACRO
1306
1307 static JSBool
1308 FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
1309 {
1310 JS_ASSERT(*npops != 0);
1311 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1312 return JS_FALSE;
1313 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
1314 *npops = 0;
1315 return JS_TRUE;
1316 }
1317
1318 /*
1319 * Emit additional bytecode(s) for non-local jumps.
1320 */
1321 static JSBool
1322 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
1323 {
1324 intN depth, npops;
1325 JSStmtInfo *stmt;
1326
1327 /*
1328 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1329 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1330 * end of a with statement, so we save cg->stackDepth here and restore it
1331 * just before a successful return.
1332 */
1333 depth = cg->stackDepth;
1334 npops = 0;
1335
1336 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1337
1338 for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
1339 switch (stmt->type) {
1340 case STMT_FINALLY:
1341 FLUSH_POPS();
1342 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1343 return JS_FALSE;
1344 if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
1345 return JS_FALSE;
1346 break;
1347
1348 case STMT_WITH:
1349 /* There's a With object on the stack that we need to pop. */
1350 FLUSH_POPS();
1351 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1352 return JS_FALSE;
1353 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1354 return JS_FALSE;
1355 break;
1356
1357 case STMT_FOR_IN_LOOP:
1358 /*
1359 * The iterator and the object being iterated need to be popped.
1360 */
1361 FLUSH_POPS();
1362 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1363 return JS_FALSE;
1364 if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1365 return JS_FALSE;
1366 break;
1367
1368 case STMT_SUBROUTINE:
1369 /*
1370 * There's a [exception or hole, retsub pc-index] pair on the
1371 * stack that we need to pop.
1372 */
1373 npops += 2;
1374 break;
1375
1376 default:;
1377 }
1378
1379 if (stmt->flags & SIF_SCOPE) {
1380 uintN i;
1381
1382 /* There is a Block object with locals on the stack to pop. */
1383 FLUSH_POPS();
1384 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1385 return JS_FALSE;
1386 i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
1387 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1388 }
1389 }
1390
1391 FLUSH_POPS();
1392 cg->stackDepth = depth;
1393 return JS_TRUE;
1394
1395 #undef FLUSH_POPS
1396 }
1397
1398 static ptrdiff_t
1399 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1400 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1401 {
1402 intN index;
1403
1404 if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
1405 return -1;
1406
1407 if (label)
1408 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1409 else if (noteType != SRC_NULL)
1410 index = js_NewSrcNote(cx, cg, noteType);
1411 else
1412 index = 0;
1413 if (index < 0)
1414 return -1;
1415
1416 return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1417 }
1418
1419 static JSBool
1420 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1421 jsbytecode *target, jsbytecode op)
1422 {
1423 jsbytecode *pc, *stop;
1424 ptrdiff_t delta, span;
1425
1426 pc = CG_CODE(cg, last);
1427 stop = CG_CODE(cg, -1);
1428 while (pc != stop) {
1429 delta = GetJumpOffset(cg, pc);
1430 span = PTRDIFF(target, pc, jsbytecode);
1431 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1432
1433 /*
1434 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1435 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1436 * and need to see the JSOP_BACKPATCH* op at *pc).
1437 */
1438 *pc = op;
1439 pc -= delta;
1440 }
1441 return JS_TRUE;
1442 }
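/*
 * Chain-walk example: suppose three breaks emitted JSOP_BACKPATCH ops at
 * offsets 5, 12, and 20, their immediates holding the positive deltas 7 and
 * 8 that link each op to its predecessor (the oldest link typically reaches
 * back past offset 0 to the CG_CODE(cg, -1) sentinel used as stop). Starting
 * from last == 20, the loop rewrites each immediate to the real span
 * (target - pc), stores op (e.g. JSOP_GOTO) over the placeholder, and steps
 * pc -= delta down the chain.
 */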
1443
1444 void
1445 js_PopStatement(JSTreeContext *tc)
1446 {
1447 JSStmtInfo *stmt;
1448
1449 stmt = tc->topStmt;
1450 tc->topStmt = stmt->down;
1451 if (STMT_LINKS_SCOPE(stmt)) {
1452 tc->topScopeStmt = stmt->downScope;
1453 if (stmt->flags & SIF_SCOPE) {
1454 tc->blockChain = STOBJ_GET_PARENT(stmt->blockObj);
1455 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
1456 }
1457 }
1458 }
1459
1460 JSBool
1461 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1462 {
1463 JSStmtInfo *stmt;
1464
1465 stmt = cg->topStmt;
1466 if (!STMT_IS_TRYING(stmt) &&
1467 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1468 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1469 JSOP_GOTO))) {
1470 return JS_FALSE;
1471 }
1472 js_PopStatement(cg);
1473 return JS_TRUE;
1474 }
1475
1476 JSBool
1477 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1478 JSParseNode *pn)
1479 {
1480 jsdouble dval;
1481 jsint ival;
1482 JSAtom *valueAtom;
1483 jsval v;
1484 JSAtomListElement *ale;
1485
1486 /* XXX just do numbers for now */
1487 if (pn->pn_type == TOK_NUMBER) {
1488 dval = pn->pn_dval;
1489 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
1490 v = INT_TO_JSVAL(ival);
1491 } else {
1492 /*
1493 * We atomize double to root a jsdouble instance that we wrap as
1494 * jsval and store in cg->constList. This works because atoms are
1495 * protected from GC during compilation.
1496 */
1497 valueAtom = js_AtomizeDouble(cx, dval);
1498 if (!valueAtom)
1499 return JS_FALSE;
1500 v = ATOM_KEY(valueAtom);
1501 }
1502 ale = cg->constList.add(cg->compiler, atom);
1503 if (!ale)
1504 return JS_FALSE;
1505 ALE_SET_VALUE(ale, v);
1506 }
1507 return JS_TRUE;
1508 }
1509
1510 JSStmtInfo *
1511 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
1512 {
1513 JSObject *obj;
1514 JSScope *scope;
1515 JSScopeProperty *sprop;
1516
1517 if (!stmt)
1518 stmt = tc->topScopeStmt;
1519 for (; stmt; stmt = stmt->downScope) {
1520 if (stmt->type == STMT_WITH)
1521 break;
1522
1523 /* Skip "maybe scope" statements that don't contain let bindings. */
1524 if (!(stmt->flags & SIF_SCOPE))
1525 continue;
1526
1527 obj = stmt->blockObj;
1528 JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1529 scope = OBJ_SCOPE(obj);
1530 sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
1531 if (sprop) {
1532 JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1533
1534 if (slotp) {
1535 JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
1536 *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
1537 sprop->shortid;
1538 }
1539 return stmt;
1540 }
1541 }
1542
1543 if (slotp)
1544 *slotp = -1;
1545 return stmt;
1546 }
1547
1548 /*
1549 * Check if the attributes describe a property holding a compile-time constant
1550 * or a permanent, read-only property without a getter.
1551 */
1552 #define IS_CONSTANT_PROPERTY(attrs) \
1553 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1554 (JSPROP_READONLY | JSPROP_PERMANENT))
1555
1556 /*
 1557 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
1558 * name defining a constant.
1559 */
1560 static JSBool
1561 LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1562 jsval *vp)
1563 {
1564 JSBool ok;
1565 JSStmtInfo *stmt;
1566 JSAtomListElement *ale;
1567 JSObject *obj, *objbox;
1568 JSProperty *prop;
1569 uintN attrs;
1570
1571 /*
1572 * Chase down the cg stack, but only until we reach the outermost cg.
1573 * This enables propagating consts from top-level into switch cases in a
1574 * function compiled along with the top-level script.
1575 */
1576 *vp = JSVAL_HOLE;
1577 do {
1578 if (cg->flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
1579 /* XXX this will need revising if 'const' becomes block-scoped. */
1580 stmt = js_LexicalLookup(cg, atom, NULL);
1581 if (stmt)
1582 return JS_TRUE;
1583
1584 ale = cg->constList.lookup(atom);
1585 if (ale) {
1586 JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
1587 *vp = ALE_VALUE(ale);
1588 return JS_TRUE;
1589 }
1590
1591 /*
1592 * Try looking in the variable object for a direct property that
1593 * is readonly and permanent. We know such a property can't be
1594 * shadowed by another property on obj's prototype chain, or a
1595 * with object or catch variable; nor can prop's value be changed,
1596 * nor can prop be deleted.
1597 */
1598 if (cg->flags & TCF_IN_FUNCTION) {
1599 if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
1600 break;
1601 } else {
1602 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
1603 obj = cg->scopeChain;
1604 ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &objbox,
1605 &prop);
1606 if (!ok)
1607 return JS_FALSE;
1608 if (objbox == obj) {
1609 /*
1610 * We're compiling code that will be executed immediately,
1611 * not re-executed against a different scope chain and/or
1612 * variable object. Therefore we can get constant values
1613 * from our variable object here.
1614 */
1615 ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
1616 &attrs);
1617 if (ok && IS_CONSTANT_PROPERTY(attrs)) {
1618 ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
1619 JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
1620 }
1621 }
1622 if (prop)
1623 OBJ_DROP_PROPERTY(cx, objbox, prop);
1624 if (!ok)
1625 return JS_FALSE;
1626 if (prop)
1627 break;
1628 }
1629 }
1630 } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
1631 return JS_TRUE;
1632 }
1633
1634 /*
1635 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1636 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
 1637 * JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to issue
1638 * after the main bytecode sequence.
1639 */
1640 static JSOp
1641 EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
1642 {
1643 uintN indexBase;
1644
1645 /*
1646 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1647 * for big indexes.
1648 */
1649 JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
1650 JS_STATIC_ASSERT(INDEX_LIMIT >=
1651 (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
1652
1653 if (index < JS_BIT(16))
1654 return JSOP_NOP;
1655 indexBase = index >> 16;
1656 if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
1657 if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
1658 return JSOP_FALSE;
1659 return JSOP_RESETBASE0;
1660 }
1661
1662 if (index >= INDEX_LIMIT) {
1663 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1664 JSMSG_TOO_MANY_LITERALS);
1665 return JSOP_FALSE;
1666 }
1667
1668 if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
1669 return JSOP_FALSE;
1670 return JSOP_RESETBASE;
1671 }
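/*
 * Segment example: index 0x12345 has indexBase 1, so the one-byte prefix
 * JSOP_INDEXBASE1 is emitted and JSOP_RESETBASE0 is returned as the suffix;
 * the prefixed op then carries only the low 16 bits (0x2345), which the
 * interpreter combines with the prevailing index segment. Indexes below
 * JS_BIT(16) need no prefix at all.
 */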
1672
1673 /*
1674 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1675 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1676 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1677 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1678 * with single-byte JSOP_INDEXBASE[123] codes.
1679 *
1680 * Such prefixing currently requires a suffix to restore the "zero segment"
1681 * register setting, but this could be optimized further.
1682 */
1683 static JSBool
1684 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
1685 {
1686 JSOp bigSuffix;
1687
1688 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1689 if (bigSuffix == JSOP_FALSE)
1690 return JS_FALSE;
1691 EMIT_UINT16_IMM_OP(op, index);
1692 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1693 }
1694
1695 /*
1696 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1697 * caller's lexical environment, and embedding a false return on error.
1698 */
1699 #define EMIT_INDEX_OP(op, index) \
1700 JS_BEGIN_MACRO \
1701 if (!EmitIndexOp(cx, op, index, cg)) \
1702 return JS_FALSE; \
1703 JS_END_MACRO
1704
1705 static JSBool
1706 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1707 {
1708 JSAtomListElement *ale;
1709
1710 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1711 if (op == JSOP_GETPROP &&
1712 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1713 return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
1714 }
1715 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
1716 if (!ale)
1717 return JS_FALSE;
1718 return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
1719 }
1720
1721 static JSBool
1722 EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
1723 JSCodeGenerator *cg)
1724 {
1725 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1726 return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
1727 }
1728
1729 /*
1730 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1731 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1732 * other parts of the code there's no necessary relationship between the two.
1733 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1734 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1735 */
1736 JS_STATIC_ASSERT(ARGNO_LEN == 2);
1737 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
1738
1739 static JSBool
1740 EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
1741 JSCodeGenerator *cg)
1742 {
1743 JSOp bigSuffix;
1744 ptrdiff_t off;
1745 jsbytecode *pc;
1746
1747 JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
1748 JOF_OPTYPE(op) == JOF_SLOTOBJECT);
1749 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1750 if (bigSuffix == JSOP_FALSE)
1751 return JS_FALSE;
1752
1753 /* Emit [op, slot, index]. */
1754 off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
1755 if (off < 0)
1756 return JS_FALSE;
1757 pc = CG_CODE(cg, off);
1758 SET_UINT16(pc, slot);
1759 pc += 2;
1760 SET_INDEX(pc, index);
1761 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1762 }
1763
1764 /*
1765 * Adjust the slot for a block local to account for the number of variables
1766 * that share the same index space with locals. Due to the incremental code
1767 * generation for top-level script, we do the adjustment via code patching in
1768 * JSCompiler::compileScript; see comments there.
1769 *
1770 * The function returns -1 on failures.
1771 */
1772 static jsint
1773 AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
1774 {
1775 JS_ASSERT((jsuint) slot < cg->maxStackDepth);
1776 if (cg->flags & TCF_IN_FUNCTION) {
1777 slot += cg->fun->u.i.nvars;
1778 if ((uintN) slot >= SLOTNO_LIMIT) {
1779 js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
1780 JSREPORT_ERROR,
1781 JSMSG_TOO_MANY_LOCALS);
1782 slot = -1;
1783 }
1784 }
1785 return slot;
1786 }
1787
1788 static bool
1789 EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
1790 {
1791 JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
1792 if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
1793 return false;
1794
1795 JSObject *blockObj = pn->pn_objbox->object;
1796 jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
1797 if (depth < 0)
1798 return false;
1799
1800 for (uintN slot = JSSLOT_FREE(&js_BlockClass),
1801 limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
1802 slot < limit; slot++) {
1803 jsval v = STOBJ_GET_SLOT(blockObj, slot);
1804
1805 /* Beware the empty destructuring dummy. */
1806 if (JSVAL_IS_VOID(v)) {
1807 JS_ASSERT(slot + 1 <= limit);
1808 continue;
1809 }
1810
1811 JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
1812 JS_ASSERT(dn->pn_defn);
1813 JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
1814 dn->pn_cookie += depth;
1815 #ifdef DEBUG
1816 for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
1817 JS_ASSERT(pnu->pn_lexdef == dn);
1818 JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
1819 JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
1820 }
1821 #endif
1822 }
1823
1824 OBJ_SCOPE(blockObj)->freeslot = JSSLOT_FREE(&js_BlockClass);
1825 js_ReallocSlots(cx, blockObj, JSSLOT_FREE(&js_BlockClass), JS_TRUE);
1826 return true;
1827 }
1828
1829 /*
1830 * When eval is called from a function, the eval code or function code it
1831 * compiles may reference upvars that live in the eval-calling function. The
1832 * eval-invoked compiler does not have explicit definitions for these upvars
1833 * and we do not attempt to create them a-priori (by inspecting the function's
1834 * args and vars) -- we could, but we'd take an avoidable penalty for each
1835 * function local not referenced by any upvar. Instead, we map such upvars
1836 * lazily, growing upvarMap.vector by powers of two.
1837 *
1838 * This function knows that it is called with pn pointing to a PN_NAME-arity
1839 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1840 * static level of cg at least one greater than the eval-calling function's
1841 * static level.
1842 */
1843 static bool
1844 MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
1845 {
1846 JSContext *cx = cg->compiler->context;
1847 JSFunction *fun = cg->compiler->callerFrame->fun;
1848 uintN upvarLevel = fun->u.i.script->staticLevel;
1849
1850 JSFunctionBox *funbox = cg->funbox;
1851 if (funbox) {
1852 /*
1853 * Treat top-level function definitions as escaping (i.e., as funargs). This
1854 * is required because we compile each such top-level function or statement
1855 * and then throw away the AST, so we cannot yet see all funarg uses of the
1856 * function being compiled (cg->funbox->object). See bug 493177.
1857 */
1858 if (funbox->level == fun->u.i.script->staticLevel + 1U &&
1859 !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
1860 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
1861 ((JSFunction *) funbox->object)->atom);
1862 return true;
1863 }
1864
1865 while (funbox->level >= upvarLevel) {
1866 if (funbox->node->pn_dflags & PND_FUNARG)
1867 return true;
1868 funbox = funbox->parent;
1869 if (!funbox)
1870 break;
1871 }
1872 }
1873
1874 JSAtom *atom = pn->pn_atom;
1875
1876 uintN index;
1877 JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
1878 if (localKind == JSLOCAL_NONE)
1879 return true;
1880
1881 JS_ASSERT(cg->staticLevel > upvarLevel);
1882 if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
1883 return true;
1884
1885 JSAtomListElement *ale = cg->upvarList.lookup(atom);
1886 if (!ale) {
1887 if ((cg->flags & TCF_IN_FUNCTION) &&
1888 !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
1889 return false;
1890 }
1891
1892 ale = cg->upvarList.add(cg->compiler, atom);
1893 if (!ale)
1894 return false;
1895 JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);
1896
1897 uint32 *vector = cg->upvarMap.vector;
1898 uint32 length = cg->upvarMap.length;
1899
1900 JS_ASSERT(ALE_INDEX(ale) <= length);
1901 if (ALE_INDEX(ale) == length) {
1902 length = 2 * JS_MAX(2, length);
1903 vector = (uint32 *) JS_realloc(cx, vector, length * sizeof *vector);
1904 if (!vector)
1905 return false;
1906 cg->upvarMap.vector = vector;
1907 cg->upvarMap.length = length;
1908 }
1909
1910 if (localKind != JSLOCAL_ARG)
1911 index += fun->nargs;
1912 JS_ASSERT(index < JS_BIT(16));
1913
1914 uintN skip = cg->staticLevel - upvarLevel;
1915 vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
1916 }
1917
1918 pn->pn_op = JSOP_GETUPVAR;
1919 pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
1920 pn->pn_dflags |= PND_BOUND;
1921 return true;
1922 }
1923
1924 /*
1925 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1926 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1927 * It returns false on error, true on success.
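*
* For example (an illustrative sketch), in
*
*   function f(a) { var b = a; return b; }
*
* the uses of a and b bind to slot ops such as JSOP_GETARG 0 and
* JSOP_GETLOCAL 0 rather than being emitted as JSOP_NAME lookups.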
1928 *
1929 * The caller can inspect pn->pn_cookie for FREE_UPVAR_COOKIE to tell whether
1930 * optimization occurred, in which case BindNameToSlot also updated pn->pn_op.
1931 * If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op still may
1932 * have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
1933 * pn->pn_op was modified, if this function finds an argument or local variable
1934 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1935 * successful return.
1936 *
1937 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1938 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1939 * in js_EmitTree.
1940 */
1941 static JSBool
1942 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1943 {
1944 JSDefinition *dn;
1945 JSOp op;
1946 JSAtom *atom;
1947 uint32 cookie;
1948 JSDefinition::Kind dn_kind;
1949 JSAtomListElement *ale;
1950 uintN index;
1951
1952 JS_ASSERT(pn->pn_type == TOK_NAME);
1953
1954 /* Idempotency tests come first, since we may be called more than once. */
1955 if (pn->pn_dflags & PND_BOUND)
1956 return JS_TRUE;
1957
1958 /* No cookie is initialized for these two; they're pre-bound by definition. */
1959 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
1960
1961 /*
1962 * The parser linked all uses (including forward references) to their
1963 * definitions, unless a with statement or direct eval intervened.
1964 */
1965 if (pn->pn_used) {
1966 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
1967 dn = pn->pn_lexdef;
1968 JS_ASSERT(dn->pn_defn);
1969 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
1970 } else {
1971 if (!pn->pn_defn)
1972 return JS_TRUE;
1973 dn = (JSDefinition *) pn;
1974 }
1975
1976 op = PN_OP(pn);
1977 if (op == JSOP_NOP)
1978 return JS_TRUE;
1979
1980 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1981 atom = pn->pn_atom;
1982 cookie = dn->pn_cookie;
1983 dn_kind = dn->kind();
1984
1985 /*
1986 * Turn attempts to mutate const-declared bindings into get ops (for
1987 * pre-increment and pre-decrement ops, our caller will have to emit
1988 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
1989 *
1990 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
1991 * bindings visible to the compiler are permanent in JS unless the
1992 * declaration originates in eval code. We detect eval code by testing
1993 * cg->compiler->callerFrame, which is set only by eval or a debugger
1994 * equivalent.
1995 *
1996 * Note that this callerFrame non-null test must be qualified by testing
1997 * !cg->funbox to exclude function code nested in eval code, which is not
1998 * subject to the deletable binding exception.
1999 */
2000 switch (op) {
2001 case JSOP_NAME:
2002 case JSOP_SETCONST:
2003 break;
2004 case JSOP_DELNAME:
2005 if (dn_kind != JSDefinition::UNKNOWN) {
2006 if (cg->compiler->callerFrame && !cg->funbox)
2007 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2008 else
2009 pn->pn_op = JSOP_FALSE;
2010 pn->pn_dflags |= PND_BOUND;
2011 return JS_TRUE;
2012 }
2013 break;
2014 default:
2015 if (pn->isConst())
2016 pn->pn_op = op = JSOP_NAME;
2017 }
2018
2019 if (cookie == FREE_UPVAR_COOKIE) {
2020 JSStackFrame *caller = cg->compiler->callerFrame;
2021 if (caller) {
2022 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2023
2024 /*
2025 * Don't generate upvars on the left side of a for loop. See
2026 * bug 470758.
2027 */
2028 if (cg->flags & TCF_IN_FOR_INIT)
2029 return JS_TRUE;
2030
2031 JS_ASSERT(caller->script);
2032 if (!caller->fun)
2033 return JS_TRUE;
2034
2035 /*
2036 * Make sure the variable object used by the compiler to initialize
2037 * parent links matches the caller's varobj. Compile-n-go compiler-
2038 * created function objects have the top-level cg's scopeChain set
2039 * as their parent by JSCompiler::newFunction.
2040 */
2041 JSObject *scopeobj = (cg->flags & TCF_IN_FUNCTION)
2042 ? STOBJ_GET_PARENT(FUN_OBJECT(cg->fun))
2043 : cg->scopeChain;
2044 if (scopeobj != caller->varobj)
2045 return JS_TRUE;
2046
2047 /*
2048 * We are compiling an eval or debugger script inside a function frame, and
2049 * the scope chain matches the function's variable object. Optimize access
2050 * to the function's arguments, its variables, and the arguments object
2051 * itself.
2052 */
2053 if (op != JSOP_NAME)
2054 return JS_TRUE;
2055
2056 return MakeUpvarForEval(pn, cg);
2057 }
2058 return JS_TRUE;
2059 }
2060
2061 if (dn->pn_dflags & PND_GVAR) {
2062 /*
2063 * If this is a global reference from within a function, leave pn_op as
2064 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2065 * only we could depend on the global frame's slots being valid for all
2066 * calls to the function.
2067 */
2068 if (cg->flags & TCF_IN_FUNCTION)
2069 return JS_TRUE;
2070
2071 /*
2072 * We are optimizing global variables and there may be no pre-existing
2073 * global property named atom when this global script runs. If atom was
2074 * declared via const or var, optimize pn to access fp->vars using the
2075 * appropriate JSOP_*GVAR op.
2076 *
2077 * FIXME: should be able to optimize global function access too.
2078 */
2079 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2080
2081 switch (op) {
2082 case JSOP_NAME: op = JSOP_GETGVAR; break;
2083 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2084 case JSOP_SETCONST: /* NB: no change */ break;
2085 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2086 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2087 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2088 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2089 case JSOP_FORNAME: /* NB: no change */ break;
2090 case JSOP_DELNAME: /* NB: no change */ break;
2091 default: JS_NOT_REACHED("gvar");
2092 }
2093 pn->pn_op = op;
2094 pn->pn_cookie = cookie;
2095 pn->pn_dflags |= PND_BOUND;
2096 return JS_TRUE;
2097 }
2098
2099 uintN level = UPVAR_FRAME_SKIP(cookie);
2100 JS_ASSERT(cg->staticLevel >= level);
2101
2102 /*
2103 * A JSDefinition witnessed as a declaration by the parser cannot be an
2104 * upvar, unless it is the degenerate kind of upvar selected above (in the
2105 * code before the PND_GVAR test) for the special case of compile-and-go
2106 * code generated from eval called from a function, where the eval code
2107 * uses local vars defined in the function. We detect this upvar-for-eval
2108 * case by checking dn's op.
2109 */
2110 if (PN_OP(dn) == JSOP_GETUPVAR) {
2111 JS_ASSERT(cg->staticLevel >= level);
2112 if (op != JSOP_NAME)
2113 return JS_TRUE;
2114
2115 #ifdef DEBUG
2116 JSStackFrame *caller = cg->compiler->callerFrame;
2117 JS_ASSERT(caller);
2118
2119 JSTreeContext *tc = cg;
2120 while (tc->staticLevel != level)
2121 tc = tc->parent;
2122 JS_ASSERT(tc->flags & TCF_COMPILING);
2123
2124 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2125 JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
2126 JS_ASSERT(!(evalcg->flags & TCF_IN_FOR_INIT));
2127 JS_ASSERT(caller->script);
2128 JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);
2129 #endif
2130
2131 if (cg->staticLevel == level) {
2132 pn->pn_op = JSOP_GETUPVAR;
2133 pn->pn_cookie = cookie;
2134 pn->pn_dflags |= PND_BOUND;
2135 return JS_TRUE;
2136 }
2137
2138 return MakeUpvarForEval(pn, cg);
2139 }
2140
2141 uintN skip = cg->staticLevel - level;
2142 if (skip != 0) {
2143 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
2144 JS_ASSERT(cg->lexdeps.lookup(atom));
2145 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2146 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2147
2148 /*
2149 * If op is a mutating opcode, if this upvar's static level is too big to
2150 * index into the display, or if the function is heavyweight, we fall back
2151 * on JSOP_*NAME*.
2152 */
2153 if (op != JSOP_NAME)
2154 return JS_TRUE;
2155 if (level >= JS_DISPLAY_SIZE)
2156 return JS_TRUE;
2157 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2158 return JS_TRUE;
2159
2160 if (FUN_FLAT_CLOSURE(cg->fun)) {
2161 op = JSOP_GETDSLOT;
2162 } else {
2163 /*
2164 * The function we're compiling may not be heavyweight, but if it
2165 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2166 * JSCompiler::analyzeFunctions has arranged for this function's
2167 * enclosing functions to be heavyweight, so we can safely stick
2168 * with JSOP_NAME/JSOP_CALLNAME.
2169 */
2170 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2171 return JS_TRUE;
2172
2173 /*
2174 * Generator functions may be resumed from any call stack, which
2175 * defeats the display optimization to static link searching used
2176 * by JSOP_{GET,CALL}UPVAR.
2177 */
2178 if (cg->flags & TCF_FUN_IS_GENERATOR)
2179 return JS_TRUE;
2180
2181 op = JSOP_GETUPVAR;
2182 }
2183
2184 ale = cg->upvarList.lookup(atom);
2185 if (ale) {
2186 index = ALE_INDEX(ale);
2187 } else {
2188 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2189 return JS_FALSE;
2190
2191 ale = cg->upvarList.add(cg->compiler, atom);
2192 if (!ale)
2193 return JS_FALSE;
2194 index = ALE_INDEX(ale);
2195 JS_ASSERT(index == cg->upvarList.count - 1);
2196
2197 uint32 *vector = cg->upvarMap.vector;
2198 if (!vector) {
2199 uint32 length = cg->lexdeps.count;
2200
2201 vector = (uint32 *) calloc(length, sizeof *vector);
2202 if (!vector) {
2203 JS_ReportOutOfMemory(cx);
2204 return JS_FALSE;
2205 }
2206 cg->upvarMap.vector = vector;
2207 cg->upvarMap.length = length;
2208 }
2209
2210 uintN slot = UPVAR_FRAME_SLOT(cookie);
2211 if (dn_kind != JSDefinition::ARG) {
2212 JSTreeContext *tc = cg;
2213 do {
2214 tc = tc->parent;
2215 } while (tc->staticLevel != level);
2216 if (tc->flags & TCF_IN_FUNCTION)
2217 slot += tc->fun->nargs;
2218 }
2219
2220 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2221 }
2222
2223 pn->pn_op = op;
2224 pn->pn_cookie = index;
2225 pn->pn_dflags |= PND_BOUND;
2226 return JS_TRUE;
2227 }
2228
2229 /*
2230 * We are compiling a function body and may be able to optimize name
2231 * to stack slot. Look for an argument or variable in the function and
2232 * rewrite pn_op and update pn accordingly.
2233 */
2234 switch (dn_kind) {
2235 case JSDefinition::UNKNOWN:
2236 return JS_TRUE;
2237
2238 case JSDefinition::LET:
2239 switch (op) {
2240 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2241 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2242 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2243 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2244 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2245 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2246 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2247 default: JS_NOT_REACHED("let");
2248 }
2249 break;
2250
2251 case JSDefinition::ARG:
2252 switch (op) {
2253 case JSOP_NAME: op = JSOP_GETARG; break;
2254 case JSOP_SETNAME: op = JSOP_SETARG; break;
2255 case JSOP_INCNAME: op = JSOP_INCARG; break;
2256 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2257 case JSOP_DECNAME: op = JSOP_DECARG; break;
2258 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2259 case JSOP_FORNAME: op = JSOP_FORARG; break;
2260 default: JS_NOT_REACHED("arg");
2261 }
2262 JS_ASSERT(!pn->isConst());
2263 break;
2264
2265 case JSDefinition::VAR:
2266 if (PN_OP(dn) == JSOP_CALLEE) {
2267 JS_ASSERT(op != JSOP_CALLEE);
2268 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2269
2270 switch (op) {
2271 default:
2272 /*
2273 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as
2274 * we cannot be sure cg->fun is not something of the form:
2275 *
2276 * var ff = (function f(s) { eval(s); return f; });
2277 *
2278 * where a caller invokes ff("var f = 42"). The result returned
2279 * for such an invocation must be 42, since the callee name is
2280 * lexically bound in an outer declarative environment from the
2281 * function's activation. See jsfun.cpp:call_resolve.
2282 */
2283 JS_ASSERT(op != JSOP_DELNAME);
2284 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2285 op = JSOP_CALLEE;
2286 pn->pn_dflags |= PND_CONST;
2287 }
2288 break;
2289 }
2290 pn->pn_op = op;
2291 pn->pn_dflags |= PND_BOUND;
2292 return JS_TRUE;
2293 }
2294 /* FALL THROUGH */
2295
2296 default:
2297 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2298 dn_kind == JSDefinition::VAR ||
2299 dn_kind == JSDefinition::CONST);
2300 switch (op) {
2301 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2302 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2303 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2304 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2305 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2306 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2307 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2308 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2309 default: JS_NOT_REACHED("local");
2310 }
2311 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2312 break;
2313 }
2314
2315 JS_ASSERT(op != PN_OP(pn));
2316 pn->pn_op = op;
2317 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2318 pn->pn_dflags |= PND_BOUND;
2319 return JS_TRUE;
2320 }
2321
2322 /*
2323 * If pn contains a useful expression, return true with *answer set to true.
2324 * If pn contains a useless expression, return true with *answer set to false.
2325 * Return false on error.
2326 *
2327 * The caller should initialize *answer to false and invoke this function on
2328 * an expression statement or similar subtree to decide whether the tree could
2329 * produce code that has any side effects. For an expression statement, we
2330 * define useless code as code with no side effects, because the main effect,
2331 * the value left on the stack after the code executes, will be discarded by a
2332 * pop bytecode.
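*
* For example (illustrative): the expression statement |1 + 2;| is
* useless, while |a.b;| must be presumed useful, since getting a.b
* could invoke an effectful getter.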
2333 */
2334 static JSBool
2335 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2336 JSBool *answer)
2337 {
2338 JSBool ok;
2339 JSParseNode *pn2;
2340
2341 ok = JS_TRUE;
2342 if (!pn || *answer)
2343 return ok;
2344
2345 switch (pn->pn_arity) {
2346 case PN_FUNC:
2347 /*
2348 * Contrary to ES3, a named function expression is no longer useful here,
2349 * because we bind its name lexically (using JSOP_CALLEE) instead of creating
2350 * an Object instance and binding a readonly, permanent property in it (an
2351 * object and binding that could be detected and hijacked or captured). This
2352 * is a bug fix to ES3, adopted in ES3.1 drafts.
2353 */
2354 *answer = JS_FALSE;
2355 break;
2356
2357 case PN_LIST:
2358 if (pn->pn_op == JSOP_NOP ||
2359 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2360 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2361 /*
2362 * Non-operators along with ||, &&, ===, and !== never invoke
2363 * toString or valueOf.
2364 */
2365 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2366 ok &= CheckSideEffects(cx, cg, pn2, answer);
2367 } else {
2368 /*
2369 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2370 * are presumed to be useful, because they may have side effects
2371 * even if their main effect (their return value) is discarded.
2372 *
2373 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2374 * to avoid too much recursion. All such lists must be presumed
2375 * to be useful because each index operation could invoke a getter
2376 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2377 * does not apply here: arguments[i][j] might invoke a getter).
2378 *
2379 * Likewise, array and object initialisers may call prototype
2380 * setters (the __defineSetter__ built-in, and writable __proto__
2381 * on Array.prototype create this hazard). Initialiser list nodes
2382 * have JSOP_NEWINIT in their pn_op.
2383 */
2384 *answer = JS_TRUE;
2385 }
2386 break;
2387
2388 case PN_TERNARY:
2389 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2390 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2391 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2392 break;
2393
2394 case PN_BINARY:
2395 if (pn->pn_type == TOK_ASSIGN) {
2396 /*
2397 * Assignment is presumed to be useful, even if the next operation
2398 * is another assignment overwriting this one's ostensible effect,
2399 * because the left operand may be a property with a setter that
2400 * has side effects.
2401 *
2402 * The only exception is assignment of a useless value to a const
2403 * declared in the function currently being compiled.
2404 */
2405 pn2 = pn->pn_left;
2406 if (pn2->pn_type != TOK_NAME) {
2407 *answer = JS_TRUE;
2408 } else {
2409 if (!BindNameToSlot(cx, cg, pn2))
2410 return JS_FALSE;
2411 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2412 return JS_FALSE;
2413 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2414 *answer = JS_TRUE;
2415 }
2416 } else {
2417 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2418 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2419 /*
2420 * ||, &&, ===, and !== do not convert their operands via
2421 * toString or valueOf method calls.
2422 */
2423 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2424 CheckSideEffects(cx, cg, pn->pn_right, answer);
2425 } else {
2426 /*
2427 * We can't easily prove that neither operand ever denotes an
2428 * object with a toString or valueOf method.
2429 */
2430 *answer = JS_TRUE;
2431 }
2432 }
2433 break;
2434
2435 case PN_UNARY:
2436 switch (pn->pn_type) {
2437 case TOK_RP:
2438 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2439 break;
2440
2441 case TOK_DELETE:
2442 pn2 = pn->pn_kid;
2443 switch (pn2->pn_type) {
2444 case TOK_NAME:
2445 if (!BindNameToSlot(cx, cg, pn2))
2446 return JS_FALSE;
2447 if (pn2->isConst()) {
2448 *answer = JS_FALSE;
2449 break;
2450 }
2451 /* FALL THROUGH */
2452 case TOK_DOT:
2453 #if JS_HAS_XML_SUPPORT
2454 case TOK_DBLDOT:
2455 #endif
2456 #if JS_HAS_LVALUE_RETURN
2457 case TOK_LP:
2458 #endif
2459 case TOK_LB:
2460 /* All these delete addressing modes have effects too. */
2461 *answer = JS_TRUE;
2462 break;
2463 default:
2464 ok = CheckSideEffects(cx, cg, pn2, answer);
2465 break;
2466 }
2467 break;
2468
2469 case TOK_UNARYOP:
2470 if (pn->pn_op == JSOP_NOT) {
2471 /* ! does not convert its operand via toString or valueOf. */
2472 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2473 break;
2474 }
2475 /* FALL THROUGH */
2476
2477 default:
2478 /*
2479 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2480 * have direct effects. Of the remaining unary-arity node types,
2481 * we can't easily prove that the operand never denotes an object
2482 * with a toString or valueOf method.
2483 */
2484 *answer = JS_TRUE;
2485 break;
2486 }
2487 break;
2488
2489 case PN_NAME:
2490 /*
2491 * Take care to avoid trying to bind a label name (labels, both for
2492 * statements and property values in object initialisers, have pn_op
2493 * defaulted to JSOP_NOP).
2494 */
2495 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2496 if (!BindNameToSlot(cx, cg, pn))
2497 return JS_FALSE;
2498 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2499 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2500 /*
2501 * Not an argument or local variable use, and not a use of an
2502 * unshadowed named function expression's given name, so this
2503 * expression could invoke a getter that has side effects.
2504 */
2505 *answer = JS_TRUE;
2506 }
2507 }
2508 pn2 = pn->maybeExpr();
2509 if (pn->pn_type == TOK_DOT) {
2510 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2511 return JS_FALSE;
2512 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2513 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2514 /*
2515 * Any dotted property reference could call a getter, except
2516 * for arguments.length where arguments is unambiguous.
2517 */
2518 *answer = JS_TRUE;
2519 }
2520 }
2521 ok = CheckSideEffects(cx, cg, pn2, answer);
2522 break;
2523
2524 case PN_NAMESET:
2525 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2526 break;
2527
2528 case PN_NULLARY:
2529 if (pn->pn_type == TOK_DEBUGGER)
2530 *answer = JS_TRUE;
2531 break;
2532 }
2533 return ok;
2534 }
2535
2536 static JSBool
2537 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2538 JSBool callContext)
2539 {
2540 JSOp op;
2541
2542 if (!BindNameToSlot(cx, cg, pn))
2543 return JS_FALSE;
2544 op = PN_OP(pn);
2545
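/*
 * In a call context (callee position), specialize get ops to their
 * JSOP_CALL* forms, which push the callee along with the |this| value
 * (or null placeholder) that the call will consume.
 */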
2546 if (callContext) {
2547 switch (op) {
2548 case JSOP_NAME:
2549 op = JSOP_CALLNAME;
2550 break;
2551 case JSOP_GETGVAR:
2552 op = JSOP_CALLGVAR;
2553 break;
2554 case JSOP_GETARG:
2555 op = JSOP_CALLARG;
2556 break;
2557 case JSOP_GETLOCAL:
2558 op = JSOP_CALLLOCAL;
2559 break;
2560 case JSOP_GETUPVAR:
2561 op = JSOP_CALLUPVAR;
2562 break;
2563 case JSOP_GETDSLOT:
2564 op = JSOP_CALLDSLOT;
2565 break;
2566 default:
2567 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2568 break;
2569 }
2570 }
2571
2572 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2573 if (js_Emit1(cx, cg, op) < 0)
2574 return JS_FALSE;
2575 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2576 return JS_FALSE;
2577 } else {
2578 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2579 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2580 } else {
2581 if (!EmitAtomOp(cx, pn, op, cg))
2582 return JS_FALSE;
2583 }
2584 }
2585
2586 return JS_TRUE;
2587 }
2588
2589 #if JS_HAS_XML_SUPPORT
2590 static JSBool
2591 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2592 {
2593 JSParseNode *pn2;
2594 uintN oldflags;
2595
2596 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2597 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2598 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2599
2600 pn2 = pn->pn_kid;
2601 oldflags = cg->flags;
2602 cg->flags &= ~TCF_IN_FOR_INIT;
2603 if (!js_EmitTree(cx, cg, pn2))
2604 return JS_FALSE;
2605 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2606 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2607 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2608 return JS_FALSE;
2609 }
2610
2611 return js_Emit1(cx, cg, op) >= 0;
2612 }
2613 #endif
2614
2615 static JSBool
2616 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2617 {
2618 /*
2619 * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
2620 * deoptimize away from fast paths in the interpreter and trace recorder,
2621 * which skip dense array instances by going up to Array.prototype before
2622 * looking up the property name.
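*
* For example, o.__proto__ is emitted here via JSOP_QNAMEPART and
* JSOP_GETELEM, much as if it were written o['__proto__'], rather
* than through the JSOP_GETPROP fast path.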
2623 */
2624 JSAtomListElement *ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2625 if (!ale)
2626 return JS_FALSE;
2627 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2628 return JS_FALSE;
2629 if (js_Emit1(cx, cg, op) < 0)
2630 return JS_FALSE;
2631 return JS_TRUE;
2632 }
2633
2634 static JSBool
2635 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2636 JSBool callContext)
2637 {
2638 JSParseNode *pn2, *pndot, *pnup, *pndown;
2639 ptrdiff_t top;
2640
2641 JS_ASSERT(pn->pn_arity == PN_NAME);
2642 pn2 = pn->maybeExpr();
2643
2644 /* Special case deoptimization on __proto__, __count__ and __parent__. */
2645 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2646 (pn->pn_atom == cx->runtime->atomState.protoAtom ||
2647 pn->pn_atom == cx->runtime->atomState.parentAtom ||
2648 pn->pn_atom == cx->runtime->atomState.countAtom)) {
2649 if (pn2 && !js_EmitTree(cx, cg, pn2))
2650 return JS_FALSE;
2651 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2652 }
2653
2654 if (callContext) {
2655 JS_ASSERT(pn->pn_type == TOK_DOT);
2656 JS_ASSERT(op == JSOP_GETPROP);
2657 op = JSOP_CALLPROP;
2658 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2659 if (pn2->pn_op == JSOP_THIS) {
2660 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2661 /* Fast path for gets of |this.foo|. */
2662 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2663 }
2664 } else if (pn2->pn_type == TOK_NAME) {
2665 /*
2666 * Try to optimize:
2667 * - arguments.length into JSOP_ARGCNT
2668 * - argname.prop into JSOP_GETARGPROP
2669 * - localname.prop into JSOP_GETLOCALPROP
2670 * but don't do this if the property is 'length' -- prefer to emit
2671 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2672 */
2673 if (!BindNameToSlot(cx, cg, pn2))
2674 return JS_FALSE;
2675 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2676 if (pn2->pn_op == JSOP_ARGUMENTS)
2677 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2678 } else {
2679 switch (pn2->pn_op) {
2680 case JSOP_GETARG:
2681 op = JSOP_GETARGPROP;
2682 goto do_indexconst;
2683 case JSOP_GETLOCAL:
2684 op = JSOP_GETLOCALPROP;
2685 do_indexconst: {
2686 JSAtomListElement *ale;
2687 jsatomid atomIndex;
2688
2689 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2690 if (!ale)
2691 return JS_FALSE;
2692 atomIndex = ALE_INDEX(ale);
2693 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2694 }
2695
2696 default:;
2697 }
2698 }
2699 }
2700 }
2701
2702 /*
2703 * If the object operand is also a dotted property reference, reverse the
2704 * list linked via pn_expr temporarily so we can iterate over it from the
2705 * bottom up (reversing again as we go), to avoid excessive recursion.
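*
* For example, a.b.c.d emits code for a, then annotated getprop ops
* for b, c, and d in source order, with no recursion down the chain.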
2706 */
2707 if (pn2->pn_type == TOK_DOT) {
2708 pndot = pn2;
2709 pnup = NULL;
2710 top = CG_OFFSET(cg);
2711 for (;;) {
2712 /* Reverse pndot->pn_expr to point up, not down. */
2713 pndot->pn_offset = top;
2714 JS_ASSERT(!pndot->pn_used);
2715 pndown = pndot->pn_expr;
2716 pndot->pn_expr = pnup;
2717 if (pndown->pn_type != TOK_DOT)
2718 break;
2719 pnup = pndot;
2720 pndot = pndown;
2721 }
2722
2723 /* pndown is a primary expression, not a dotted property reference. */
2724 if (!js_EmitTree(cx, cg, pndown))
2725 return JS_FALSE;
2726
2727 do {
2728 /* Walk back up the list, emitting annotated name ops. */
2729 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2730 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2731 return JS_FALSE;
2732 }
2733
2734 /*
2735 * Special case deoptimization on __proto__, __count__ and
2736 * __parent__, as above.
2737 */
2738 if (pndot->pn_arity == PN_NAME &&
2739 (pndot->pn_atom == cx->runtime->atomState.protoAtom ||
2740 pndot->pn_atom == cx->runtime->atomState.parentAtom ||
2741 pndot->pn_atom == cx->runtime->atomState.countAtom)) {
2742 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2743 return JS_FALSE;
2744 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2745 return JS_FALSE;
2746 }
2747
2748 /* Reverse the pn_expr link again. */
2749 pnup = pndot->pn_expr;
2750 pndot->pn_expr = pndown;
2751 pndown = pndot;
2752 } while ((pndot = pnup) != NULL);
2753 } else {
2754 if (!js_EmitTree(cx, cg, pn2))
2755 return JS_FALSE;
2756 }
2757
2758 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2759 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2760 return JS_FALSE;
2761 }
2762
2763 return EmitAtomOp(cx, pn, op, cg);
2764 }
2765
2766 static JSBool
2767 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2768 {
2769 ptrdiff_t top;
2770 JSParseNode *left, *right, *next, ltmp, rtmp;
2771 jsint slot;
2772
2773 top = CG_OFFSET(cg);
2774 if (pn->pn_arity == PN_LIST) {
2775 /* Left-associative operator chain to avoid too much recursion. */
2776 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2777 JS_ASSERT(pn->pn_count >= 3);
2778 left = pn->pn_head;
2779 right = pn->last();
2780 next = left->pn_next;
2781 JS_ASSERT(next != right);
2782
2783 /*
2784 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2785 * one or more index-expression/JSOP_GETELEM op pairs.
2786 */
2787 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2788 if (!BindNameToSlot(cx, cg, left))
2789 return JS_FALSE;
2790 if (left->pn_op == JSOP_ARGUMENTS &&
2791 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2792 (jsuint)slot < JS_BIT(16)) {
2793 /*
2794 * arguments[i]() requires the arguments object as |this|.
2795 * Check that we never generate a list for that usage.
2796 */
2797 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2798 left->pn_offset = next->pn_offset = top;
2799 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2800 left = next;
2801 next = left->pn_next;
2802 }
2803 }
2804
2805 /*
2806 * Check whether we generated JSOP_ARGSUB, just above, and have only
2807 * one more index expression to emit. Given arguments[0][j], we must
2808 * skip the while loop altogether, falling through to emit code for j
2809 * (in the subtree referenced by right), followed by the annotated op,
2810 * at the bottom of this function.
2811 */
2812 JS_ASSERT(next != right || pn->pn_count == 3);
2813 if (left == pn->pn_head) {
2814 if (!js_EmitTree(cx, cg, left))
2815 return JS_FALSE;
2816 }
2817 while (next != right) {
2818 if (!js_EmitTree(cx, cg, next))
2819 return JS_FALSE;
2820 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2821 return JS_FALSE;
2822 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2823 return JS_FALSE;
2824 next = next->pn_next;
2825 }
2826 } else {
2827 if (pn->pn_arity == PN_NAME) {
2828 /*
2829 * Set left and right so pn appears to be a TOK_LB node, instead
2830 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2831 * EmitDestructuringOps further below. In the destructuring case,
2832 * the base expression (pn_expr) of the name may be null, which
2833 * means we have to emit a JSOP_BINDNAME.
2834 */
2835 left = pn->maybeExpr();
2836 if (!left) {
2837 left = &ltmp;
2838 left->pn_type = TOK_STRING;
2839 left->pn_op = JSOP_BINDNAME;
2840 left->pn_arity = PN_NULLARY;
2841 left->pn_pos = pn->pn_pos;
2842 left->pn_atom = pn->pn_atom;
2843 }
2844 right = &rtmp;
2845 right->pn_type = TOK_STRING;
2846 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2847 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2848 ? JSOP_QNAMEPART
2849 : JSOP_STRING;
2850 right->pn_arity = PN_NULLARY;
2851 right->pn_pos = pn->pn_pos;
2852 right->pn_atom = pn->pn_atom;
2853 } else {
2854 JS_ASSERT(pn->pn_arity == PN_BINARY);
2855 left = pn->pn_left;
2856 right = pn->pn_right;
2857 }
2858
2859 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2860 if (op == JSOP_GETELEM &&
2861 left->pn_type == TOK_NAME &&
2862 right->pn_type == TOK_NUMBER) {
2863 if (!BindNameToSlot(cx, cg, left))
2864 return JS_FALSE;
2865 if (left->pn_op == JSOP_ARGUMENTS &&
2866 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2867 (jsuint)slot < JS_BIT(16)) {
2868 left->pn_offset = right->pn_offset = top;
2869 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2870 return JS_TRUE;
2871 }
2872 }
2873
2874 if (!js_EmitTree(cx, cg, left))
2875 return JS_FALSE;
2876 }
2877
2878 /* The right side of the descendant operator is implicitly quoted. */
2879 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2880 right->pn_op == JSOP_QNAMEPART);
2881 if (!js_EmitTree(cx, cg, right))
2882 return JS_FALSE;
2883 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2884 return JS_FALSE;
2885 return js_Emit1(cx, cg, op) >= 0;
2886 }
2887
2888 static JSBool
2889 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2890 {
2891 jsint ival;
2892 uint32 u;
2893 ptrdiff_t off;
2894 jsbytecode *pc;
2895 JSAtom *atom;
2896 JSAtomListElement *ale;
2897
2898 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
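/*
 * Use the smallest encoding that fits: JSOP_ZERO/JSOP_ONE, then
 * JSOP_INT8, JSOP_UINT16, JSOP_UINT24, and finally JSOP_INT32.
 */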
2899 if (ival == 0)
2900 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2901 if (ival == 1)
2902 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2903 if ((jsint)(int8)ival == ival)
2904 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2905
2906 u = (uint32)ival;
2907 if (u < JS_BIT(16)) {
2908 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2909 } else if (u < JS_BIT(24)) {
2910 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2911 if (off < 0)
2912 return JS_FALSE;
2913 pc = CG_CODE(cg, off);
2914 SET_UINT24(pc, u);
2915 } else {
2916 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2917 if (off < 0)
2918 return JS_FALSE;
2919 pc = CG_CODE(cg, off);
2920 SET_INT32(pc, ival);
2921 }
2922 return JS_TRUE;
2923 }
2924
2925 atom = js_AtomizeDouble(cx, dval);
2926 if (!atom)
2927 return JS_FALSE;
2928
2929 ale = cg->atomList.add(cg->compiler, atom);
2930 if (!ale)
2931 return JS_FALSE;
2932 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
2933 }
2934
2935 static JSBool
2936 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2937 JSStmtInfo *stmtInfo)
2938 {
2939 JSOp switchOp;
2940 JSBool ok, hasDefault, constPropagated;
2941 ptrdiff_t top, off, defaultOffset;
2942 JSParseNode *pn2, *pn3, *pn4;
2943 uint32 caseCount, tableLength;
2944 JSParseNode **table;
2945 jsdouble d;
2946 jsint i, low, high;
2947 jsval v;
2948 JSAtom *atom;
2949 JSAtomListElement *ale;
2950 intN noteIndex;
2951 size_t switchSize, tableSize;
2952 jsbytecode *pc, *savepc;
2953 #if JS_HAS_BLOCK_SCOPE
2954 jsint count;
2955 #endif
2956
2957 /* Try for the most optimal op first; fall back if the cases are not dense ints, per ECMAv2. */
2958 switchOp = JSOP_TABLESWITCH;
2959 ok = JS_TRUE;
2960 hasDefault = constPropagated = JS_FALSE;
2961 defaultOffset = -1;
2962
2963 /*
2964 * If the switch contains let variables scoped by its body, model the
2965 * resulting block on the stack first, before emitting the discriminant's
2966 * bytecode (in case the discriminant contains a stack-model dependency
2967 * such as a let expression).
2968 */
2969 pn2 = pn->pn_right;
2970 #if JS_HAS_BLOCK_SCOPE
2971 if (pn2->pn_type == TOK_LEXICALSCOPE) {
2972 /*
2973 * Push the body's block scope before discriminant code-gen for proper
2974 * static block scope linkage in case the discriminant contains a let
2975 * expression. The block's locals must lie under the discriminant on
2976 * the stack so that case-dispatch bytecodes can find the discriminant
2977 * on top of stack.
2978 */
2979 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
2980 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
2981 stmtInfo->type = STMT_SWITCH;
2982
2983 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2984 if (!EmitEnterBlock(cx, pn2, cg))
2985 return JS_FALSE;
2986
2987 /*
2988 * Pop the switch's statement info around discriminant code-gen. Note
2989 * how this leaves cg->blockChain referencing the switch's
2990 * block scope object, which is necessary for correct block parenting
2991 * in the case where the discriminant contains a let expression.
2992 */
2993 cg->topStmt = stmtInfo->down;
2994 cg->topScopeStmt = stmtInfo->downScope;
2995 }
2996 #ifdef __GNUC__
2997 else {
2998 count = 0;
2999 }
3000 #endif
3001 #endif
3002
3003 /*
3004 * Emit code for the discriminant first (or nearly first, in the case of a
3005 * switch whose body is a block scope).
3006 */
3007 if (!js_EmitTree(cx, cg, pn->pn_left))
3008 return JS_FALSE;
3009
3010 /* Switch bytecodes run from here till end of final case. */
3011 top = CG_OFFSET(cg);
3012 #if !JS_HAS_BLOCK_SCOPE
3013 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3014 #else
3015 if (pn2->pn_type == TOK_LC) {
3016 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3017 } else {
3018 /* Re-push the switch's statement info record. */
3019 cg->topStmt = cg->topScopeStmt = stmtInfo;
3020
3021 /* Set the statement info record's idea of top. */
3022 stmtInfo->update = top;
3023
3024 /* Advance pn2 to refer to the switch case list. */
3025 pn2 = pn2->expr();
3026 }
3027 #endif
3028
3029 caseCount = pn2->pn_count;
3030 tableLength = 0;
3031 table = NULL;
3032
3033 if (caseCount == 0 ||
3034 (caseCount == 1 &&
3035 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3036 caseCount = 0;
3037 low = 0;
3038 high = -1;
3039 } else {
3040 #define INTMAP_LENGTH 256
3041 jsbitmap intmap_space[INTMAP_LENGTH];
3042 jsbitmap *intmap = NULL;
3043 int32 intmap_bitlen = 0;
3044
3045 low = JSVAL_INT_MAX;
3046 high = JSVAL_INT_MIN;
3047
3048 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3049 if (pn3->pn_type == TOK_DEFAULT) {
3050 hasDefault = JS_TRUE;
3051 caseCount--; /* one of the "cases" was the default */
3052 continue;
3053 }
3054
3055 JS_ASSERT(pn3->pn_type == TOK_CASE);
3056 if (switchOp == JSOP_CONDSWITCH)
3057 continue;
3058
3059 pn4 = pn3->pn_left;
3060 while (pn4->pn_type == TOK_RP)
3061 pn4 = pn4->pn_kid;
3062 switch (pn4->pn_type) {
3063 case TOK_NUMBER:
3064 d = pn4->pn_dval;
3065 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3066 pn3->pn_val = INT_TO_JSVAL(i);
3067 } else {
3068 atom = js_AtomizeDouble(cx, d);
3069 if (!atom) {
3070 ok = JS_FALSE;
3071 goto release;
3072 }
3073 pn3->pn_val = ATOM_KEY(atom);
3074 }
3075 break;
3076 case TOK_STRING:
3077 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3078 break;
3079 case TOK_NAME:
3080 if (!pn4->maybeExpr()) {
3081 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3082 if (!ok)
3083 goto release;
3084 if (v != JSVAL_HOLE) {
3085 if (!JSVAL_IS_PRIMITIVE(v)) {
3086 /*
3087 * XXX JSOP_LOOKUPSWITCH does not support const-
3088 * propagated object values, see bug 407186.
3089 */
3090 switchOp = JSOP_CONDSWITCH;
3091 continue;
3092 }
3093 pn3->pn_val = v;
3094 constPropagated = JS_TRUE;
3095 break;
3096 }
3097 }
3098 /* FALL THROUGH */
3099 case TOK_PRIMARY:
3100 if (pn4->pn_op == JSOP_TRUE) {
3101 pn3->pn_val = JSVAL_TRUE;
3102 break;
3103 }
3104 if (pn4->pn_op == JSOP_FALSE) {
3105 pn3->pn_val = JSVAL_FALSE;
3106 break;
3107 }
3108 /* FALL THROUGH */
3109 default:
3110 switchOp = JSOP_CONDSWITCH;
3111 continue;
3112 }
3113
3114 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3115
3116 if (switchOp != JSOP_TABLESWITCH)
3117 continue;
3118 if (!JSVAL_IS_INT(pn3->pn_val)) {
3119 switchOp = JSOP_LOOKUPSWITCH;
3120 continue;
3121 }
3122 i = JSVAL_TO_INT(pn3->pn_val);
3123 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3124 switchOp = JSOP_LOOKUPSWITCH;
3125 continue;
3126 }
3127 if (i < low)
3128 low = i;
3129 if (high < i)
3130 high = i;
3131
3132 /*
3133 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3134 * We bias i by 65536 if it's negative, and hope that's a rare
3135 * case (because it requires a malloc'd bitmap).
3136 */
3137 if (i < 0)
3138 i += JS_BIT(16);
3139 if (i >= intmap_bitlen) {
3140 if (!intmap &&
3141 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3142 intmap = intmap_space;
3143 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3144 } else {
3145 /* Just grab 8K for the worst-case bitmap. */
3146 intmap_bitlen = JS_BIT(16);
3147 intmap = (jsbitmap *)
3148 JS_malloc(cx,
3149 (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3150 * sizeof(jsbitmap));
3151 if (!intmap) {
3152 JS_ReportOutOfMemory(cx);
3153 return JS_FALSE;
3154 }
3155 }
3156 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3157 }
3158 if (JS_TEST_BIT(intmap, i)) {
3159 switchOp = JSOP_LOOKUPSWITCH;
3160 continue;
3161 }
3162 JS_SET_BIT(intmap, i);
3163 }
3164
3165 release:
3166 if (intmap && intmap != intmap_space)
3167 JS_free(cx, intmap);
3168 if (!ok)
3169 return JS_FALSE;
3170
3171 /*
3172 * Compute the table length, and select JSOP_LOOKUPSWITCH instead if
3173 * the table would be overlarge or more than half-sparse.
3174 */
3175 if (switchOp == JSOP_TABLESWITCH) {
3176 tableLength = (uint32)(high - low + 1);
3177 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3178 switchOp = JSOP_LOOKUPSWITCH;
3179 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3180 /*
3181 * Lookup switch supports only atom indexes below the 64K limit.
3182 * Conservatively estimate the maximum possible index during
3183 * switch generation and use conditional switch if it exceeds
3184 * the limit.
3185 */
3186 if (caseCount + cg->atomList.count > JS_BIT(16))
3187 switchOp = JSOP_CONDSWITCH;
3188 }
3189 }
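/*
 * Illustrative summary of the choice above: dense int cases such as
 * case 1:/case 2:/case 3: keep JSOP_TABLESWITCH; non-int constants
 * such as case "a": select JSOP_LOOKUPSWITCH; any non-constant case
 * expression forces JSOP_CONDSWITCH.
 */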
3190
3191 /*
3192 * Emit a note with two offsets: the first gives the total switch code
3193 * length, the second the offset to the first JSOP_CASE if condswitch.
3194 */
3195 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3196 if (noteIndex < 0)
3197 return JS_FALSE;
3198
3199 if (switchOp == JSOP_CONDSWITCH) {
3200 /*
3201 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3202 */
3203 switchSize = 0;
3204 } else if (switchOp == JSOP_TABLESWITCH) {
3205 /*
3206 * 3 offsets (len, low, high) before the table, 1 per entry.
3207 */
3208 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3209 } else {
3210 /*
3211 * JSOP_LOOKUPSWITCH:
3212 * 1 offset (len) and 1 atom index (npairs) before the table,
3213 * 1 atom index and 1 jump offset per entry.
3214 */
3215 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3216 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3217 }
3218
3219 /*
3220 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3221 *
3222 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3223 * to emit the immediate operand(s) by which bytecode readers such as
3224 * BuildSpanDepTable discover the length of the switch opcode *before*
3225 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3226 * also important to zero all unknown jump offset immediate operands,
3227 * so they can be converted to span dependencies with null targets to
3228 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3229 */
3230 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3231 return JS_FALSE;
3232
3233 off = -1;
3234 if (switchOp == JSOP_CONDSWITCH) {
3235 intN caseNoteIndex = -1;
3236 JSBool beforeCases = JS_TRUE;
3237
3238 /* Emit code for evaluating cases and jumping to case statements. */
3239 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3240 pn4 = pn3->pn_left;
3241 if (pn4 && !js_EmitTree(cx, cg, pn4))
3242 return JS_FALSE;
3243 if (caseNoteIndex >= 0) {
3244 /* off is the previous JSOP_CASE's bytecode offset. */
3245 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3246 CG_OFFSET(cg) - off)) {
3247 return JS_FALSE;
3248 }
3249 }
3250 if (!pn4) {
3251 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3252 continue;
3253 }
3254 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3255 if (caseNoteIndex < 0)
3256 return JS_FALSE;
3257 off = EmitJump(cx, cg, JSOP_CASE, 0);
3258 if (off < 0)
3259 return JS_FALSE;
3260 pn3->pn_offset = off;
3261 if (beforeCases) {
3262 uintN noteCount, noteCountDelta;
3263
3264 /* Switch note's second offset is to first JSOP_CASE. */
3265 noteCount = CG_NOTE_COUNT(cg);
3266 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3267 off - top)) {
3268 return JS_FALSE;
3269 }
3270 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3271 if (noteCountDelta != 0)
3272 caseNoteIndex += noteCountDelta;
3273 beforeCases = JS_FALSE;
3274 }
3275 }
3276
3277 /*
3278 * If we didn't have an explicit default (which could fall in between
3279 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3280 * call in the loop above), link the last case to the implicit default
3281 * for the decompiler.
3282 */
3283 if (!hasDefault &&
3284 caseNoteIndex >= 0 &&
3285 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3286 CG_OFFSET(cg) - off)) {
3287 return JS_FALSE;
3288 }
3289
3290 /* Emit default even if no explicit default statement. */
3291 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3292 if (defaultOffset < 0)
3293 return JS_FALSE;
3294 } else {
3295 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3296
3297 if (switchOp == JSOP_TABLESWITCH) {
3298 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3299 SET_JUMP_OFFSET(pc, low);
3300 pc += JUMP_OFFSET_LEN;
3301 SET_JUMP_OFFSET(pc, high);
3302