Parent Directory
|
Revision Log
Use SpiderMonkey from Firefox 3.1b2.
1 | /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
2 | * vim: set ts=8 sw=4 et tw=99: |
3 | * |
4 | * ***** BEGIN LICENSE BLOCK ***** |
5 | * Version: MPL 1.1/GPL 2.0/LGPL 2.1 |
6 | * |
7 | * The contents of this file are subject to the Mozilla Public License Version |
8 | * 1.1 (the "License"); you may not use this file except in compliance with |
9 | * the License. You may obtain a copy of the License at |
10 | * http://www.mozilla.org/MPL/ |
11 | * |
12 | * Software distributed under the License is distributed on an "AS IS" basis, |
13 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License |
14 | * for the specific language governing rights and limitations under the |
15 | * License. |
16 | * |
17 | * The Original Code is Mozilla Communicator client code, released |
18 | * March 31, 1998. |
19 | * |
20 | * The Initial Developer of the Original Code is |
21 | * Netscape Communications Corporation. |
22 | * Portions created by the Initial Developer are Copyright (C) 1998 |
23 | * the Initial Developer. All Rights Reserved. |
24 | * |
25 | * Contributor(s): |
26 | * |
27 | * Alternatively, the contents of this file may be used under the terms of |
28 | * either of the GNU General Public License Version 2 or later (the "GPL"), |
29 | * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), |
30 | * in which case the provisions of the GPL or the LGPL are applicable instead |
31 | * of those above. If you wish to allow use of your version of this file only |
32 | * under the terms of either the GPL or the LGPL, and not to allow others to |
33 | * use your version of this file under the terms of the MPL, indicate your |
34 | * decision by deleting the provisions above and replace them with the notice |
35 | * and other provisions required by the GPL or the LGPL. If you do not delete |
36 | * the provisions above, a recipient may use your version of this file under |
37 | * the terms of any one of the MPL, the GPL or the LGPL. |
38 | * |
39 | * ***** END LICENSE BLOCK ***** */ |
40 | |
41 | /* |
42 | * JS bytecode generation. |
43 | */ |
44 | #include "jsstddef.h" |
45 | #ifdef HAVE_MEMORY_H |
46 | #include <memory.h> |
47 | #endif |
48 | #include <string.h> |
49 | #include "jstypes.h" |
50 | #include "jsarena.h" /* Added by JSIFY */ |
51 | #include "jsutil.h" /* Added by JSIFY */ |
52 | #include "jsbit.h" |
53 | #include "jsprf.h" |
54 | #include "jsapi.h" |
55 | #include "jsatom.h" |
56 | #include "jsbool.h" |
57 | #include "jscntxt.h" |
58 | #include "jsversion.h" |
59 | #include "jsemit.h" |
60 | #include "jsfun.h" |
61 | #include "jsnum.h" |
62 | #include "jsopcode.h" |
63 | #include "jsparse.h" |
64 | #include "jsregexp.h" |
65 | #include "jsscan.h" |
66 | #include "jsscope.h" |
67 | #include "jsscript.h" |
68 | #include "jsautooplen.h" |
69 | #include "jsstaticcheck.h" |
70 | |
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

/* Forward declaration; the definition appears later in this file. */
static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);
84 | |
/*
 * Initialize a code generator for a new compilation.  codePool and notePool
 * supply arena storage for bytecode and source notes; lineno seeds the line
 * number of both the prolog and main bytecode sections.
 */
JS_FRIEND_API(void)
js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg, JSParseContext *pc,
                     JSArenaPool *codePool, JSArenaPool *notePool,
                     uintN lineno)
{
    /*
     * Zero the whole struct first so every field not explicitly set below
     * (pointers, counters, flags) starts out null/0.
     */
    memset(cg, 0, sizeof *cg);
    TREE_CONTEXT_INIT(&cg->treeContext, pc);
    cg->codePool = codePool;
    cg->notePool = notePool;
    /* Record arena marks so js_FinishCodeGenerator can release our storage. */
    cg->codeMark = JS_ARENA_MARK(codePool);
    cg->noteMark = JS_ARENA_MARK(notePool);
    /* Emit into the main section by default. */
    cg->current = &cg->main;
    cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
    ATOM_LIST_INIT(&cg->atomList);
    /* noteMask is a power-of-two minus 1, used when growing srcnote storage. */
    cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
    ATOM_LIST_INIT(&cg->constList);
    ATOM_LIST_INIT(&cg->upvarList);
}
103 | |
/*
 * Tear down a code generator: finish the tree context and release the
 * arena storage claimed since js_InitCodeGenerator, plus any malloc'ed
 * auxiliary vectors.
 */
JS_FRIEND_API(void)
js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
{
    TREE_CONTEXT_FINISH(cx, &cg->treeContext);
    JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
    JS_ARENA_RELEASE(cg->notePool, cg->noteMark);

    /* NB: non-null only after OOM. */
    if (cg->spanDeps)
        JS_free(cx, cg->spanDeps);

    /* upvarMap.vector is allocated lazily; free it if it was ever created. */
    if (cg->upvarMap.vector)
        JS_free(cx, cg->upvarMap.vector);
}
118 | |
/*
 * Ensure there is room for delta more bytecodes in the current section,
 * growing the arena-backed vector if needed.  Returns the offset at which
 * the new code will be written, or -1 after reporting an out-of-quota
 * error.  NB: op is currently unused; callers pass it for uniformity.
 */
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = PTRDIFF(next, base, jsbytecode);
    if (next + delta > limit) {
        /* Round the needed length up to a power of two, min BYTECODE_CHUNK. */
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            /* First allocation for this section. */
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            /* Grow in place (or move); incr becomes the additional bytes. */
            size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        /* base may have moved: re-derive limit and next from it. */
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
153 | |
/*
 * Update cg->stackDepth and cg->maxStackDepth to reflect the stack effect
 * of the opcode just emitted at offset target.
 */
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN depth;
    intN nuses, ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
    if (cs->format & JOF_TMPSLOT_MASK) {
        /*
         * Ops that need temporary stack slots at runtime must have those
         * slots counted against the maximum depth, even though they do not
         * appear in nuses/ndefs.
         */
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }
    nuses = cs->nuses;
    if (nuses < 0)
        /* Negative nuses: the use count depends on immediate operands. */
        nuses = js_GetVariableStackUseLength(op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        /* Underflow: debug builds assert above; release builds warn here. */
        char numBuf[12];
        JSTokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->treeContext.parseContext->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->filename ? ts->filename : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastPob->object;
        JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        /* Record the block's stack base; its local count is our def count. */
        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}
207 | |
208 | ptrdiff_t |
209 | js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op) |
210 | { |
211 | ptrdiff_t offset = EmitCheck(cx, cg, op, 1); |
212 | |
213 | if (offset >= 0) { |
214 | *CG_NEXT(cg)++ = (jsbytecode)op; |
215 | UpdateDepth(cx, cg, offset); |
216 | } |
217 | return offset; |
218 | } |
219 | |
220 | ptrdiff_t |
221 | js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1) |
222 | { |
223 | ptrdiff_t offset = EmitCheck(cx, cg, op, 2); |
224 | |
225 | if (offset >= 0) { |
226 | jsbytecode *next = CG_NEXT(cg); |
227 | next[0] = (jsbytecode)op; |
228 | next[1] = op1; |
229 | CG_NEXT(cg) = next + 2; |
230 | UpdateDepth(cx, cg, offset); |
231 | } |
232 | return offset; |
233 | } |
234 | |
235 | ptrdiff_t |
236 | js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1, |
237 | jsbytecode op2) |
238 | { |
239 | ptrdiff_t offset = EmitCheck(cx, cg, op, 3); |
240 | |
241 | if (offset >= 0) { |
242 | jsbytecode *next = CG_NEXT(cg); |
243 | next[0] = (jsbytecode)op; |
244 | next[1] = op1; |
245 | next[2] = op2; |
246 | CG_NEXT(cg) = next + 3; |
247 | UpdateDepth(cx, cg, offset); |
248 | } |
249 | return offset; |
250 | } |
251 | |
252 | ptrdiff_t |
253 | js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra) |
254 | { |
255 | ptrdiff_t length = 1 + (ptrdiff_t)extra; |
256 | ptrdiff_t offset = EmitCheck(cx, cg, op, length); |
257 | |
258 | if (offset >= 0) { |
259 | jsbytecode *next = CG_NEXT(cg); |
260 | *next = (jsbytecode)op; |
261 | memset(next + 1, 0, BYTECODE_SIZE(extra)); |
262 | CG_NEXT(cg) = next + length; |
263 | |
264 | /* |
265 | * Don't UpdateDepth if op's use-count comes from the immediate |
266 | * operand yet to be stored in the extra bytes after op. |
267 | */ |
268 | if (js_CodeSpec[op].nuses >= 0) |
269 | UpdateDepth(cx, cg, offset); |
270 | } |
271 | return offset; |
272 | } |
273 | |
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[] = "finally block";
const char js_script_str[] = "script";

/* Human-readable statement names, indexed by statement type (see STMT_*). */
static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

/* Keep the table in sync with the statement-type enumeration. */
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
298 | |
299 | static const char * |
300 | StatementName(JSCodeGenerator *cg) |
301 | { |
302 | if (!cg->treeContext.topStmt) |
303 | return js_script_str; |
304 | return statementName[cg->treeContext.topStmt->type]; |
305 | } |
306 | |
/* Report a JSMSG_NEED_DIET error naming the offending statement kind. */
static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}
313 | |
314 | /** |
315 | Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP) |
316 | and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided |
317 | into unconditional (gotos and gosubs), and conditional jumps or branches |
318 | (which pop a value, test it, and jump depending on its value). Most jumps |
319 | have just one immediate operand, a signed offset from the jump opcode's pc |
320 | to the target bytecode. The lookup and table switch opcodes may contain |
321 | many jump offsets. |
322 | |
323 | Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was |
324 | fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is |
325 | suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for |
326 | the extended form of the JSOP_OR branch opcode). The unextended or short |
327 | formats have 16-bit signed immediate offset operands, the extended or long |
328 | formats have 32-bit signed immediates. The span-dependency problem consists |
329 | of selecting as few long instructions as possible, or about as few -- since |
330 | jumps can span other jumps, extending one jump may cause another to need to |
331 | be extended. |
332 | |
333 | Most JS scripts are short, so need no extended jumps. We optimize for this |
334 | case by generating short jumps until we know a long jump is needed. After |
335 | that point, we keep generating short jumps, but each jump's 16-bit immediate |
336 | offset operand is actually an unsigned index into cg->spanDeps, an array of |
337 | JSSpanDep structs. Each struct tells the top offset in the script of the |
338 | opcode, the "before" offset of the jump (which will be the same as top for |
339 | simplex jumps, but which will index further into the bytecode array for a |
340 | non-initial jump offset in a lookup or table switch), the after "offset" |
341 | adjusted during span-dependent instruction selection (initially the same |
342 | value as the "before" offset), and the jump target (more below). |
343 | |
344 | Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must |
345 | ensure that all bytecode generated so far can be inspected to discover where |
346 | the jump offset immediate operands lie within CG_CODE(cg). But the bonus is |
347 | that we generate span-dependency records sorted by their offsets, so we can |
348 | binary-search when trying to find a JSSpanDep for a given bytecode offset, |
349 | or the nearest JSSpanDep at or above a given pc. |
350 | |
351 | To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows |
352 | 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This |
353 | tells us that we need to binary-search for the cg->spanDeps entry by the |
354 | jump opcode's bytecode offset (sd->before). |
355 | |
356 | Jump targets need to be maintained in a data structure that lets us look |
357 | up an already-known target by its address (jumps may have a common target), |
358 | and that also lets us update the addresses (script-relative, a.k.a. absolute |
359 | offsets) of targets that come after a jump target (for when a jump below |
360 | that target needs to be extended). We use an AVL tree, implemented using |
361 | recursion, but with some tricky optimizations to its height-balancing code |
362 | (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html). |
363 | |
364 | A final wrinkle: backpatch chains are linked by jump-to-jump offsets with |
365 | positive sign, even though they link "backward" (i.e., toward lower bytecode |
366 | address). We don't want to waste space and search time in the AVL tree for |
367 | such temporary backpatch deltas, so we use a single-bit wildcard scheme to |
368 | tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas |
369 | in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known |
370 | target, or is still awaiting backpatching. |
371 | |
372 | Note that backpatch chains would present a problem for BuildSpanDepTable, |
373 | which inspects bytecode to build cg->spanDeps on demand, when the first |
374 | short jump offset overflows. To solve this temporary problem, we emit a |
375 | proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose |
376 | nuses/ndefs counts help keep the stack balanced, but whose opcode format |
377 | distinguishes its backpatch delta immediate operand from a normal jump |
378 | offset. |
379 | */ |
/*
 * Rebalance the AVL subtree rooted at *jtp after an insertion has pushed
 * its balance factor outside [-1, 1].  Returns 1 if the subtree's height
 * changed, 0 otherwise, so callers can stop or continue propagating
 * balance updates up the tree.
 */
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        /* Left-heavy: rotate right; double-rotate if left kid leans right. */
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        /* Right-heavy: rotate left; double-rotate if right kid leans left. */
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        /* Balance within tolerance: nothing to do, height unchanged. */
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        /* Double rotation: hoist the grandchild to be the subtree root. */
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        /* Recompute kid balances from the new root's pre-rotation balance. */
        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        /* Single rotation toward dir. */
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}
426 | |
/* Argument bundle threaded through recursive AddJumpTarget calls. */
typedef struct AddJumpTargetArgs {
    JSContext *cx;              /* context for allocation and error reports */
    JSCodeGenerator *cg;        /* owner of the jump-target tree/free list */
    ptrdiff_t offset;           /* absolute bytecode offset to intern */
    JSJumpTarget *node;         /* out: found/created node; null after OOM */
} AddJumpTargetArgs;
433 | |
/*
 * Find or insert a jump-target node for args->offset in the AVL tree rooted
 * at *jtp.  On return args->node points at the node, or is null after OOM.
 * The int return value is the subtree's height change (0 or 1), consumed by
 * the recursive calls to maintain AVL balance factors.
 */
static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        /* Prefer recycling a node from the free list over arena allocation. */
        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        /* New leaf: this subtree grew one level. */
        return 1;
    }

    if (jt->offset == args->offset) {
        /* Target already interned: share it, no height change. */
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    /* Fold the child's height change into our balance; rebalance if needed. */
    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}
481 | |
#ifdef DEBUG_brendan
/* Debug-only: verify AVL invariants; returns the subtree height. */
static int AVLCheck(JSJumpTarget *jt)
{
    int leftHeight, rightHeight;

    if (!jt)
        return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    leftHeight = AVLCheck(jt->kids[JT_LEFT]);
    rightHeight = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rightHeight - leftHeight);
    return 1 + JS_MAX(leftHeight, rightHeight);
}
#endif
495 | |
/*
 * Resolve span dependency sd to the absolute target sd->top + off, interning
 * that offset in cg's AVL tree of jump targets.  Fails (with a report) when
 * off exceeds even the extended 32-bit jump range, or on OOM.
 */
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    /* Even extended jumps cannot encode offsets outside the JUMPX range. */
    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    /* args.node stays null only if AddJumpTarget hit OOM. */
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}
522 | |
#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)

/*
 * Append a span-dependency record for the jump-offset immediate at pc2,
 * belonging to the opcode at pc (pc == pc2 for simple jumps; a switch has
 * several offsets sharing one opcode).  off is the operand's current value:
 * a backpatch delta for JOF_BACKPATCH ops, else 0 or a known jump span.
 */
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    /* index + 1 wrapping to 0 means the count overflowed uintN. */
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    /*
     * Grow geometrically: (re)allocate only when index is a power of two.
     * The first allocation is SPANDEPS_SIZE_MIN bytes (size + size below);
     * thereafter capacity doubles each time index reaches it.
     */
    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) JS_realloc(cx, sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
    sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    /* Stash the spandep index in the jump's 16-bit operand for fast lookup. */
    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}
579 | |
/*
 * Record span dependencies for every jump offset in a switch opcode at pc:
 * the default-target offset first, then one per case.  Returns the pc of
 * the next opcode, or NULL on failure.
 */
static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        /* Table switch: [low, high] range implies high - low + 1 jumps. */
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        /* Lookup switch: explicit case count; each case has an index. */
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;        /* skip the per-case index, if any */
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    /* The +1 accounts for the switch opcode byte, never added to pc2 above. */
    return 1 + pc2;
}
618 | |
/*
 * Scan bytecode from cg->spanDepTodo through CG_NEXT(cg), adding a span
 * dependency record for every jump or switch offset found, so that each
 * 16-bit jump operand can temporarily hold a spandep index instead.
 */
static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            /* Switches have many offsets; the helper advances pc for us. */
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}
655 | |
656 | static JSSpanDep * |
657 | GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc) |
658 | { |
659 | uintN index; |
660 | ptrdiff_t offset; |
661 | int lo, hi, mid; |
662 | JSSpanDep *sd; |
663 | |
664 | index = GET_SPANDEP_INDEX(pc); |
665 | if (index != SPANDEP_INDEX_HUGE) |
666 | return cg->spanDeps + index; |
667 | |
668 | offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode); |
669 | lo = 0; |
670 | hi = cg->numSpanDeps - 1; |
671 | while (lo <= hi) { |
672 | mid = (lo + hi) / 2; |
673 | sd = cg->spanDeps + mid; |
674 | if (sd->before == offset) |
675 | return sd; |
676 | if (sd->before < offset) |
677 | lo = mid + 1; |
678 | else |
679 | hi = mid - 1; |
680 | } |
681 | |
682 | JS_ASSERT(0); |
683 | return NULL; |
684 | } |
685 | |
/*
 * Store a backpatch-chain delta for the jump at pc.  While no spandep table
 * exists and the delta fits, it goes straight into the 16-bit operand; once
 * a table is needed it is built on demand and the delta is recorded in the
 * jump's JSSpanDep instead.
 */
static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    /* First oversized delta: build the spandep table lazily. */
    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}
711 | |
712 | static void |
713 | UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta) |
714 | { |
715 | if (jt->offset > pivot) { |
716 | jt->offset += delta; |
717 | if (jt->kids[JT_LEFT]) |
718 | UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta); |
719 | } |
720 | if (jt->kids[JT_RIGHT]) |
721 | UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta); |
722 | } |
723 | |
724 | static JSSpanDep * |
725 | FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo, |
726 | JSSpanDep *guard) |
727 | { |
728 | int num, hi, mid; |
729 | JSSpanDep *sdbase, *sd; |
730 | |
731 | num = cg->numSpanDeps; |
732 | JS_ASSERT(num > 0); |
733 | hi = num - 1; |
734 | sdbase = cg->spanDeps; |
735 | while (lo <= hi) { |
736 | mid = (lo + hi) / 2; |
737 | sd = sdbase + mid; |
738 | if (sd->before == offset) |
739 | return sd; |
740 | if (sd->before < offset) |
741 | lo = mid + 1; |
742 | else |
743 | hi = mid - 1; |
744 | } |
745 | if (lo == num) |
746 | return guard; |
747 | sd = sdbase + lo; |
748 | JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset)); |
749 | return sd; |
750 | } |
751 | |
752 | static void |
753 | FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt) |
754 | { |
755 | if (jt->kids[JT_LEFT]) |
756 | FreeJumpTargets(cg, jt->kids[JT_LEFT]); |
757 | if (jt->kids[JT_RIGHT]) |
758 | FreeJumpTargets(cg, jt->kids[JT_RIGHT]); |
759 | jt->kids[JT_LEFT] = cg->jtFreeList; |
760 | cg->jtFreeList = jt; |
761 | } |
762 | |
763 | static JSBool |
764 | OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg) |
765 | { |
766 | jsbytecode *pc, *oldpc, *base, *limit, *next; |
767 | JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard; |
768 | ptrdiff_t offset, growth, delta, top, pivot, span, length, target; |
769 | JSBool done; |
770 | JSOp op; |
771 | uint32 type; |
772 | size_t size, incr; |
773 | jssrcnote *sn, *snlimit; |
774 | JSSrcNoteSpec *spec; |
775 | uintN i, n, noteIndex; |
776 | JSTryNode *tryNode; |
777 | #ifdef DEBUG_brendan |
778 | int passes = 0; |
779 | #endif |
780 | |
781 | base = CG_BASE(cg); |
782 | sdbase = cg->spanDeps; |
783 | sdlimit = sdbase + cg->numSpanDeps; |
784 | offset = CG_OFFSET(cg); |
785 | growth = 0; |
786 | |
787 | do { |
788 | done = JS_TRUE; |
789 | delta = 0; |
790 | top = pivot = -1; |
791 | sdtop = NULL; |
792 | pc = NULL; |
793 | op = JSOP_NOP; |
794 | type = 0; |
795 | #ifdef DEBUG_brendan |
796 | passes++; |
797 | #endif |
798 | |
799 | for (sd = sdbase; sd < sdlimit; sd++) { |
800 | JS_ASSERT(JT_HAS_TAG(sd->target)); |
801 | sd->offset += delta; |
802 | |
803 | if (sd->top != top) { |
804 | sdtop = sd; |
805 | top = sd->top; |
806 | JS_ASSERT(top == sd->before); |
807 | pivot = sd->offset; |
808 | pc = base + top; |
809 | op = (JSOp) *pc; |
810 | type = JOF_OPTYPE(op); |
811 | if (JOF_TYPE_IS_EXTENDED_JUMP(type)) { |
812 | /* |
813 | * We already extended all the jump offset operands for |
814 | * the opcode at sd->top. Jumps and branches have only |
815 | * one jump offset operand, but switches have many, all |
816 | * of which are adjacent in cg->spanDeps. |
817 | */ |
818 | continue; |
819 | } |
820 | |
821 | JS_ASSERT(type == JOF_JUMP || |
822 | type == JOF_TABLESWITCH || |
823 | type == JOF_LOOKUPSWITCH); |
824 | } |
825 | |
826 | if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) { |
827 | span = SD_SPAN(sd, pivot); |
828 | if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) { |
829 | ptrdiff_t deltaFromTop = 0; |
830 | |
831 | done = JS_FALSE; |
832 | |
833 | switch (op) { |
834 | case JSOP_GOTO: op = JSOP_GOTOX; break; |
835 | case JSOP_IFEQ: op = JSOP_IFEQX; break; |
836 | case JSOP_IFNE: op = JSOP_IFNEX; break; |
837 | case JSOP_OR: op = JSOP_ORX; break; |
838 | case JSOP_AND: op = JSOP_ANDX; break; |
839 | case JSOP_GOSUB: op = JSOP_GOSUBX; break; |
840 | case JSOP_CASE: op = JSOP_CASEX; break; |
841 | case JSOP_DEFAULT: op = JSOP_DEFAULTX; break; |
842 | case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break; |
843 | case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break; |
844 | default: |
845 | ReportStatementTooLarge(cx, cg); |
846 | return JS_FALSE; |
847 | } |
848 | *pc = (jsbytecode) op; |
849 | |
850 | for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) { |
851 | if (sd2 <= sd) { |
852 | /* |
853 | * sd2->offset already includes delta as it stood |
854 | * before we entered this loop, but it must also |
855 | * include the delta relative to top due to all the |
856 | * extended jump offset immediates for the opcode |
857 | * starting at top, which we extend in this loop. |
858 | * |
859 | * If there is only one extended jump offset, then |
860 | * sd2->offset won't change and this for loop will |
861 | * iterate once only. |
862 | */ |
863 | sd2->offset += deltaFromTop; |
864 | deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN; |
865 | } else { |
866 | /* |
867 | * sd2 comes after sd, and won't be revisited by |
868 | * the outer for loop, so we have to increase its |
869 | * offset by delta, not merely by deltaFromTop. |
870 | */ |
871 | sd2->offset += delta; |
872 | } |
873 | |
874 | delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN; |
875 | UpdateJumpTargets(cg->jumpTargets, sd2->offset, |
876 | JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN); |
877 | } |
878 | sd = sd2 - 1; |
879 | } |
880 | } |
881 | } |
882 | |
883 | growth += delta; |
884 | } while (!done); |
885 | |
886 | if (growth) { |
887 | #ifdef DEBUG_brendan |
888 | JSTokenStream *ts = &cg->treeContext.parseContext->tokenStream; |
889 | |
890 | printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n", |
891 | ts->filename ? ts->filename : "stdin", cg->firstLine, |
892 | growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps, |
893 | passes, offset + growth, offset, growth); |
894 | #endif |
895 | |
896 | /* |
897 | * Ensure that we have room for the extended jumps, but don't round up |
898 | * to a power of two -- we're done generating code, so we cut to fit. |
899 | */ |
900 | limit = CG_LIMIT(cg); |
901 | length = offset + growth; |
902 | next = base + length; |
903 | if (next > limit) { |
904 | JS_ASSERT(length > BYTECODE_CHUNK); |
905 | size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode)); |
906 | incr = BYTECODE_SIZE(length) - size; |
907 | JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr); |
908 | if (!base) { |
909 | js_ReportOutOfScriptQuota(cx); |
910 | return JS_FALSE; |
911 | } |
912 | CG_BASE(cg) = base; |
913 | CG_LIMIT(cg) = next = base + length; |
914 | } |
915 | CG_NEXT(cg) = next; |
916 | |
917 | /* |
918 | * Set up a fake span dependency record to guard the end of the code |
919 | * being generated. This guard record is returned as a fencepost by |
920 | * FindNearestSpanDep if there is no real spandep at or above a given |
921 | * unextended code offset. |
922 | */ |
923 | guard.top = -1; |
924 | guard.offset = offset + growth; |
925 | guard.before = offset; |
926 | guard.target = NULL; |
927 | } |
928 | |
929 | /* |
930 | * Now work backwards through the span dependencies, copying chunks of |
931 | * bytecode between each extended jump toward the end of the grown code |
932 | * space, and restoring immediate offset operands for all jump bytecodes. |
933 | * The first chunk of bytecodes, starting at base and ending at the first |
934 | * extended jump offset (NB: this chunk includes the operation bytecode |
935 | * just before that immediate jump offset), doesn't need to be copied. |
936 | */ |
937 | JS_ASSERT(sd == sdlimit); |
938 | top = -1; |
939 | while (--sd >= sdbase) { |
940 | if (sd->top != top) { |
941 | top = sd->top; |
942 | op = (JSOp) base[top]; |
943 | type = JOF_OPTYPE(op); |
944 | |
945 | for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--) |
946 | continue; |
947 | sd2++; |
948 | pivot = sd2->offset; |
949 | JS_ASSERT(top == sd2->before); |
950 | } |
951 | |
952 | oldpc = base + sd->before; |
953 | span = SD_SPAN(sd, pivot); |
954 | |
955 | /* |
956 | * If this jump didn't need to be extended, restore its span immediate |
957 | * offset operand now, overwriting the index of sd within cg->spanDeps |
958 | * that was stored temporarily after *pc when BuildSpanDepTable ran. |
959 | * |
960 | * Note that span might fit in 16 bits even for an extended jump op, |
961 | * if the op has multiple span operands, not all of which overflowed |
962 | * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in |
963 | * range for a short jump, but others are not). |
964 | */ |
965 | if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) { |
966 | JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX); |
967 | SET_JUMP_OFFSET(oldpc, span); |
968 | continue; |
969 | } |
970 | |
971 | /* |
972 | * Set up parameters needed to copy the next run of bytecode starting |
973 | * at offset (which is a cursor into the unextended, original bytecode |
974 | * vector), down to sd->before (a cursor of the same scale as offset, |
975 | * it's the index of the original jump pc). Reuse delta to count the |
976 | * nominal number of bytes to copy. |
977 | */ |
978 | pc = base + sd->offset; |
979 | delta = offset - sd->before; |
980 | JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN); |
981 | |
982 | /* |
983 | * Don't bother copying the jump offset we're about to reset, but do |
984 | * copy the bytecode at oldpc (which comes just before its immediate |
985 | * jump offset operand), on the next iteration through the loop, by |
986 | * including it in offset's new value. |
987 | */ |
988 | offset = sd->before + 1; |
989 | size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN)); |
990 | if (size) { |
991 | memmove(pc + 1 + JUMPX_OFFSET_LEN, |
992 | oldpc + 1 + JUMP_OFFSET_LEN, |
993 | size); |
994 | } |
995 | |
996 | SET_JUMPX_OFFSET(pc, span); |
997 | } |
998 | |
999 | if (growth) { |
1000 | /* |
1001 | * Fix source note deltas. Don't hardwire the delta fixup adjustment, |
1002 | * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN |
1003 | * at each sd that moved. The future may bring different offset sizes |
1004 | * for span-dependent instruction operands. However, we fix only main |
1005 | * notes here, not prolog notes -- we know that prolog opcodes are not |
1006 | * span-dependent, and aren't likely ever to be. |
1007 | */ |
1008 | offset = growth = 0; |
1009 | sd = sdbase; |
1010 | for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount; |
1011 | sn < snlimit; |
1012 | sn = SN_NEXT(sn)) { |
1013 | /* |
1014 | * Recall that the offset of a given note includes its delta, and |
1015 | * tells the offset of the annotated bytecode from the main entry |
1016 | * point of the script. |
1017 | */ |
1018 | offset += SN_DELTA(sn); |
1019 | while (sd < sdlimit && sd->before < offset) { |
1020 | /* |
1021 | * To compute the delta to add to sn, we need to look at the |
1022 | * spandep after sd, whose offset - (before + growth) tells by |
1023 | * how many bytes sd's instruction grew. |
1024 | */ |
1025 | sd2 = sd + 1; |
1026 | if (sd2 == sdlimit) |
1027 | sd2 = &guard; |
1028 | delta = sd2->offset - (sd2->before + growth); |
1029 | if (delta > 0) { |
1030 | JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN); |
1031 | sn = js_AddToSrcNoteDelta(cx, cg, sn, delta); |
1032 | if (!sn) |
1033 | return JS_FALSE; |
1034 | snlimit = cg->main.notes + cg->main.noteCount; |
1035 | growth += delta; |
1036 | } |
1037 | sd++; |
1038 | } |
1039 | |
1040 | /* |
1041 | * If sn has span-dependent offset operands, check whether each |
1042 | * covers further span-dependencies, and increase those operands |
1043 | * accordingly. Some source notes measure offset not from the |
1044 | * annotated pc, but from that pc plus some small bias. NB: we |
1045 | * assume that spec->offsetBias can't itself span span-dependent |
1046 | * instructions! |
1047 | */ |
1048 | spec = &js_SrcNoteSpec[SN_TYPE(sn)]; |
1049 | if (spec->isSpanDep) { |
1050 | pivot = offset + spec->offsetBias; |
1051 | n = spec->arity; |
1052 | for (i = 0; i < n; i++) { |
1053 | span = js_GetSrcNoteOffset(sn, i); |
1054 | if (span == 0) |
1055 | continue; |
1056 | target = pivot + span * spec->isSpanDep; |
1057 | sd2 = FindNearestSpanDep(cg, target, |
1058 | (target >= pivot) |
1059 | ? sd - sdbase |
1060 | : 0, |
1061 | &guard); |
1062 | |
1063 | /* |
1064 | * Increase target by sd2's before-vs-after offset delta, |
1065 | * which is absolute (i.e., relative to start of script, |
1066 | * as is target). Recompute the span by subtracting its |
1067 | * adjusted pivot from target. |
1068 | */ |
1069 | target += sd2->offset - sd2->before; |
1070 | span = target - (pivot + growth); |
1071 | span *= spec->isSpanDep; |
1072 | noteIndex = sn - cg->main.notes; |
1073 | if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span)) |
1074 | return JS_FALSE; |
1075 | sn = cg->main.notes + noteIndex; |
1076 | snlimit = cg->main.notes + cg->main.noteCount; |
1077 | } |
1078 | } |
1079 | } |
1080 | cg->main.lastNoteOffset += growth; |
1081 | |
1082 | /* |
1083 | * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's |
1084 | * not clear how we can beat that). |
1085 | */ |
1086 | for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) { |
1087 | /* |
1088 | * First, look for the nearest span dependency at/above tn->start. |
1089 | * There may not be any such spandep, in which case the guard will |
1090 | * be returned. |
1091 | */ |
1092 | offset = tryNode->note.start; |
1093 | sd = FindNearestSpanDep(cg, offset, 0, &guard); |
1094 | delta = sd->offset - sd->before; |
1095 | tryNode->note.start = offset + delta; |
1096 | |
1097 | /* |
1098 | * Next, find the nearest spandep at/above tn->start + tn->length. |
1099 | * Use its delta minus tn->start's delta to increase tn->length. |
1100 | */ |
1101 | length = tryNode->note.length; |
1102 | sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard); |
1103 | if (sd2 != sd) { |
1104 | tryNode->note.length = |
1105 | length + sd2->offset - sd2->before - delta; |
1106 | } |
1107 | } |
1108 | } |
1109 | |
1110 | #ifdef DEBUG_brendan |
1111 | { |
1112 | uintN bigspans = 0; |
1113 | top = -1; |
1114 | for (sd = sdbase; sd < sdlimit; sd++) { |
1115 | offset = sd->offset; |
1116 | |
1117 | /* NB: sd->top cursors into the original, unextended bytecode vector. */ |
1118 | if (sd->top != top) { |
1119 | JS_ASSERT(top == -1 || |
1120 | !JOF_TYPE_IS_EXTENDED_JUMP(type) || |
1121 | bigspans != 0); |
1122 | bigspans = 0; |
1123 | top = sd->top; |
1124 | JS_ASSERT(top == sd->before); |
1125 | op = (JSOp) base[offset]; |
1126 | type = JOF_OPTYPE(op); |
1127 | JS_ASSERT(type == JOF_JUMP || |
1128 | type == JOF_JUMPX || |
1129 | type == JOF_TABLESWITCH || |
1130 | type == JOF_TABLESWITCHX || |
1131 | type == JOF_LOOKUPSWITCH || |
1132 | type == JOF_LOOKUPSWITCHX); |
1133 | pivot = offset; |
1134 | } |
1135 | |
1136 | pc = base + offset; |
1137 | if (JOF_TYPE_IS_EXTENDED_JUMP(type)) { |
1138 | span = GET_JUMPX_OFFSET(pc); |
1139 | if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) { |
1140 | bigspans++; |
1141 | } else { |
1142 | JS_ASSERT(type == JOF_TABLESWITCHX || |
1143 | type == JOF_LOOKUPSWITCHX); |
1144 | } |
1145 | } else { |
1146 | span = GET_JUMP_OFFSET(pc); |
1147 | } |
1148 | JS_ASSERT(SD_SPAN(sd, pivot) == span); |
1149 | } |
1150 | JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0); |
1151 | } |
1152 | #endif |
1153 | |
1154 | /* |
1155 | * Reset so we optimize at most once -- cg may be used for further code |
1156 | * generation of successive, independent, top-level statements. No jump |
1157 | * can span top-level statements, because JS lacks goto. |
1158 | */ |
1159 | size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps))); |
1160 | JS_free(cx, cg->spanDeps); |
1161 | cg->spanDeps = NULL; |
1162 | FreeJumpTargets(cg, cg->jumpTargets); |
1163 | cg->jumpTargets = NULL; |
1164 | cg->numSpanDeps = cg->numJumpTargets = 0; |
1165 | cg->spanDepTodo = CG_OFFSET(cg); |
1166 | return JS_TRUE; |
1167 | } |
1168 | |
1169 | static ptrdiff_t |
1170 | EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off) |
1171 | { |
1172 | JSBool extend; |
1173 | ptrdiff_t jmp; |
1174 | jsbytecode *pc; |
1175 | |
1176 | extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off; |
1177 | if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg)) |
1178 | return -1; |
1179 | |
1180 | jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off)); |
1181 | if (jmp >= 0 && (extend || cg->spanDeps)) { |
1182 | pc = CG_CODE(cg, jmp); |
1183 | if (!AddSpanDep(cx, cg, pc, pc, off)) |
1184 | return -1; |
1185 | } |
1186 | return jmp; |
1187 | } |
1188 | |
/*
 * Return the jump offset for the jump bytecode at pc. When no span-dep
 * table exists, the offset lives directly in the bytecode's immediate
 * operand; otherwise it is derived from the jump's JSSpanDep record.
 */
static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    /* An untagged target field encodes a backpatch-chain delta directly. */
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    /*
     * Back up to the first span dep sharing this jump's top offset (the
     * opcode itself), then measure the target from that first operand.
     */
    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}
1210 | |
1211 | JSBool |
1212 | js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, |
1213 | ptrdiff_t off) |
1214 | { |
1215 | if (!cg->spanDeps) { |
1216 | if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) { |
1217 | SET_JUMP_OFFSET(pc, off); |
1218 | return JS_TRUE; |
1219 | } |
1220 | |
1221 | if (!BuildSpanDepTable(cx, cg)) |
1222 | return JS_FALSE; |
1223 | } |
1224 | |
1225 | return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off); |
1226 | } |
1227 | |
1228 | JSBool |
1229 | js_InStatement(JSTreeContext *tc, JSStmtType type) |
1230 | { |
1231 | JSStmtInfo *stmt; |
1232 | |
1233 | for (stmt = tc->topStmt; stmt; stmt = stmt->down) { |
1234 | if (stmt->type == type) |
1235 | return JS_TRUE; |
1236 | } |
1237 | return JS_FALSE; |
1238 | } |
1239 | |
1240 | void |
1241 | js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type, |
1242 | ptrdiff_t top) |
1243 | { |
1244 | stmt->type = type; |
1245 | stmt->flags = 0; |
1246 | SET_STATEMENT_TOP(stmt, top); |
1247 | stmt->u.label = NULL; |
1248 | JS_ASSERT(!stmt->u.blockObj); |
1249 | stmt->down = tc->topStmt; |
1250 | tc->topStmt = stmt; |
1251 | if (STMT_LINKS_SCOPE(stmt)) { |
1252 | stmt->downScope = tc->topScopeStmt; |
1253 | tc->topScopeStmt = stmt; |
1254 | } else { |
1255 | stmt->downScope = NULL; |
1256 | } |
1257 | } |
1258 | |
1259 | void |
1260 | js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj, |
1261 | ptrdiff_t top) |
1262 | { |
1263 | |
1264 | js_PushStatement(tc, stmt, STMT_BLOCK, top); |
1265 | stmt->flags |= SIF_SCOPE; |
1266 | STOBJ_SET_PARENT(blockObj, tc->blockChain); |
1267 | stmt->downScope = tc->topScopeStmt; |
1268 | tc->topScopeStmt = stmt; |
1269 | tc->blockChain = blockObj; |
1270 | stmt->u.blockObj = blockObj; |
1271 | } |
1272 | |
1273 | /* |
1274 | * Emit a backpatch op with offset pointing to the previous jump of this type, |
1275 | * so that we can walk back up the chain fixing up the op and jump offset. |
1276 | */ |
1277 | static ptrdiff_t |
1278 | EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp) |
1279 | { |
1280 | ptrdiff_t offset, delta; |
1281 | |
1282 | offset = CG_OFFSET(cg); |
1283 | delta = offset - *lastp; |
1284 | *lastp = offset; |
1285 | JS_ASSERT(delta > 0); |
1286 | return EmitJump(cx, cg, op, delta); |
1287 | } |
1288 | |
1289 | /* |
1290 | * Macro to emit a bytecode followed by a uint16 immediate operand stored in |
1291 | * big-endian order, used for arg and var numbers as well as for atomIndexes. |
1292 | * NB: We use cx and cg from our caller's lexical environment, and return |
1293 | * false on error. |
1294 | */ |
1295 | #define EMIT_UINT16_IMM_OP(op, i) \ |
1296 | JS_BEGIN_MACRO \ |
1297 | if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \ |
1298 | return JS_FALSE; \ |
1299 | JS_END_MACRO |
1300 | |
/*
 * Emit a hidden JSOP_POPN that pops *npops accumulated stack slots, then
 * reset the counter. Must be called with a nonzero count; returns false on
 * error. The SRC_HIDDEN note keeps the decompiler from showing the
 * synthesized pop.
 */
static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}
1311 | |
1312 | /* |
1313 | * Emit additional bytecode(s) for non-local jumps. |
1314 | */ |
1315 | static JSBool |
1316 | EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt) |
1317 | { |
1318 | intN depth, npops; |
1319 | JSStmtInfo *stmt; |
1320 | |
1321 | /* |
1322 | * The non-local jump fixup we emit will unbalance cg->stackDepth, because |
1323 | * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the |
1324 | * end of a with statement, so we save cg->stackDepth here and restore it |
1325 | * just before a successful return. |
1326 | */ |
1327 | depth = cg->stackDepth; |
1328 | npops = 0; |
1329 | |
1330 | #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE |
1331 | |
1332 | for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) { |
1333 | switch (stmt->type) { |
1334 | case STMT_FINALLY: |
1335 | FLUSH_POPS(); |
1336 | if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0) |
1337 | return JS_FALSE; |
1338 | if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0) |
1339 | return JS_FALSE; |
1340 | break; |
1341 | |
1342 | case STMT_WITH: |
1343 | /* There's a With object on the stack that we need to pop. */ |
1344 | FLUSH_POPS(); |
1345 | if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0) |
1346 | return JS_FALSE; |
1347 | if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0) |
1348 | return JS_FALSE; |
1349 | break; |
1350 | |
1351 | case STMT_FOR_IN_LOOP: |
1352 | /* |
1353 | * The iterator and the object being iterated need to be popped. |
1354 | */ |
1355 | FLUSH_POPS(); |
1356 | if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0) |
1357 | return JS_FALSE; |
1358 | if (js_Emit1(cx, cg, JSOP_ENDITER) < 0) |
1359 | return JS_FALSE; |
1360 | break; |
1361 | |
1362 | case STMT_SUBROUTINE: |
1363 | /* |
1364 | * There's a [exception or hole, retsub pc-index] pair on the |
1365 | * stack that we need to pop. |
1366 | */ |
1367 | npops += 2; |
1368 | break; |
1369 | |
1370 | default:; |
1371 | } |
1372 | |
1373 | if (stmt->flags & SIF_SCOPE) { |
1374 | uintN i; |
1375 | |
1376 | /* There is a Block object with locals on the stack to pop. */ |
1377 | FLUSH_POPS(); |
1378 | if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0) |
1379 | return JS_FALSE; |
1380 | i = OBJ_BLOCK_COUNT(cx, stmt->u.blockObj); |
1381 | EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i); |
1382 | } |
1383 | } |
1384 | |
1385 | FLUSH_POPS(); |
1386 | cg->stackDepth = depth; |
1387 | return JS_TRUE; |
1388 | |
1389 | #undef FLUSH_POPS |
1390 | } |
1391 | |
1392 | static ptrdiff_t |
1393 | EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt, |
1394 | ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType) |
1395 | { |
1396 | intN index; |
1397 | |
1398 | if (!EmitNonLocalJumpFixup(cx, cg, toStmt)) |
1399 | return -1; |
1400 | |
1401 | if (label) |
1402 | index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label)); |
1403 | else if (noteType != SRC_NULL) |
1404 | index = js_NewSrcNote(cx, cg, noteType); |
1405 | else |
1406 | index = 0; |
1407 | if (index < 0) |
1408 | return -1; |
1409 | |
1410 | return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp); |
1411 | } |
1412 | |
/*
 * Walk the backpatch chain starting at bytecode offset last, rewriting
 * each JSOP_BACKPATCH* link to op and pointing its jump offset at target.
 * Each link's operand holds the delta back to the previous link; the
 * chain terminates when the walk steps back to offset -1.
 */
static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = PTRDIFF(target, pc, jsbytecode);
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}
1437 | |
1438 | void |
1439 | js_PopStatement(JSTreeContext *tc) |
1440 | { |
1441 | JSStmtInfo *stmt; |
1442 | |
1443 | stmt = tc->topStmt; |
1444 | tc->topStmt = stmt->down; |
1445 | if (STMT_LINKS_SCOPE(stmt)) { |
1446 | tc->topScopeStmt = stmt->downScope; |
1447 | if (stmt->flags & SIF_SCOPE) { |
1448 | tc->blockChain = STOBJ_GET_PARENT(stmt->u.blockObj); |
1449 | JS_SCOPE_DEPTH_METERING(--tc->scopeDepth); |
1450 | } |
1451 | } |
1452 | } |
1453 | |
1454 | JSBool |
1455 | js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg) |
1456 | { |
1457 | JSStmtInfo *stmt; |
1458 | |
1459 | stmt = cg->treeContext.topStmt; |
1460 | if (!STMT_IS_TRYING(stmt) && |
1461 | (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) || |
1462 | !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update), |
1463 | JSOP_GOTO))) { |
1464 | return JS_FALSE; |
1465 | } |
1466 | js_PopStatement(&cg->treeContext); |
1467 | return JS_TRUE; |
1468 | } |
1469 | |
/*
 * If pn is a compile-time-constant initializer (currently only numeric
 * literals), record atom -> value in cg->constList so later name lookups
 * can fold the constant. Non-numeric initializers are silently ignored.
 * Returns JS_FALSE only if atomizing or indexing fails (e.g. OOM).
 */
JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;
    JSAtomListElement *ale;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            /* Integral value in range: store it as an int-tagged jsval. */
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constList. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        ale = js_IndexAtom(cx, atom, &cg->constList);
        if (!ale)
            return JS_FALSE;
        ALE_SET_VALUE(ale, v);
    }
    return JS_TRUE;
}
1503 | |
/*
 * Search tc's scope-statement chain for a let binding of atom, stopping at
 * the innermost with statement (which defeats compile-time resolution).
 * On a hit, return the SIF_SCOPE statement and, when slotp is non-null,
 * store the binding's stack slot (block depth + shortid) in *slotp.
 * Otherwise return the terminating with statement or NULL, setting *slotp
 * to -1 when slotp is non-null.
 */
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp)
{
    JSStmtInfo *stmt;
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->u.blockObj;
        JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
        scope = OBJ_SCOPE(obj);
        sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                /* Slot = block's base stack depth + binding's shortid. */
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}
1540 | |
1541 | /* |
1542 | * Check if the attributes describe a property holding a compile-time constant |
1543 | * or a permanent, read-only property without a getter. |
1544 | */ |
1545 | #define IS_CONSTANT_PROPERTY(attrs) \ |
1546 | (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \ |
1547 | (JSPROP_READONLY | JSPROP_PERMANENT)) |
1548 | |
1549 | /* |
1550 | * The function sets vp to JSVAL_HOLE when the atom does not corresponds to a |
1551 | * name defining a constant. |
1552 | */ |
1553 | static JSBool |
1554 | LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom, |
1555 | jsval *vp) |
1556 | { |
1557 | JSBool ok; |
1558 | JSStmtInfo *stmt; |
1559 | JSAtomListElement *ale; |
1560 | JSObject *obj, *pobj; |
1561 | JSProperty *prop; |
1562 | uintN attrs; |
1563 | |
1564 | /* |
1565 | * Chase down the cg stack, but only until we reach the outermost cg. |
1566 | * This enables propagating consts from top-level into switch cases in a |
1567 | * function compiled along with the top-level script. |
1568 | */ |
1569 | *vp = JSVAL_HOLE; |
1570 | do { |
1571 | if (cg->treeContext.flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) { |
1572 | /* XXX this will need revising when 'let const' is added. */ |
1573 | stmt = js_LexicalLookup(&cg->treeContext, atom, NULL); |
1574 | if (stmt) |
1575 | return JS_TRUE; |
1576 | |
1577 | ATOM_LIST_SEARCH(ale, &cg->constList, atom); |
1578 | if (ale) { |
1579 | JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE); |
1580 | *vp = ALE_VALUE(ale); |
1581 | return JS_TRUE; |
1582 | } |
1583 | |
1584 | /* |
1585 | * Try looking in the variable object for a direct property that |
1586 | * is readonly and permanent. We know such a property can't be |
1587 | * shadowed by another property on obj's prototype chain, or a |
1588 | * with object or catch variable; nor can prop's value be changed, |
1589 | * nor can prop be deleted. |
1590 | */ |
1591 | if (cg->treeContext.flags & TCF_IN_FUNCTION) { |
1592 | if (js_LookupLocal(cx, cg->treeContext.u.fun, atom, NULL) != |
1593 | JSLOCAL_NONE) { |
1594 | break; |
1595 | } |
1596 | } else { |
1597 | JS_ASSERT(cg->treeContext.flags & TCF_COMPILE_N_GO); |
1598 | obj = cg->treeContext.u.scopeChain; |
1599 | ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &pobj, |
1600 | &prop); |
1601 | if (!ok) |
1602 | return JS_FALSE; |
1603 | if (pobj == obj) { |
1604 | /* |
1605 | * We're compiling code that will be executed immediately, |
1606 | * not re-executed against a different scope chain and/or |
1607 | * variable object. Therefore we can get constant values |
1608 | * from our variable object here. |
1609 | */ |
1610 | ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop, |
1611 | &attrs); |
1612 | if (ok && IS_CONSTANT_PROPERTY(attrs)) { |
1613 | ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp); |
1614 | JS_ASSERT_IF(ok, *vp != JSVAL_HOLE); |
1615 | } |
1616 | } |
1617 | if (prop) |
1618 | OBJ_DROP_PROPERTY(cx, pobj, prop); |
1619 | if (!ok) |
1620 | return JS_FALSE; |
1621 | if (prop) |
1622 | break; |
1623 | } |
1624 | } |
1625 | } while ((cg = cg->parent) != NULL); |
1626 | return JS_TRUE; |
1627 | } |
1628 | |
1629 | /* |
1630 | * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment |
1631 | * reset instruction is necessary, JSOP_FALSE to indicate an error or either |
1632 | * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue |
1633 | * after the main bytecode sequence. |
1634 | */ |
1635 | static JSOp |
1636 | EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index) |
1637 | { |
1638 | uintN indexBase; |
1639 | |
1640 | /* |
1641 | * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only |
1642 | * for big indexes. |
1643 | */ |
1644 | JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24)); |
1645 | JS_STATIC_ASSERT(INDEX_LIMIT >= |
1646 | (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16); |
1647 | |
1648 | if (index < JS_BIT(16)) |
1649 | return JSOP_NOP; |
1650 | indexBase = index >> 16; |
1651 | if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) { |
1652 | if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0) |
1653 | return JSOP_FALSE; |
1654 | return JSOP_RESETBASE0; |
1655 | } |
1656 | |
1657 | if (index >= INDEX_LIMIT) { |
1658 | JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, |
1659 | JSMSG_TOO_MANY_LITERALS); |
1660 | return JSOP_FALSE; |
1661 | } |
1662 | |
1663 | if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0) |
1664 | return JSOP_FALSE; |
1665 | return JSOP_RESETBASE; |
1666 | } |
1667 | |
1668 | /* |
1669 | * Emit a bytecode and its 2-byte constant index immediate operand. If the |
1670 | * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate |
1671 | * operand effectively extends the 16-bit immediate of the prefixed opcode, |
1672 | * by changing index "segment" (see jsinterp.c). We optimize segments 1-3 |
1673 | * with single-byte JSOP_INDEXBASE[123] codes. |
1674 | * |
1675 | * Such prefixing currently requires a suffix to restore the "zero segment" |
1676 | * register setting, but this could be optimized further. |
1677 | */ |
1678 | static JSBool |
1679 | EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg) |
1680 | { |
1681 | JSOp bigSuffix; |
1682 | |
1683 | bigSuffix = EmitBigIndexPrefix(cx, cg, index); |
1684 | if (bigSuffix == JSOP_FALSE) |
1685 | return JS_FALSE; |
1686 | EMIT_UINT16_IMM_OP(op, index); |
1687 | return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0; |
1688 | } |
1689 | |
1690 | /* |
1691 | * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro |
1692 | * caller's lexical environment, and embedding a false return on error. |
1693 | */ |
1694 | #define EMIT_INDEX_OP(op, index) \ |
1695 | JS_BEGIN_MACRO \ |
1696 | if (!EmitIndexOp(cx, op, index, cg)) \ |
1697 | return JS_FALSE; \ |
1698 | JS_END_MACRO |
1699 | |
1700 | |
1701 | static JSBool |
1702 | EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg) |
1703 | { |
1704 | JSAtomListElement *ale; |
1705 | |
1706 | JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); |
1707 | if (op == JSOP_GETPROP && |
1708 | pn->pn_atom == cx->runtime->atomState.lengthAtom) { |
1709 | return js_Emit1(cx, cg, JSOP_LENGTH) >= 0; |
1710 | } |
1711 | ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList); |
1712 | if (!ale) |
1713 | return JS_FALSE; |
1714 | return EmitIndexOp(cx, op, ALE_INDEX(ale), cg); |
1715 | } |
1716 | |
/* Forward declaration; defined later in this file. */
static uintN
IndexParsedObject(JSParsedObjectBox *pob, JSEmittedObjectList *list);

/*
 * Emit an object-immediate op whose operand indexes pob within cg's
 * emitted-object list.
 */
static JSBool
EmitObjectOp(JSContext *cx, JSParsedObjectBox *pob, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, IndexParsedObject(pob, &cg->objectList), cg);
}
1727 | |
1728 | /* |
1729 | * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart |
1730 | * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in |
1731 | * other parts of the code there's no necessary relationship between the two. |
1732 | * The abstraction cracks here in order to share EmitSlotIndexOp code among |
1733 | * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases. |
1734 | */ |
1735 | JS_STATIC_ASSERT(ARGNO_LEN == 2); |
1736 | JS_STATIC_ASSERT(SLOTNO_LEN == 2); |
1737 | |
1738 | static JSBool |
1739 | EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index, |
1740 | JSCodeGenerator *cg) |
1741 | { |
1742 | JSOp bigSuffix; |
1743 | ptrdiff_t off; |
1744 | jsbytecode *pc; |
1745 | |
1746 | JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM || |
1747 | JOF_OPTYPE(op) == JOF_SLOTOBJECT); |
1748 | bigSuffix = EmitBigIndexPrefix(cx, cg, index); |
1749 | if (bigSuffix == JSOP_FALSE) |
1750 | return JS_FALSE; |
1751 | |
1752 | /* Emit [op, slot, index]. */ |
1753 | off = js_EmitN(cx, cg, op, 2 + INDEX_LEN); |
1754 | if (off < 0) |
1755 | return JS_FALSE; |
1756 | pc = CG_CODE(cg, off); |
1757 | SET_UINT16(pc, slot); |
1758 | pc += 2; |
1759 | SET_INDEX(pc, index); |
1760 | return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0; |
1761 | } |
1762 | |
1763 | /* |
1764 | * Adjust the slot for a block local to account for the number of variables |
1765 | * that share the same index space with locals. Due to the incremental code |
1766 | * generation for top-level script, we do the adjustment via code patching in |
1767 | * js_CompileScript; see comments there. |
1768 | * |
1769 | * The function returns -1 on failures. |
1770 | */ |
1771 | static jsint |
1772 | AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot) |
1773 | { |
1774 | JS_ASSERT((jsuint) slot < cg->maxStackDepth); |
1775 | if (cg->treeContext.flags & TCF_IN_FUNCTION) { |
1776 | slot += cg->treeContext.u.fun->u.i.nvars; |
1777 | if ((uintN) slot >= SLOTNO_LIMIT) { |
1778 | js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL, |
1779 | JSREPORT_ERROR, |
1780 | JSMSG_TOO_MANY_LOCALS); |
1781 | slot = -1; |
1782 | } |
1783 | } |
1784 | return slot; |
1785 | } |
1786 | |
1787 | /* |
1788 | * This routine tries to optimize name gets and sets to stack slot loads and |
1789 | * stores, given the variables object and scope chain in cx's top frame, the |
1790 | * compile-time context in tc, and a TOK_NAME node pn. It returns false on |
1791 | * error, true on success. |
1792 | * |
1793 | * The caller can inspect pn->pn_slot for a non-negative slot number to tell |
1794 | * whether optimization occurred, in which case BindNameToSlot also updated |
1795 | * pn->pn_op. If pn->pn_slot is still -1 on return, pn->pn_op nevertheless |
1796 | * may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS. Whether |
1797 | * or not pn->pn_op was modified, if this function finds an argument or local |
1798 | * variable name, pn->pn_const will be true for const properties after a |
1799 | * successful return. |
1800 | * |
1801 | * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget |
1802 | * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases |
1803 | * in js_EmitTree. |
1804 | */ |
static JSBool
BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    JSTreeContext *tc;
    JSAtom *atom;
    JSStmtInfo *stmt;
    jsint slot;
    JSOp op;
    JSLocalKind localKind;
    uintN index;
    JSAtomListElement *ale;
    JSBool constOp;

    JS_ASSERT(pn->pn_type == TOK_NAME);

    /* Nothing to do if pn is already bound to a slot or is the magic arguments op. */
    if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
        return JS_TRUE;

    /* QNAME references can never be optimized to use arg/var storage. */
    if (pn->pn_op == JSOP_QNAMEPART)
        return JS_TRUE;

    /*
     * We can't optimize if we are compiling a with statement and its body,
     * or we're in a catch block whose exception variable has the same name
     * as this node.  FIXME: we should be able to optimize catch vars to be
     * block-locals.
     */
    tc = &cg->treeContext;
    atom = pn->pn_atom;
    stmt = js_LexicalLookup(tc, atom, &slot);
    if (stmt) {
        if (stmt->type == STMT_WITH)
            return JS_TRUE;

        /* The name is bound by a block scope: specialize to a local-slot op. */
        JS_ASSERT(stmt->flags & SIF_SCOPE);
        JS_ASSERT(slot >= 0);
        op = PN_OP(pn);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          /* delete of a block-scoped binding always evaluates to false. */
          case JSOP_DELNAME:  op = JSOP_FALSE; break;
          default: JS_ASSERT(0);
        }
        if (op != pn->pn_op) {
            /* Rebase the block-relative slot; AdjustBlockSlot returns < 0 on error. */
            slot = AdjustBlockSlot(cx, cg, slot);
            if (slot < 0)
                return JS_FALSE;
            pn->pn_op = op;
            pn->pn_slot = slot;
        }
        return JS_TRUE;
    }

    /*
     * We can't optimize if var and closure (a local function not in a larger
     * expression and not at top-level within another's body) collide.
     * XXX suboptimal: keep track of colliding names and deoptimize only those
     */
    if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
        return JS_TRUE;

    if (!(tc->flags & TCF_IN_FUNCTION)) {
        JSStackFrame *caller;

        caller = tc->parseContext->callerFrame;
        if (caller) {
            JS_ASSERT(tc->flags & TCF_COMPILE_N_GO);
            JS_ASSERT(caller->script);
            if (!caller->fun || caller->varobj != tc->u.scopeChain)
                return JS_TRUE;

            /*
             * We are compiling eval or debug script inside a function frame
             * and the scope chain matches function's variable object.
             * Optimize access to function's arguments and variable and the
             * arguments object.
             */
            if (PN_OP(pn) != JSOP_NAME || cg->staticDepth > JS_DISPLAY_SIZE)
                goto arguments_check;
            localKind = js_LookupLocal(cx, caller->fun, atom, &index);
            if (localKind == JSLOCAL_NONE)
                goto arguments_check;

            /* Reuse an existing upvar entry for atom, or create one. */
            ATOM_LIST_SEARCH(ale, &cg->upvarList, atom);
            if (!ale) {
                uint32 length, *vector;

                ale = js_IndexAtom(cx, atom, &cg->upvarList);
                if (!ale)
                    return JS_FALSE;
                JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

                /* Grow the upvar cookie vector by doubling (minimum 4 entries). */
                length = cg->upvarMap.length;
                JS_ASSERT(ALE_INDEX(ale) <= length);
                if (ALE_INDEX(ale) == length) {
                    length = 2 * JS_MAX(2, length);
                    vector = (uint32 *)
                             JS_realloc(cx, cg->upvarMap.vector,
                                        length * sizeof *vector);
                    if (!vector)
                        return JS_FALSE;
                    cg->upvarMap.vector = vector;
                    cg->upvarMap.length = length;
                }

                /* Vars are numbered after args in the caller's frame. */
                if (localKind != JSLOCAL_ARG)
                    index += caller->fun->nargs;

                /* The upvar cookie packs index into 16 bits; deoptimize if too big. */
                if (index >= JS_BIT(16)) {
                    cg->treeContext.flags |= TCF_FUN_USES_NONLOCALS;
                    return JS_TRUE;
                }

                JS_ASSERT(cg->staticDepth > caller->fun->u.i.script->staticDepth);
                uintN skip = cg->staticDepth - caller->fun->u.i.script->staticDepth;
                cg->upvarMap.vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
            }

            pn->pn_op = JSOP_GETUPVAR;
            pn->pn_slot = ALE_INDEX(ale);
            return JS_TRUE;
        }

        /*
         * We are optimizing global variables and there may be no pre-existing
         * global property named atom.  If atom was declared via const or var,
         * optimize pn to access fp->vars using the appropriate JSOP_*GVAR op.
         */
        ATOM_LIST_SEARCH(ale, &tc->decls, atom);
        if (!ale) {
            /* Use precedes declaration, or name is never declared. */
            return JS_TRUE;
        }
        constOp = (ALE_JSOP(ale) == JSOP_DEFCONST);

        /* Index atom so we can map fast global number to name. */
        ale = js_IndexAtom(cx, atom, &cg->atomList);
        if (!ale)
            return JS_FALSE;

        /* Defend against tc->ngvars 16-bit overflow. */
        slot = ALE_INDEX(ale);
        if ((slot + 1) >> 16)
            return JS_TRUE;

        if ((uint16)(slot + 1) > tc->ngvars)
            tc->ngvars = (uint16)(slot + 1);

        op = PN_OP(pn);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETGVAR; break;
          case JSOP_SETNAME:  op = JSOP_SETGVAR; break;
          case JSOP_SETCONST: /* NB: no change */ break;
          case JSOP_INCNAME:  op = JSOP_INCGVAR; break;
          case JSOP_NAMEINC:  op = JSOP_GVARINC; break;
          case JSOP_DECNAME:  op = JSOP_DECGVAR; break;
          case JSOP_NAMEDEC:  op = JSOP_GVARDEC; break;
          case JSOP_FORNAME:  /* NB: no change */ break;
          case JSOP_DELNAME:  /* NB: no change */ break;
          default: JS_NOT_REACHED("gvar");
        }
        pn->pn_const = constOp;
        if (op != pn->pn_op) {
            pn->pn_op = op;
            pn->pn_slot = slot;
        }
        return JS_TRUE;
    }

    if (tc->flags & TCF_IN_FUNCTION) {
        /*
         * We are compiling a function body and may be able to optimize name
         * to stack slot.  Look for an argument or variable in the function and
         * rewrite pn_op and update pn accordingly.
         */
        localKind = js_LookupLocal(cx, tc->u.fun, atom, &index);
        if (localKind != JSLOCAL_NONE) {
            op = PN_OP(pn);
            if (localKind == JSLOCAL_ARG) {
                switch (op) {
                  case JSOP_NAME:     op = JSOP_GETARG; break;
                  case JSOP_SETNAME:  op = JSOP_SETARG; break;
                  case JSOP_INCNAME:  op = JSOP_INCARG; break;
                  case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
                  case JSOP_DECNAME:  op = JSOP_DECARG; break;
                  case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
                  case JSOP_FORNAME:  op = JSOP_FORARG; break;
                  /* delete of an argument always evaluates to false. */
                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
                  default: JS_NOT_REACHED("arg");
                }
                /* Arguments are never const. */
                pn->pn_const = JS_FALSE;
            } else {
                JS_ASSERT(localKind == JSLOCAL_VAR ||
                          localKind == JSLOCAL_CONST);
                switch (op) {
                  case JSOP_NAME:     op = JSOP_GETLOCAL; break;
                  case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
                  case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
                  case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
                  case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
                  case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
                  case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
                  case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
                  /* delete of a var/const local always evaluates to false. */
                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
                  default: JS_NOT_REACHED("local");
                }
                pn->pn_const = (localKind == JSLOCAL_CONST);
            }
            pn->pn_op = op;
            pn->pn_slot = index;
            return JS_TRUE;
        }
        tc->flags |= TCF_FUN_USES_NONLOCALS;
    }

  arguments_check:
    /*
     * Here we are either compiling a function body, or an eval or debug
     * script inside a function, and couldn't optimize pn, so it's not a
     * global or local slot name.  We are also outside of any with blocks.
     * Check if we can optimize the predefined arguments variable.
     */
    JS_ASSERT((tc->flags & TCF_IN_FUNCTION) ||
              (tc->parseContext->callerFrame &&
               tc->parseContext->callerFrame->fun &&
               tc->parseContext->callerFrame->varobj == tc->u.scopeChain));
    if (pn->pn_op == JSOP_NAME &&
        atom == cx->runtime->atomState.argumentsAtom) {
        pn->pn_op = JSOP_ARGUMENTS;
        return JS_TRUE;
    }
    return JS_TRUE;
}
2042 | |
2043 | /* |
2044 | * If pn contains a useful expression, return true with *answer set to true. |
2045 | * If pn contains a useless expression, return true with *answer set to false. |
2046 | * Return false on error. |
2047 | * |
2048 | * The caller should initialize *answer to false and invoke this function on |
2049 | * an expression statement or similar subtree to decide whether the tree could |
2050 | * produce code that has any side effects. For an expression statement, we |
2051 | * define useless code as code with no side effects, because the main effect, |
2052 | * the value left on the stack after the code executes, will be discarded by a |
2053 | * pop bytecode. |
2054 | */ |
static JSBool
CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
                 JSBool *answer)
{
    JSBool ok;
    JSFunction *fun;
    JSParseNode *pn2;

    ok = JS_TRUE;
    /* Once *answer is true, no further analysis can change the outcome. */
    if (!pn || *answer)
        return ok;

    switch (pn->pn_arity) {
      case PN_FUNC:
        /*
         * A named function is presumed useful: we can't yet know that it is
         * not called.  The side effects are the creation of a scope object
         * to parent this function object, and the binding of the function's
         * name in that scope object.  See comments at case JSOP_NAMEDFUNOBJ:
         * in jsinterp.c.
         */
        fun = (JSFunction *) pn->pn_funpob->object;
        if (fun->atom)
            *answer = JS_TRUE;
        break;

      case PN_LIST:
        if (pn->pn_op == JSOP_NOP ||
            pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
            pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, cg, pn2, answer);
        } else {
            /*
             * All invocation operations (construct: TOK_NEW, call: TOK_LP)
             * are presumed to be useful, because they may have side effects
             * even if their main effect (their return value) is discarded.
             *
             * TOK_LB binary trees of 3 or more nodes are flattened into lists
             * to avoid too much recursion.  All such lists must be presumed
             * to be useful because each index operation could invoke a getter
             * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
             * does not apply here: arguments[i][j] might invoke a getter).
             *
             * Likewise, array and object initialisers may call prototype
             * setters (the __defineSetter__ built-in, and writable __proto__
             * on Array.prototype create this hazard).  Initialiser list nodes
             * have JSOP_NEWINIT in their pn_op.
             */
            *answer = JS_TRUE;
        }
        break;

      case PN_TERNARY:
        ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid3, answer);
        break;

      case PN_BINARY:
        if (pn->pn_type == TOK_ASSIGN) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            pn2 = pn->pn_left;
            if (pn2->pn_type != TOK_NAME) {
                *answer = JS_TRUE;
            } else {
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
                    return JS_FALSE;
                /* Useless only for a simple (JSOP_NOP) assignment to a const slot. */
                if (!*answer &&
                    (pn->pn_op != JSOP_NOP ||
                     pn2->pn_slot < 0 ||
                     !pn2->pn_const)) {
                    *answer = JS_TRUE;
                }
            }
        } else {
            if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
                pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
                /*
                 * ||, &&, ===, and !== do not convert their operands via
                 * toString or valueOf method calls.
                 */
                ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
                     CheckSideEffects(cx, cg, pn->pn_right, answer);
            } else {
                /*
                 * We can't easily prove that neither operand ever denotes an
                 * object with a toString or valueOf method.
                 */
                *answer = JS_TRUE;
            }
        }
        break;

      case PN_UNARY:
        switch (pn->pn_type) {
          case TOK_RP:
            /* Parenthesized expression: defer to its kid. */
            ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
            break;

          case TOK_DELETE:
            pn2 = pn->pn_kid;
            switch (pn2->pn_type) {
              case TOK_NAME:
              case TOK_DOT:
#if JS_HAS_XML_SUPPORT
              case TOK_DBLDOT:
#endif
#if JS_HAS_LVALUE_RETURN
              case TOK_LP:
#endif
              case TOK_LB:
                /* All these delete addressing modes have effects too. */
                *answer = JS_TRUE;
                break;
              default:
                ok = CheckSideEffects(cx, cg, pn2, answer);
                break;
            }
            break;

          case TOK_UNARYOP:
            if (pn->pn_op == JSOP_NOT) {
                /* ! does not convert its operand via toString or valueOf. */
                ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
                break;
            }
            /* FALL THROUGH */

          default:
            /*
             * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
             * have direct effects.  Of the remaining unary-arity node types,
             * we can't easily prove that the operand never denotes an object
             * with a toString or valueOf method.
             */
            *answer = JS_TRUE;
            break;
        }
        break;

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
            if (!BindNameToSlot(cx, cg, pn))
                return JS_FALSE;
            if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
                /*
                 * Not an argument or local variable use, so this expression
                 * could invoke a getter that has side effects.
                 */
                *answer = JS_TRUE;
            }
        }
        pn2 = pn->pn_expr;
        if (pn->pn_type == TOK_DOT) {
            if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (!(pn2->pn_op == JSOP_ARGUMENTS &&
                  pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
                /*
                 * Any dotted property reference could call a getter, except
                 * for arguments.length where arguments is unambiguous.
                 */
                *answer = JS_TRUE;
            }
        }
        ok = CheckSideEffects(cx, cg, pn2, answer);
        break;

      case PN_NULLARY:
        if (pn->pn_type == TOK_DEBUGGER)
            *answer = JS_TRUE;
        break;
    }
    return ok;
}
2250 | |
2251 | static JSBool |
2252 | EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn, |
2253 | JSBool callContext) |
2254 | { |
2255 | JSOp op; |
2256 | |
2257 | if (!BindNameToSlot(cx, cg, pn)) |
2258 | return JS_FALSE; |
2259 | op = PN_OP(pn); |
2260 | |
2261 | if (callContext) { |
2262 | switch (op) { |
2263 | case JSOP_NAME: |
2264 | op = JSOP_CALLNAME; |
2265 | break; |
2266 | case JSOP_GETGVAR: |
2267 | op = JSOP_CALLGVAR; |
2268 | break; |
2269 | case JSOP_GETARG: |
2270 | op = JSOP_CALLARG; |
2271 | break; |
2272 | case JSOP_GETLOCAL: |
2273 | op = JSOP_CALLLOCAL; |
2274 | break; |
2275 | case JSOP_GETUPVAR: |
2276 | op = JSOP_CALLUPVAR; |
2277 | break; |
2278 | default: |
2279 | JS_ASSERT(op == JSOP_ARGUMENTS); |
2280 | break; |
2281 | } |
2282 | } |
2283 | |
2284 | if (op == JSOP_ARGUMENTS) { |
2285 | if (js_Emit1(cx, cg, op) < 0) |
2286 | return JS_FALSE; |
2287 | if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0) |
2288 | return JS_FALSE; |
2289 | } else { |
2290 | if (pn->pn_slot >= 0) { |
2291 | EMIT_UINT16_IMM_OP(op, pn->pn_slot); |
2292 | } else { |
2293 | if (!EmitAtomOp(cx, pn, op, cg)) |
2294 | return JS_FALSE; |
2295 | } |
2296 | } |
2297 | |
2298 | return JS_TRUE; |
2299 | } |
2300 | |
#if JS_HAS_XML_SUPPORT
/*
 * Emit the kid of a JSOP_XMLNAME unary node, then the requested op
 * (JSOP_XMLNAME or JSOP_CALLXMLNAME) annotated with a SRC_PCBASE note
 * so the decompiler can locate the start of the name expression.
 */
static JSBool
EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSParseNode *kid;
    uintN savedFlags;

    JS_ASSERT(pn->pn_type == TOK_UNARYOP);
    JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
    JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);

    kid = pn->pn_kid;

    /* Clear TCF_IN_FOR_INIT around the kid's code-gen, then restore it. */
    savedFlags = cg->treeContext.flags;
    cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
    if (!js_EmitTree(cx, cg, kid))
        return JS_FALSE;
    cg->treeContext.flags |= savedFlags & TCF_IN_FOR_INIT;

    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - kid->pn_offset) < 0) {
        return JS_FALSE;
    }
    return js_Emit1(cx, cg, op) >= 0;
}
#endif
2326 | |
/*
 * Emit bytecode for a property access (pn is a TOK_DOT or name node with
 * pn_expr as the object operand), applying several fast-path optimizations
 * before falling back to a generic annotated atom op.
 */
static JSBool
EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
           JSBool callContext)
{
    JSParseNode *pn2, *pndot, *pnup, *pndown;
    ptrdiff_t top;

    pn2 = pn->pn_expr;
    if (callContext) {
        /* obj.f(...) uses JSOP_CALLPROP so |this| is pushed too. */
        JS_ASSERT(pn->pn_type == TOK_DOT);
        JS_ASSERT(op == JSOP_GETPROP);
        op = JSOP_CALLPROP;
    } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
        if (pn2->pn_op == JSOP_THIS) {
            if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
                /* Fast path for gets of |this.foo|. */
                return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
            }
        } else if (pn2->pn_type == TOK_NAME) {
            /*
             * Try to optimize:
             *  - arguments.length into JSOP_ARGCNT
             *  - argname.prop into JSOP_GETARGPROP
             *  - localname.prop into JSOP_GETLOCALPROP
             * but don't do this if the property is 'length' -- prefer to emit
             * JSOP_GETARG, etc., and then JSOP_LENGTH.
             */
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
                if (pn2->pn_op == JSOP_ARGUMENTS)
                    return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
            } else {
                switch (pn2->pn_op) {
                  case JSOP_GETARG:
                    op = JSOP_GETARGPROP;
                    goto do_indexconst;
                  case JSOP_GETLOCAL:
                    op = JSOP_GETLOCALPROP;
                  do_indexconst: {
                        /* Emit a combined slot+atom-index operand. */
                        JSAtomListElement *ale;
                        jsatomid atomIndex;

                        ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
                        if (!ale)
                            return JS_FALSE;
                        atomIndex = ALE_INDEX(ale);
                        return EmitSlotIndexOp(cx, op, pn2->pn_slot, atomIndex, cg);
                    }

                  default:;
                }
            }
        }
    }

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->pn_type == TOK_DOT) {
        pndot = pn2;
        pnup = NULL;
        top = CG_OFFSET(cg);
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (pndown->pn_type != TOK_DOT)
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!js_EmitTree(cx, cg, pndown))
            return JS_FALSE;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                               CG_OFFSET(cg) - pndown->pn_offset) < 0) {
                return JS_FALSE;
            }
            if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg))
                return JS_FALSE;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != NULL);
    } else {
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
    }

    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return EmitAtomOp(cx, pn, op, cg);
}
2433 | |
2434 | static JSBool |
2435 | EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg) |
2436 | { |
2437 | ptrdiff_t top; |
2438 | JSParseNode *left, *right, *next, ltmp, rtmp; |
2439 | jsint slot; |
2440 | |
2441 | top = CG_OFFSET(cg); |
2442 | if (pn->pn_arity == PN_LIST) { |
2443 | /* Left-associative operator chain to avoid too much recursion. */ |
2444 | JS_ASSERT(pn->pn_op == JSOP_GETELEM); |
2445 | JS_ASSERT(pn->pn_count >= 3); |
2446 | left = pn->pn_head; |
2447 | right = PN_LAST(pn); |
2448 | next = left->pn_next; |
2449 | JS_ASSERT(next != right); |
2450 | |
2451 | /* |
2452 | * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by |
2453 | * one or more index expression and JSOP_GETELEM op pairs. |
2454 | */ |
2455 | if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) { |
2456 | if (!BindNameToSlot(cx, cg, left)) |
2457 | return JS_FALSE; |
2458 | if (left->pn_op == JSOP_ARGUMENTS && |
2459 | JSDOUBLE_IS_INT(next->pn_dval, slot) && |
2460 | (jsuint)slot < JS_BIT(16)) { |
2461 | /* |
2462 | * arguments[i]() requires arguments object as "this". |
2463 | * Check that we never generates list for that usage. |
2464 | */ |
2465 | JS_ASSERT(op != JSOP_CALLELEM || next->pn_next); |
2466 | left->pn_offset = next->pn_offset = top; |
2467 | EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot); |
2468 | left = next; |
2469 | next = left->pn_next; |
2470 | } |
2471 | } |
2472 | |
2473 | /* |
2474 | * Check whether we generated JSOP_ARGSUB, just above, and have only |
2475 | * one more index expression to emit. Given arguments[0][j], we must |
2476 | * skip the while loop altogether, falling through to emit code for j |
2477 | * (in the subtree referenced by right), followed by the annotated op, |
2478 | * at the bottom of this function. |
2479 | */ |
2480 | JS_ASSERT(next != right || pn->pn_count == 3); |
2481 | if (left == pn->pn_head) { |
2482 | if (!js_EmitTree(cx, cg, left)) |
2483 | return JS_FALSE; |
2484 | } |
2485 | while (next != right) { |
2486 | if (!js_EmitTree(cx, cg, next)) |
2487 | return JS_FALSE; |
2488 | if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) |
2489 | return JS_FALSE; |
2490 | if (js_Emit1(cx, cg, JSOP_GETELEM) < 0) |
2491 | return JS_FALSE; |
2492 | next = next->pn_next; |
2493 | } |
2494 | } else { |
2495 | if (pn->pn_arity == PN_NAME) { |
2496 | /* |
2497 | * Set left and right so pn appears to be a TOK_LB node, instead |
2498 | * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and |
2499 | * EmitDestructuringOps nearer below. In the destructuring case, |
2500 | * the base expression (pn_expr) of the name may be null, which |
2501 | * means we have to emit a JSOP_BINDNAME. |
2502 | */ |
2503 | left = pn->pn_expr; |
2504 | if (!left) { |
2505 | left = <mp; |
2506 | left->pn_type = TOK_STRING; |
2507 | left->pn_op = JSOP_BINDNAME; |
2508 | left->pn_arity = PN_NULLARY; |
2509 | left->pn_pos = pn->pn_pos; |
2510 | left->pn_atom = pn->pn_atom; |
2511 | } |
2512 | right = &rtmp; |
2513 | right->pn_type = TOK_STRING; |
2514 | JS_ASSERT(ATOM_IS_STRING(pn->pn_atom)); |
2515 | right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom)) |
2516 | ? JSOP_QNAMEPART |
2517 | : JSOP_STRING; |
2518 | right->pn_arity = PN_NULLARY; |
2519 | right->pn_pos = pn->pn_pos; |
2520 | right->pn_atom = pn->pn_atom; |
2521 | } else { |
2522 | JS_ASSERT(pn->pn_arity == PN_BINARY); |
2523 | left = pn->pn_left; |
2524 | right = pn->pn_right; |
2525 | } |
2526 | |
2527 | /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */ |
2528 | if (op == JSOP_GETELEM && |
2529 | left->pn_type == TOK_NAME && |
2530 | right->pn_type == TOK_NUMBER) { |
2531 | if (!BindNameToSlot(cx, cg, left)) |
2532 | return JS_FALSE; |
2533 | if (left->pn_op == JSOP_ARGUMENTS && |
2534 | JSDOUBLE_IS_INT(right->pn_dval, slot) && |
2535 | (jsuint)slot < JS_BIT(16)) { |
2536 | left->pn_offset = right->pn_offset = top; |
2537 | EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot); |
2538 | return JS_TRUE; |
2539 | } |
2540 | } |
2541 | |
2542 | if (!js_EmitTree(cx, cg, left)) |
2543 | return JS_FALSE; |
2544 | } |
2545 | |
2546 | /* The right side of the descendant operator is implicitly quoted. */ |
2547 | JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING || |
2548 | right->pn_op == JSOP_QNAMEPART); |
2549 | if (!js_EmitTree(cx, cg, right)) |
2550 | return JS_FALSE; |
2551 | if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) |
2552 | return JS_FALSE; |
2553 | return js_Emit1(cx, cg, op) >= 0; |
2554 | } |
2555 | |
2556 | static JSBool |
2557 | EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg) |
2558 | { |
2559 | jsint ival; |
2560 | uint32 u; |
2561 | ptrdiff_t off; |
2562 | jsbytecode *pc; |
2563 | JSAtom *atom; |
2564 | JSAtomListElement *ale; |
2565 | |
2566 | if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) { |
2567 | if (ival == 0) |
2568 | return js_Emit1(cx, cg, JSOP_ZERO) >= 0; |
2569 | if (ival == 1) |
2570 | return js_Emit1(cx, cg, JSOP_ONE) >= 0; |
2571 | if ((jsint)(int8)ival == ival) |
2572 | return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0; |
2573 | |
2574 | u = (uint32)ival; |
2575 | if (u < JS_BIT(16)) { |
2576 | EMIT_UINT16_IMM_OP(JSOP_UINT16, u); |
2577 | } else if (u < JS_BIT(24)) { |
2578 | off = js_EmitN(cx, cg, JSOP_UINT24, 3); |
2579 | if (off < 0) |
2580 | return JS_FALSE; |
2581 | pc = CG_CODE(cg, off); |
2582 | SET_UINT24(pc, u); |
2583 | } else { |
2584 | off = js_EmitN(cx, cg, JSOP_INT32, 4); |
2585 | if (off < 0) |
2586 | return JS_FALSE; |
2587 | pc = CG_CODE(cg, off); |
2588 | SET_INT32(pc, ival); |
2589 | } |
2590 | return JS_TRUE; |
2591 | } |
2592 | |
2593 | atom = js_AtomizeDouble(cx, dval); |
2594 | if (!atom) |
2595 | return JS_FALSE; |
2596 | |
2597 | ale = js_IndexAtom(cx, atom, &cg->atomList); |
2598 | if (!ale) |
2599 | return JS_FALSE; |
2600 | return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg); |
2601 | } |
2602 | |
2603 | static JSBool |
2604 | EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn, |
2605 | JSStmtInfo *stmtInfo) |
2606 | { |
2607 | JSOp switchOp; |
2608 | JSBool ok, hasDefault, constPropagated; |
2609 | ptrdiff_t top, off, defaultOffset; |
2610 | JSParseNode *pn2, *pn3, *pn4; |
2611 | uint32 caseCount, tableLength; |
2612 | JSParseNode **table; |
2613 | jsdouble d; |
2614 | jsint i, low, high; |
2615 | jsval v; |
2616 | JSAtom *atom; |
2617 | JSAtomListElement *ale; |
2618 | intN noteIndex; |
2619 | size_t switchSize, tableSize; |
2620 | jsbytecode *pc, *savepc; |
2621 | #if JS_HAS_BLOCK_SCOPE |
2622 | jsint count; |
2623 | #endif |
2624 | |
2625 | /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */ |
2626 | switchOp = JSOP_TABLESWITCH; |
2627 | ok = JS_TRUE; |
2628 | hasDefault = constPropagated = JS_FALSE; |
2629 | defaultOffset = -1; |
2630 | |
2631 | /* |
2632 | * If the switch contains let variables scoped by its body, model the |
2633 | * resulting block on the stack first, before emitting the discriminant's |
2634 | * bytecode (in case the discriminant contains a stack-model dependency |
2635 | * such as a let expression). |
2636 | */ |
2637 | pn2 = pn->pn_right; |
2638 | #if JS_HAS_BLOCK_SCOPE |
2639 | if (pn2->pn_type == TOK_LEXICALSCOPE) { |
2640 | /* |
2641 | * Push the body's block scope before discriminant code-gen for proper |
2642 | * static block scope linkage in case the discriminant contains a let |
2643 | * expression. The block's locals must lie under the discriminant on |
2644 | * the stack so that case-dispatch bytecodes can find the discriminant |
2645 | * on top of stack. |
2646 | */ |
2647 | count = OBJ_BLOCK_COUNT(cx, pn2->pn_pob->object); |
2648 | js_PushBlockScope(&cg->treeContext, stmtInfo, pn2->pn_pob->object, -1); |
2649 | stmtInfo->type = STMT_SWITCH; |
2650 | |
2651 | /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */ |
2652 | if (!EmitObjectOp(cx, pn2->pn_pob, JSOP_ENTERBLOCK, cg)) |
2653 | return JS_FALSE; |
2654 | |
2655 | /* |
2656 | * Pop the switch's statement info around discriminant code-gen. Note |
2657 | * how this leaves cg->treeContext.blockChain referencing the switch's |
2658 | * block scope object, which is necessary for correct block parenting |
2659 | * in the case where the discriminant contains a let expression. |
2660 | */ |
2661 | cg->treeContext.topStmt = stmtInfo->down; |
2662 | cg->treeContext.topScopeStmt = stmtInfo->downScope; |
2663 | } |
2664 | #ifdef __GNUC__ |
2665 | else { |
2666 | count = 0; |
2667 | } |
2668 | #endif |
2669 | #endif |
2670 | |
2671 | /* |
2672 | * Emit code for the discriminant first (or nearly first, in the case of a |
2673 | * switch whose body is a block scope). |
2674 | */ |
2675 | if (!js_EmitTree(cx, cg, pn->pn_left)) |
2676 | return JS_FALSE; |
2677 | |
2678 | /* Switch bytecodes run from here till end of final case. */ |
2679 | top = CG_OFFSET(cg); |
2680 | #if !JS_HAS_BLOCK_SCOPE |
2681 | js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top); |
2682 | #else |
2683 | if (pn2->pn_type == TOK_LC) { |
2684 | js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top); |
2685 | } else { |
2686 | /* Re-push the switch's statement info record. */ |
2687 | cg->treeContext.topStmt = cg->treeContext.topScopeStmt = stmtInfo; |
2688 | |
2689 | /* Set the statement info record's idea of top. */ |
2690 | stmtInfo->update = top; |
2691 | |
2692 | /* Advance pn2 to refer to the switch case list. */ |
2693 | pn2 = pn2->pn_expr; |
2694 | } |
2695 | #endif |
2696 | |
2697 | caseCount = pn2->pn_count; |
2698 | tableLength = 0; |
2699 | table = NULL; |
2700 | |
2701 | if (caseCount == 0 || |
2702 | (caseCount == 1 && |
2703 | (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) { |
2704 | caseCount = 0; |
2705 | low = 0; |
2706 | high = -1; |
2707 | } else { |
2708 | #define INTMAP_LENGTH 256 |
2709 | jsbitmap intmap_space[INTMAP_LENGTH]; |
2710 | jsbitmap *intmap = NULL; |
2711 | int32 intmap_bitlen = 0; |
2712 | |
2713 | low = JSVAL_INT_MAX; |
2714 | high = JSVAL_INT_MIN; |
2715 | |
2716 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
2717 | if (pn3->pn_type == TOK_DEFAULT) { |
2718 | hasDefault = JS_TRUE; |
2719 | caseCount--; /* one of the "cases" was the default */ |
2720 | continue; |
2721 | } |
2722 | |
2723 | JS_ASSERT(pn3->pn_type == TOK_CASE); |
2724 | if (switchOp == JSOP_CONDSWITCH) |
2725 | continue; |
2726 | |
2727 | pn4 = pn3->pn_left; |
2728 | switch (pn4->pn_type) { |
2729 | case TOK_NUMBER: |
2730 | d = pn4->pn_dval; |
2731 | if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) { |
2732 | pn3->pn_val = INT_TO_JSVAL(i); |
2733 | } else { |
2734 | atom = js_AtomizeDouble(cx, d); |
2735 | if (!atom) { |
2736 | ok = JS_FALSE; |
2737 | goto release; |
2738 | } |
2739 | pn3->pn_val = ATOM_KEY(atom); |
2740 | } |
2741 | break; |
2742 | case TOK_STRING: |
2743 | pn3->pn_val = ATOM_KEY(pn4->pn_atom); |
2744 | break; |
2745 | case TOK_NAME: |
2746 | if (!pn4->pn_expr) { |
2747 | ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v); |
2748 | if (!ok) |
2749 | goto release; |
2750 | if (v != JSVAL_HOLE) { |
2751 | if (!JSVAL_IS_PRIMITIVE(v)) { |
2752 | /* |
2753 | * XXX JSOP_LOOKUPSWITCH does not support const- |
2754 | * propagated object values, see bug 407186. |
2755 | */ |
2756 | switchOp = JSOP_CONDSWITCH; |
2757 | continue; |
2758 | } |
2759 | pn3->pn_val = v; |
2760 | constPropagated = JS_TRUE; |
2761 | break; |
2762 | } |
2763 | } |
2764 | /* FALL THROUGH */ |
2765 | case TOK_PRIMARY: |
2766 | if (pn4->pn_op == JSOP_TRUE) { |
2767 | pn3->pn_val = JSVAL_TRUE; |
2768 | break; |
2769 | } |
2770 | if (pn4->pn_op == JSOP_FALSE) { |
2771 | pn3->pn_val = JSVAL_FALSE; |
2772 | break; |
2773 | } |
2774 | /* FALL THROUGH */ |
2775 | default: |
2776 | switchOp = JSOP_CONDSWITCH; |
2777 | continue; |
2778 | } |
2779 | |
2780 | JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val)); |
2781 | |
2782 | if (switchOp != JSOP_TABLESWITCH) |
2783 | continue; |
2784 | if (!JSVAL_IS_INT(pn3->pn_val)) { |
2785 | switchOp = JSOP_LOOKUPSWITCH; |
2786 | continue; |
2787 | } |
2788 | i = JSVAL_TO_INT(pn3->pn_val); |
2789 | if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) { |
2790 | switchOp = JSOP_LOOKUPSWITCH; |
2791 | continue; |
2792 | } |
2793 | if (i < low) |
2794 | low = i; |
2795 | if (high < i) |
2796 | high = i; |
2797 | |
2798 | /* |
2799 | * Check for duplicates, which require a JSOP_LOOKUPSWITCH. |
2800 | * We bias i by 65536 if it's negative, and hope that's a rare |
2801 | * case (because it requires a malloc'd bitmap). |
2802 | */ |
2803 | if (i < 0) |
2804 | i += JS_BIT(16); |
2805 | if (i >= intmap_bitlen) { |
2806 | if (!intmap && |
2807 | i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) { |
2808 | intmap = intmap_space; |
2809 | intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2; |
2810 | } else { |
2811 | /* Just grab 8K for the worst-case bitmap. */ |
2812 | intmap_bitlen = JS_BIT(16); |
2813 | intmap = (jsbitmap *) |
2814 | JS_malloc(cx, |
2815 | (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2) |
2816 | * sizeof(jsbitmap)); |
2817 | if (!intmap) { |
2818 | JS_ReportOutOfMemory(cx); |
2819 | return JS_FALSE; |
2820 | } |
2821 | } |
2822 | memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2); |
2823 | } |
2824 | if (JS_TEST_BIT(intmap, i)) { |
2825 | switchOp = JSOP_LOOKUPSWITCH; |
2826 | continue; |
2827 | } |
2828 | JS_SET_BIT(intmap, i); |
2829 | } |
2830 | |
2831 | release: |
2832 | if (intmap && intmap != intmap_space) |
2833 | JS_free(cx, intmap); |
2834 | if (!ok) |
2835 | return JS_FALSE; |
2836 | |
2837 | /* |
2838 | * Compute table length and select lookup instead if overlarge or |
2839 | * more than half-sparse. |
2840 | */ |
2841 | if (switchOp == JSOP_TABLESWITCH) { |
2842 | tableLength = (uint32)(high - low + 1); |
2843 | if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount) |
2844 | switchOp = JSOP_LOOKUPSWITCH; |
2845 | } else if (switchOp == JSOP_LOOKUPSWITCH) { |
2846 | /* |
2847 | * Lookup switch supports only atom indexes below 64K limit. |
2848 | * Conservatively estimate the maximum possible index during |
2849 | * switch generation and use conditional switch if it exceeds |
2850 | * the limit. |
2851 | */ |
2852 | if (caseCount + cg->atomList.count > JS_BIT(16)) |
2853 | switchOp = JSOP_CONDSWITCH; |
2854 | } |
2855 | } |
2856 | |
2857 | /* |
2858 | * Emit a note with two offsets: first tells total switch code length, |
2859 | * second tells offset to first JSOP_CASE if condswitch. |
2860 | */ |
2861 | noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0); |
2862 | if (noteIndex < 0) |
2863 | return JS_FALSE; |
2864 | |
2865 | if (switchOp == JSOP_CONDSWITCH) { |
2866 | /* |
2867 | * 0 bytes of immediate for unoptimized ECMAv2 switch. |
2868 | */ |
2869 | switchSize = 0; |
2870 | } else if (switchOp == JSOP_TABLESWITCH) { |
2871 | /* |
2872 | * 3 offsets (len, low, high) before the table, 1 per entry. |
2873 | */ |
2874 | switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength)); |
2875 | } else { |
2876 | /* |
2877 | * JSOP_LOOKUPSWITCH: |
2878 | * 1 offset (len) and 1 atom index (npairs) before the table, |
2879 | * 1 atom index and 1 jump offset per entry. |
2880 | */ |
2881 | switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN + |
2882 | (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount); |
2883 | } |
2884 | |
2885 | /* |
2886 | * Emit switchOp followed by switchSize bytes of jump or lookup table. |
2887 | * |
2888 | * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial |
2889 | * to emit the immediate operand(s) by which bytecode readers such as |
2890 | * BuildSpanDepTable discover the length of the switch opcode *before* |
2891 | * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's |
2892 | * also important to zero all unknown jump offset immediate operands, |
2893 | * so they can be converted to span dependencies with null targets to |
2894 | * be computed later (js_EmitN zeros switchSize bytes after switchOp). |
2895 | */ |
2896 | if (js_EmitN(cx, cg, switchOp, switchSize) < 0) |
2897 | return JS_FALSE; |
2898 | |
2899 | off = -1; |
2900 | if (switchOp == JSOP_CONDSWITCH) { |
2901 | intN caseNoteIndex = -1; |
2902 | JSBool beforeCases = JS_TRUE; |
2903 | |
2904 | /* Emit code for evaluating cases and jumping to case statements. */ |
2905 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
2906 | pn4 = pn3->pn_left; |
2907 | if (pn4 && !js_EmitTree(cx, cg, pn4)) |
2908 | return JS_FALSE; |
2909 | if (caseNoteIndex >= 0) { |
2910 | /* off is the previous JSOP_CASE's bytecode offset. */ |
2911 | if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0, |
2912 | CG_OFFSET(cg) - off)) { |
2913 | return JS_FALSE; |
2914 | } |
2915 | } |
2916 | if (!pn4) { |
2917 | JS_ASSERT(pn3->pn_type == TOK_DEFAULT); |
2918 | continue; |
2919 | } |
2920 | caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0); |
2921 | if (caseNoteIndex < 0) |
2922 | return JS_FALSE; |
2923 | off = EmitJump(cx, cg, JSOP_CASE, 0); |
2924 | if (off < 0) |
2925 | return JS_FALSE; |
2926 | pn3->pn_offset = off; |
2927 | if (beforeCases) { |
2928 | uintN noteCount, noteCountDelta; |
2929 | |
2930 | /* Switch note's second offset is to first JSOP_CASE. */ |
2931 | noteCount = CG_NOTE_COUNT(cg); |
2932 | if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, |
2933 | off - top)) { |
2934 | return JS_FALSE; |
2935 | } |
2936 | noteCountDelta = CG_NOTE_COUNT(cg) - noteCount; |
2937 | if (noteCountDelta != 0) |
2938 | caseNoteIndex += noteCountDelta; |
2939 | beforeCases = JS_FALSE; |
2940 | } |
2941 | } |
2942 | |
2943 | /* |
2944 | * If we didn't have an explicit default (which could fall in between |
2945 | * cases, preventing us from fusing this js_SetSrcNoteOffset with the |
2946 | * call in the loop above), link the last case to the implicit default |
2947 | * for the decompiler. |
2948 | */ |
2949 | if (!hasDefault && |
2950 | caseNoteIndex >= 0 && |
2951 | !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0, |
2952 | CG_OFFSET(cg) - off)) { |
2953 | return JS_FALSE; |
2954 | } |
2955 | |
2956 | /* Emit default even if no explicit default statement. */ |
2957 | defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0); |
2958 | if (defaultOffset < 0) |
2959 | return JS_FALSE; |
2960 | } else { |
2961 | pc = CG_CODE(cg, top + JUMP_OFFSET_LEN); |
2962 | |
2963 | if (switchOp == JSOP_TABLESWITCH) { |
2964 | /* Fill in switch bounds, which we know fit in 16-bit offsets. */ |
2965 | SET_JUMP_OFFSET(pc, low); |
2966 | pc += JUMP_OFFSET_LEN; |
2967 | SET_JUMP_OFFSET(pc, high); |
2968 | pc += JUMP_OFFSET_LEN; |
2969 | |
2970 | /* |
2971 | * Use malloc to avoid arena bloat for programs with many switches. |
2972 | * We free table if non-null at label out, so all control flow must |
2973 | * exit this function through goto out or goto bad. |
2974 | */ |
2975 | if (tableLength != 0) { |
2976 | tableSize = (size_t)tableLength * sizeof *table; |
2977 | table = (JSParseNode **) JS_malloc(cx, tableSize); |
2978 | if (!table) |
2979 | return JS_FALSE; |
2980 | memset(table, 0, tableSize); |
2981 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
2982 | if (pn3->pn_type == TOK_DEFAULT) |
2983 | continue; |
2984 | i = JSVAL_TO_INT(pn3->pn_val); |
2985 | i -= low; |
2986 | JS_ASSERT((uint32)i < tableLength); |
2987 | table[i] = pn3; |
2988 | } |
2989 | } |
2990 | } else { |
2991 | JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH); |
2992 | |
2993 | /* Fill in the number of cases. */ |
2994 | SET_INDEX(pc, caseCount); |
2995 | pc += INDEX_LEN; |
2996 | } |
2997 | |
2998 | /* |
2999 | * After this point, all control flow involving JSOP_TABLESWITCH |
3000 | * must set ok and goto out to exit this function. To keep things |
3001 | * simple, all switchOp cases exit that way. |
3002 | */ |
3003 | MUST_FLOW_THROUGH("out"); |
3004 | if (cg->spanDeps) { |
3005 | /* |
3006 | * We have already generated at least one big jump so we must |
3007 | * explicitly add span dependencies for the switch jumps. When |
3008 | * called below, js_SetJumpOffset can only do it when patching |
3009 | * the first big jump or when cg->spanDeps is null. |
3010 | */ |
3011 | if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top))) |
3012 | goto bad; |
3013 | } |
3014 | |
3015 | if (constPropagated) { |
3016 | /* |
3017 | * Skip switchOp, as we are not setting jump offsets in the two |
3018 | * for loops below. We'll restore CG_NEXT(cg) from savepc after, |
3019 | * unless there was an error. |
3020 | */ |
3021 | savepc = CG_NEXT(cg); |
3022 | CG_NEXT(cg) = pc + 1; |
3023 | if (switchOp == JSOP_TABLESWITCH) { |
3024 | for (i = 0; i < (jsint)tableLength; i++) { |
3025 | pn3 = table[i]; |
3026 | if (pn3 && |
3027 | (pn4 = pn3->pn_left) != NULL && |
3028 | pn4->pn_type == TOK_NAME) { |
3029 | /* Note a propagated constant with the const's name. */ |
3030 | JS_ASSERT(!pn4->pn_expr); |
3031 | ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList); |
3032 | if (!ale) |
3033 | goto bad; |
3034 | CG_NEXT(cg) = pc; |
3035 | if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t) |
3036 | ALE_INDEX(ale)) < 0) { |
3037 | goto bad; |
3038 | } |
3039 | } |
3040 | pc += JUMP_OFFSET_LEN; |
3041 | } |
3042 | } else { |
3043 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
3044 | pn4 = pn3->pn_left; |
3045 | if (pn4 && pn4->pn_type == TOK_NAME) { |
3046 | /* Note a propagated constant with the const's name. */ |
3047 | JS_ASSERT(!pn4->pn_expr); |
3048 | ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList); |
3049 | if (!ale) |
3050 | goto bad; |
3051 | CG_NEXT(cg) = pc; |
3052 | if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t) |
3053 | ALE_INDEX(ale)) < 0) { |
3054 | goto bad; |
3055 | } |
3056 | } |
3057 | pc += INDEX_LEN + JUMP_OFFSET_LEN; |
3058 | } |
3059 | } |
3060 | CG_NEXT(cg) = savepc; |
3061 | } |
3062 | } |
3063 | |
3064 | /* Emit code for each case's statements, copying pn_offset up to pn3. */ |
3065 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
3066 | if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT) |
3067 | CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad); |
3068 | pn4 = pn3->pn_right; |
3069 | ok = js_EmitTree(cx, cg, pn4); |
3070 | if (!ok) |
3071 | goto out; |
3072 | pn3->pn_offset = pn4->pn_offset; |
3073 | if (pn3->pn_type == TOK_DEFAULT) |
3074 | off = pn3->pn_offset - top; |
3075 | } |
3076 | |
3077 | if (!hasDefault) { |
3078 | /* If no default case, offset for default is to end of switch. */ |
3079 | off = CG_OFFSET(cg) - top; |
3080 | } |
3081 | |
3082 | /* We better have set "off" by now. */ |
3083 | JS_ASSERT(off != -1); |
3084 | |
3085 | /* Set the default offset (to end of switch if no default). */ |
3086 | if (switchOp == JSOP_CONDSWITCH) { |
3087 | pc = NULL; |
3088 | JS_ASSERT(defaultOffset != -1); |
3089 | ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset), |
3090 | off - (defaultOffset - top)); |
3091 | if (!ok) |
3092 | goto out; |
3093 | } else { |
3094 | pc = CG_CODE(cg, top); |
3095 | ok = js_SetJumpOffset(cx, cg, pc, off); |
3096 | if (!ok) |
3097 | goto out; |
3098 | pc += JUMP_OFFSET_LEN; |
3099 | } |
3100 | |
3101 | /* Set the SRC_SWITCH note's offset operand to tell end of switch. */ |
3102 | off = CG_OFFSET(cg) - top; |
3103 | ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off); |
3104 | if (!ok) |
3105 | goto out; |
3106 | |
3107 | if (switchOp == JSOP_TABLESWITCH) { |
3108 | /* Skip over the already-initialized switch bounds. */ |
3109 | pc += 2 * JUMP_OFFSET_LEN; |
3110 | |
3111 | /* Fill in the jump table, if there is one. */ |
3112 | for (i = 0; i < (jsint)tableLength; i++) { |
3113 | pn3 = table[i]; |
3114 | off = pn3 ? pn3->pn_offset - top : 0; |
3115 | ok = js_SetJumpOffset(cx, cg, pc, off); |
3116 | if (!ok) |
3117 | goto out; |
3118 | pc += JUMP_OFFSET_LEN; |
3119 | } |
3120 | } else if (switchOp == JSOP_LOOKUPSWITCH) { |
3121 | /* Skip over the already-initialized number of cases. */ |
3122 | pc += INDEX_LEN; |
3123 | |
3124 | for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
3125 | if (pn3->pn_type == TOK_DEFAULT) |
3126 | continue; |
3127 | if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom)) |
3128 | goto bad; |
3129 | ale = js_IndexAtom(cx, atom, &cg->atomList); |
3130 | if (!ale) |
3131 | goto bad; |
3132 | SET_INDEX(pc, ALE_INDEX(ale)); |
3133 | pc += INDEX_LEN; |
3134 | |
3135 | off = pn3->pn_offset - top; |
3136 | ok = js_SetJumpOffset(cx, cg, pc, off); |
3137 | if (!ok) |
3138 | goto out; |
3139 | pc += JUMP_OFFSET_LEN; |
3140 | } |
3141 | } |
3142 | |
3143 | out: |
3144 | if (table) |
3145 | JS_free(cx, table); |
3146 | if (ok) { |
3147 | ok = js_PopStatementCG(cx, cg); |
3148 | |
3149 | #if JS_HAS_BLOCK_SCOPE |
3150 | if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE) |
3151 | EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count); |
3152 | #endif |
3153 | } |
3154 | return ok; |
3155 | |
3156 | bad: |
3157 | ok = JS_FALSE; |
3158 | goto out; |
3159 | } |
3160 | |
3161 | JSBool |
3162 | js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body) |
3163 | { |
3164 | if (cg->treeContext.flags & TCF_FUN_IS_GENERATOR) { |
3165 | /* JSOP_GENERATOR must be the first instruction. */ |
3166 | CG_SWITCH_TO_PROLOG(cg); |
3167 | JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg)); |
3168 | if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0) |
3169 | return JS_FALSE; |
3170 | CG_SWITCH_TO_MAIN(cg); |
3171 | } |
3172 | |
3173 | return js_EmitTree(cx, cg, body) && |
3174 | js_Emit1(cx, cg, JSOP_STOP) >= 0 && |
3175 | js_NewScriptFromCG(cx, cg); |
3176 | } |
3177 | |
/*
 * A macro for inlining at the top of js_EmitTree (whence it came).
 *
 * Records any change between CG_CURRENT_LINE(cg) and pn's starting line by
 * emitting either a run of SRC_NEWLINE notes or a single SRC_SETLINE note,
 * whichever encoding is smaller, and updates CG_CURRENT_LINE(cg).  Expands
 * to code that returns JS_FALSE from the enclosing function if source-note
 * allocation fails, so it may only be used inside a JSBool-returning
 * function (see UpdateLineNumberNotes below for the out-of-line form).
 */
#define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn)                                  \
    JS_BEGIN_MACRO                                                            \
        uintN line_ = (pn)->pn_pos.begin.lineno;                              \
        uintN delta_ = line_ - CG_CURRENT_LINE(cg);                           \
        if (delta_ != 0) {                                                    \
            /*                                                                \
             * Encode any change in the current source line number by using  \
             * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
             * whichever consumes less space.                                 \
             *                                                                \
             * NB: We handle backward line number deltas (possible with for  \
             * loops where the update part is emitted after the body, but its \
             * line number is <= any line number in the body) here by letting \
             * unsigned delta_ wrap to a very large number, which triggers a  \
             * SRC_SETLINE.                                                   \
             */                                                               \
            CG_CURRENT_LINE(cg) = line_;                                      \
            if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
                if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
                    return JS_FALSE;                                          \
            } else {                                                          \
                do {                                                          \
                    if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0)               \
                        return JS_FALSE;                                      \
                } while (--delta_ != 0);                                      \
            }                                                                 \
        }                                                                     \
    JS_END_MACRO
3207 | |
/*
 * A function, so that we avoid macro-bloating all the other callsites.
 * Returns JS_FALSE if the macro's source-note allocation fails, JS_TRUE
 * otherwise.
 */
static JSBool
UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
    return JS_TRUE;
}
3215 | |
3216 | static JSBool |
3217 | MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp, |
3218 | JSParseNode *pn, jsatomid *result) |
3219 | { |
3220 | jsatomid atomIndex; |
3221 | JSAtomListElement *ale; |
3222 | |
3223 | if (pn->pn_slot >= 0) { |
3224 | atomIndex = (jsatomid) pn->pn_slot; |
3225 | } else { |
3226 | ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList); |
3227 | if (!ale) |
3228 | return JS_FALSE; |
3229 | atomIndex = ALE_INDEX(ale); |
3230 | } |
3231 | |
3232 | if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM && |
3233 | (!(cg->treeContext.flags & TCF_IN_FUNCTION) || |
3234 | (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) { |
3235 | /* Emit a prolog bytecode to predefine the variable. */ |
3236 | CG_SWITCH_TO_PROLOG(cg); |
3237 | if (!UpdateLineNumberNotes(cx, cg, pn)) |
3238 | return JS_FALSE; |
3239 | EMIT_INDEX_OP(prologOp, atomIndex); |
3240 | CG_SWITCH_TO_MAIN(cg); |
3241 | } |
3242 | |
3243 | if (result) |
3244 | *result = atomIndex; |
3245 | return JS_TRUE; |
3246 | } |
3247 | |
#if JS_HAS_DESTRUCTURING

/*
 * Per-name emitter callback used when walking a destructuring declaration
 * pattern (see EmitDestructuringDecls below).
 */
typedef JSBool
(*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                            JSParseNode *pn);
3253 | |
/*
 * Emit the declaration for a single TOK_NAME node inside a destructuring
 * pattern: bind the name to a slot, then emit a prolog var declaration if
 * one is needed (see MaybeEmitVarDecl).  Returns JS_FALSE on failure.
 */
static JSBool
EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                      JSParseNode *pn)
{
    JS_ASSERT(pn->pn_type == TOK_NAME);
    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;

    /* Destructuring targets cannot be the magic 'arguments' binding. */
    JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS);
    return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
}
3265 | |
3266 | static JSBool |
3267 | EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp, |
3268 | JSParseNode *pn) |
3269 | { |
3270 | JSParseNode *pn2, *pn3; |
3271 | DestructuringDeclEmitter emitter; |
3272 | |
3273 | if (pn->pn_type == TOK_RB) { |
3274 | for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { |
3275 | if (pn2->pn_type == TOK_COMMA) |
3276 | continue; |
3277 | emitter = (pn2->pn_type == TOK_NAME) |
3278 | ? EmitDestructuringDecl |