/[jscoverage]/trunk/js/jsparse.cpp

Annotation of /trunk/js/jsparse.cpp



Revision 460
Sat Sep 26 23:15:22 2009 UTC by siliconforks
File size: 306292 bytes
Upgrade to SpiderMonkey from Firefox 3.5.3.

1 siliconforks 332 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2     * vim: set ts=8 sw=4 et tw=99:
3     *
4     * ***** BEGIN LICENSE BLOCK *****
5     * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6     *
7     * The contents of this file are subject to the Mozilla Public License Version
8     * 1.1 (the "License"); you may not use this file except in compliance with
9     * the License. You may obtain a copy of the License at
10     * http://www.mozilla.org/MPL/
11     *
12     * Software distributed under the License is distributed on an "AS IS" basis,
13     * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14     * for the specific language governing rights and limitations under the
15     * License.
16     *
17     * The Original Code is Mozilla Communicator client code, released
18     * March 31, 1998.
19     *
20     * The Initial Developer of the Original Code is
21     * Netscape Communications Corporation.
22     * Portions created by the Initial Developer are Copyright (C) 1998
23     * the Initial Developer. All Rights Reserved.
24     *
25     * Contributor(s):
26     *
27     * Alternatively, the contents of this file may be used under the terms of
28     * either of the GNU General Public License Version 2 or later (the "GPL"),
29     * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30     * in which case the provisions of the GPL or the LGPL are applicable instead
31     * of those above. If you wish to allow use of your version of this file only
32     * under the terms of either the GPL or the LGPL, and not to allow others to
33     * use your version of this file under the terms of the MPL, indicate your
34     * decision by deleting the provisions above and replace them with the notice
35     * and other provisions required by the GPL or the LGPL. If you do not delete
36     * the provisions above, a recipient may use your version of this file under
37     * the terms of any one of the MPL, the GPL or the LGPL.
38     *
39     * ***** END LICENSE BLOCK ***** */
40    
41     /*
42     * JS parser.
43     *
44     * This is a recursive-descent parser for the JavaScript language specified by
45     * "The JavaScript 1.5 Language Specification". It uses lexical and semantic
46     * feedback to disambiguate non-LL(1) structures. It generates trees of nodes
47     * induced by the recursive parsing (not precise syntax trees, see jsparse.h).
48     * After tree construction, it rewrites trees to fold constants and evaluate
49     * compile-time expressions. Finally, it calls js_EmitTree (see jsemit.h) to
50     * generate bytecode.
51     *
52     * This parser attempts no error recovery.
53     */
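
/*
 * Illustrative sketch, compiled out: how an embedding typically reaches this
 * parser. JS_CompileScript scans, parses (via the routines in this file),
 * folds constants, and emits bytecode into a JSScript. This assumes the
 * public JSAPI of this engine version; CompileAndRun and "example.js" are
 * hypothetical names used only for the example.
 */
#if 0
static JSBool
CompileAndRun(JSContext *cx, JSObject *global, const char *src)
{
    /* Scanner + recursive-descent parser + constant folding + emitter. */
    JSScript *script = JS_CompileScript(cx, global, src, strlen(src),
                                        "example.js", 1);
    if (!script)
        return JS_FALSE;            /* compile error already reported on cx */

    jsval rval;
    JSBool ok = JS_ExecuteScript(cx, global, script, &rval);
    JS_DestroyScript(cx, script);
    return ok;
}
#endif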
54     #include "jsstddef.h"
55     #include <stdlib.h>
56     #include <string.h>
57     #include <math.h>
58     #include "jstypes.h"
59     #include "jsarena.h" /* Added by JSIFY */
60     #include "jsutil.h" /* Added by JSIFY */
61     #include "jsapi.h"
62     #include "jsarray.h"
63     #include "jsatom.h"
64     #include "jscntxt.h"
65     #include "jsversion.h"
66     #include "jsemit.h"
67     #include "jsfun.h"
68     #include "jsinterp.h"
69     #include "jsiter.h"
70     #include "jslock.h"
71     #include "jsnum.h"
72     #include "jsobj.h"
73     #include "jsopcode.h"
74     #include "jsparse.h"
75     #include "jsscan.h"
76     #include "jsscope.h"
77     #include "jsscript.h"
78     #include "jsstr.h"
79     #include "jsstaticcheck.h"
80    
81     #if JS_HAS_XML_SUPPORT
82     #include "jsxml.h"
83     #endif
84    
85     #if JS_HAS_DESTRUCTURING
86     #include "jsdhash.h"
87     #endif
88    
89     /*
90     * Asserts to verify assumptions behind pn_ macros.
91     */
92 siliconforks 460 #define pn_offsetof(m) offsetof(JSParseNode, m)
93 siliconforks 332
94 siliconforks 460 JS_STATIC_ASSERT(pn_offsetof(pn_link) == pn_offsetof(dn_uses));
95     JS_STATIC_ASSERT(pn_offsetof(pn_u.name.atom) == pn_offsetof(pn_u.apair.atom));
96    
97     #undef pn_offsetof
98    
99 siliconforks 332 /*
100     * JS parsers, from lowest to highest precedence.
101     *
102     * Each parser takes a context, a token stream, and a tree context struct.
103     * Each returns a parse node tree or null on error.
104     */
105    
106     typedef JSParseNode *
107     JSParser(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc);
108    
109     typedef JSParseNode *
110 siliconforks 460 JSVariablesParser(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
111     bool inLetHead);
112    
113     typedef JSParseNode *
114 siliconforks 332 JSMemberParser(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
115     JSBool allowCallSyntax);
116    
117     typedef JSParseNode *
118     JSPrimaryParser(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
119     JSTokenType tt, JSBool afterDot);
120    
121     typedef JSParseNode *
122     JSParenParser(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
123     JSParseNode *pn1, JSBool *genexp);
124    
125     static JSParser FunctionStmt;
126     static JSParser FunctionExpr;
127     static JSParser Statements;
128     static JSParser Statement;
129 siliconforks 460 static JSVariablesParser Variables;
130 siliconforks 332 static JSParser Expr;
131     static JSParser AssignExpr;
132     static JSParser CondExpr;
133     static JSParser OrExpr;
134     static JSParser AndExpr;
135     static JSParser BitOrExpr;
136     static JSParser BitXorExpr;
137     static JSParser BitAndExpr;
138     static JSParser EqExpr;
139     static JSParser RelExpr;
140     static JSParser ShiftExpr;
141     static JSParser AddExpr;
142     static JSParser MulExpr;
143     static JSParser UnaryExpr;
144     static JSMemberParser MemberExpr;
145     static JSPrimaryParser PrimaryExpr;
146     static JSParenParser ParenExpr;
147    
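/*
 * Illustrative note: operator precedence falls out of the call chain of the
 * parsers declared above rather than from an explicit precedence table.
 * Parsing the (hypothetical) statement
 *
 *     x = a + b * c;
 *
 * descends AssignExpr -> CondExpr -> OrExpr -> ... -> AddExpr -> MulExpr, so
 * MulExpr consumes 'b * c' before AddExpr ever sees the '+', yielding the
 * tree for x = (a + (b * c)).
 */
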
148     /*
149     * Insist that the next token be of type tt, or report errno and return null.
150     * NB: this macro uses cx and ts from its lexical environment.
151     */
152     #define MUST_MATCH_TOKEN(tt, errno) \
153     JS_BEGIN_MACRO \
154     if (js_GetToken(cx, ts) != tt) { \
155     js_ReportCompileErrorNumber(cx, ts, NULL, JSREPORT_ERROR, errno); \
156     return NULL; \
157     } \
158     JS_END_MACRO
159    
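/*
 * Illustrative sketch, compiled out: how a parser in this file typically uses
 * MUST_MATCH_TOKEN, relying on cx and ts being in scope. ParenthesizedCond is
 * a hypothetical helper; the message names follow the JSMSG_* pattern used
 * for parenthesis errors, chosen here only for illustration.
 */
#if 0
static JSParseNode *
ParenthesizedCond(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc)
{
    MUST_MATCH_TOKEN(TOK_LP, JSMSG_PAREN_BEFORE_COND);
    JSParseNode *pn = Expr(cx, ts, tc);
    if (!pn)
        return NULL;
    MUST_MATCH_TOKEN(TOK_RP, JSMSG_PAREN_AFTER_COND);
    return pn;
}
#endif
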
160     #ifdef METER_PARSENODES
161     static uint32 parsenodes = 0;
162     static uint32 maxparsenodes = 0;
163     static uint32 recyclednodes = 0;
164     #endif
165    
166 siliconforks 460 void
167     JSParseNode::become(JSParseNode *pn2)
168 siliconforks 332 {
169 siliconforks 460 JS_ASSERT(!pn_defn);
170     JS_ASSERT(!pn2->pn_defn);
171 siliconforks 332
172 siliconforks 460 JS_ASSERT(!pn_used);
173     if (pn2->pn_used) {
174     JSParseNode **pnup = &pn2->pn_lexdef->dn_uses;
175     while (*pnup != pn2)
176     pnup = &(*pnup)->pn_link;
177     *pnup = this;
178     pn_link = pn2->pn_link;
179     pn_used = true;
180     pn2->pn_link = NULL;
181     pn2->pn_used = false;
182 siliconforks 332 }
183    
184 siliconforks 460 /* If this is a function node fix up the pn_funbox->node back-pointer. */
185     if (PN_TYPE(pn2) == TOK_FUNCTION && pn2->pn_arity == PN_FUNC)
186     pn2->pn_funbox->node = this;
187    
188     pn_type = pn2->pn_type;
189     pn_op = pn2->pn_op;
190     pn_arity = pn2->pn_arity;
191     pn_u = pn2->pn_u;
192     pn2->clear();
193     }
194    
195     void
196     JSParseNode::clear()
197     {
198     pn_type = TOK_EOF;
199     pn_op = JSOP_NOP;
200     pn_used = pn_defn = false;
201     pn_arity = PN_NULLARY;
202     }
203    
204     bool
205     JSCompiler::init(const jschar *base, size_t length,
206     FILE *fp, const char *filename, uintN lineno)
207     {
208     JSContext *cx = context;
209    
210     tempPoolMark = JS_ARENA_MARK(&cx->tempPool);
211     if (!js_InitTokenStream(cx, TS(this), base, length, fp, filename, lineno)) {
212     JS_ARENA_RELEASE(&cx->tempPool, tempPoolMark);
213     return false;
214     }
215    
216 siliconforks 332 /* Root atoms and objects allocated for the parsed tree. */
217     JS_KEEP_ATOMS(cx->runtime);
218 siliconforks 460 JS_PUSH_TEMP_ROOT_COMPILER(cx, this, &tempRoot);
219     return true;
220 siliconforks 332 }
221    
222 siliconforks 460 JSCompiler::~JSCompiler()
223 siliconforks 332 {
224 siliconforks 460 JSContext *cx = context;
225    
226     if (principals)
227     JSPRINCIPALS_DROP(cx, principals);
228     JS_ASSERT(tempRoot.u.compiler == this);
229     JS_POP_TEMP_ROOT(cx, &tempRoot);
230 siliconforks 332 JS_UNKEEP_ATOMS(cx->runtime);
231 siliconforks 460 js_CloseTokenStream(cx, TS(this));
232     JS_ARENA_RELEASE(&cx->tempPool, tempPoolMark);
233 siliconforks 332 }
234    
235     void
236 siliconforks 460 JSCompiler::setPrincipals(JSPrincipals *prin)
237 siliconforks 332 {
238 siliconforks 460 JS_ASSERT(!principals);
239     if (prin)
240     JSPRINCIPALS_HOLD(context, prin);
241     principals = prin;
242 siliconforks 332 }
243    
244 siliconforks 460 JSObjectBox *
245     JSCompiler::newObjectBox(JSObject *obj)
246 siliconforks 332 {
247 siliconforks 460 JS_ASSERT(obj);
248 siliconforks 332
249     /*
250     * We use JSContext.tempPool to allocate parsed objects and place them on
251     * a list in JSTokenStream to ensure GC safety. Thus the tempPool arenas
252     * containing the entries must be alive until we are done with scanning,
253     * parsing and code generation for the whole script or top-level function.
254     */
255 siliconforks 460 JSObjectBox *objbox;
256     JS_ARENA_ALLOCATE_TYPE(objbox, JSObjectBox, &context->tempPool);
257     if (!objbox) {
258     js_ReportOutOfScriptQuota(context);
259     return NULL;
260     }
261     objbox->traceLink = traceListHead;
262     traceListHead = objbox;
263     objbox->emitLink = NULL;
264     objbox->object = obj;
265     return objbox;
266     }
267    
268     JSFunctionBox *
269     JSCompiler::newFunctionBox(JSObject *obj, JSParseNode *fn, JSTreeContext *tc)
270     {
271 siliconforks 332 JS_ASSERT(obj);
272 siliconforks 460 JS_ASSERT(HAS_FUNCTION_CLASS(obj));
273    
274     /*
275     * We use JSContext.tempPool to allocate parsed objects and place them on
276     * a list in JSTokenStream to ensure GC safety. Thus the tempPool arenas
277     * containing the entries must be alive until we are done with scanning,
278     * parsing and code generation for the whole script or top-level function.
279     */
280     JSFunctionBox *funbox;
281     JS_ARENA_ALLOCATE_TYPE(funbox, JSFunctionBox, &context->tempPool);
282     if (!funbox) {
283     js_ReportOutOfScriptQuota(context);
284 siliconforks 332 return NULL;
285     }
286 siliconforks 460 funbox->traceLink = traceListHead;
287     traceListHead = funbox;
288     funbox->emitLink = NULL;
289     funbox->object = obj;
290     funbox->node = fn;
291     funbox->siblings = tc->functionList;
292     tc->functionList = funbox;
293     ++tc->compiler->functionCount;
294     funbox->kids = NULL;
295     funbox->parent = tc->funbox;
296     funbox->queued = false;
297     funbox->inLoop = false;
298     for (JSStmtInfo *stmt = tc->topStmt; stmt; stmt = stmt->down) {
299     if (STMT_IS_LOOP(stmt)) {
300     funbox->inLoop = true;
301     break;
302     }
303     }
304     funbox->level = tc->staticLevel;
305     funbox->tcflags = TCF_IN_FUNCTION | (tc->flags & TCF_COMPILE_N_GO);
306     return funbox;
307 siliconforks 332 }
308    
309     void
310 siliconforks 460 JSCompiler::trace(JSTracer *trc)
311 siliconforks 332 {
312 siliconforks 460 JSObjectBox *objbox;
313 siliconforks 332
314 siliconforks 460 JS_ASSERT(tempRoot.u.compiler == this);
315     objbox = traceListHead;
316     while (objbox) {
317     JS_CALL_OBJECT_TRACER(trc, objbox->object, "parser.object");
318     objbox = objbox->traceLink;
319 siliconforks 332 }
320     }
321    
322 siliconforks 460 static void
323     UnlinkFunctionBoxes(JSParseNode *pn, JSTreeContext *tc);
324    
325     static void
326     UnlinkFunctionBox(JSParseNode *pn, JSTreeContext *tc)
327     {
328     JSFunctionBox *funbox = pn->pn_funbox;
329     if (funbox) {
330     JS_ASSERT(funbox->node == pn);
331     funbox->node = NULL;
332    
333     JSFunctionBox **funboxp = &tc->functionList;
334     while (*funboxp) {
335     if (*funboxp == funbox) {
336     *funboxp = funbox->siblings;
337     break;
338     }
339     funboxp = &(*funboxp)->siblings;
340     }
341    
342     uint16 oldflags = tc->flags;
343     JSFunctionBox *oldlist = tc->functionList;
344    
345     tc->flags = (uint16) funbox->tcflags;
346     tc->functionList = funbox->kids;
347     UnlinkFunctionBoxes(pn->pn_body, tc);
348     funbox->kids = tc->functionList;
349     tc->flags = oldflags;
350     tc->functionList = oldlist;
351    
352     // FIXME: use a funbox freelist (consolidate aleFreeList and nodeList).
353     pn->pn_funbox = NULL;
354     }
355     }
356    
357     static void
358     UnlinkFunctionBoxes(JSParseNode *pn, JSTreeContext *tc)
359     {
360     if (pn) {
361     switch (pn->pn_arity) {
362     case PN_NULLARY:
363     return;
364     case PN_UNARY:
365     UnlinkFunctionBoxes(pn->pn_kid, tc);
366     return;
367     case PN_BINARY:
368     UnlinkFunctionBoxes(pn->pn_left, tc);
369     UnlinkFunctionBoxes(pn->pn_right, tc);
370     return;
371     case PN_TERNARY:
372     UnlinkFunctionBoxes(pn->pn_kid1, tc);
373     UnlinkFunctionBoxes(pn->pn_kid2, tc);
374     UnlinkFunctionBoxes(pn->pn_kid3, tc);
375     return;
376     case PN_LIST:
377     for (JSParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
378     UnlinkFunctionBoxes(pn2, tc);
379     return;
380     case PN_FUNC:
381     UnlinkFunctionBox(pn, tc);
382     return;
383     case PN_NAME:
384     UnlinkFunctionBoxes(pn->maybeExpr(), tc);
385     return;
386     case PN_NAMESET:
387     UnlinkFunctionBoxes(pn->pn_tree, tc);
388     }
389     }
390     }
391    
392     static void
393     RecycleFuncNameKids(JSParseNode *pn, JSTreeContext *tc);
394    
395 siliconforks 332 static JSParseNode *
396     RecycleTree(JSParseNode *pn, JSTreeContext *tc)
397     {
398 siliconforks 460 JSParseNode *next, **head;
399 siliconforks 332
400     if (!pn)
401     return NULL;
402    
403     /* Catch back-to-back dup recycles. */
404 siliconforks 460 JS_ASSERT(pn != tc->compiler->nodeList);
405 siliconforks 332 next = pn->pn_next;
406 siliconforks 460 if (pn->pn_used || pn->pn_defn) {
407     /*
408     * JSAtomLists own definition nodes along with their used-node chains.
409     * Defer recycling such nodes until we unwind to top level to avoid
410     * linkage overhead or (alternatively) unlinking runtime complexity.
411     * Yes, this means dead code can contribute to static analysis results!
412     *
413     * Do recycle kids here, since they are no longer needed.
414     */
415     pn->pn_next = NULL;
416     RecycleFuncNameKids(pn, tc);
417     } else {
418     UnlinkFunctionBoxes(pn, tc);
419     head = &tc->compiler->nodeList;
420     pn->pn_next = *head;
421     *head = pn;
422 siliconforks 332 #ifdef METER_PARSENODES
423 siliconforks 460 recyclednodes++;
424 siliconforks 332 #endif
425 siliconforks 460 }
426 siliconforks 332 return next;
427     }
428    
429 siliconforks 460 static void
430     RecycleFuncNameKids(JSParseNode *pn, JSTreeContext *tc)
431     {
432     switch (pn->pn_arity) {
433     case PN_FUNC:
434     UnlinkFunctionBox(pn, tc);
435     /* FALL THROUGH */
436    
437     case PN_NAME:
438     /*
439     * Only a definition node might have a non-null strong pn_expr link
440     * to recycle, but we test !pn_used to handle PN_FUNC fall through.
441     * Every node with the pn_used flag set has a non-null pn_lexdef
442     * weak reference to its definition node.
443     */
444     if (!pn->pn_used && pn->pn_expr) {
445     RecycleTree(pn->pn_expr, tc);
446     pn->pn_expr = NULL;
447     }
448     break;
449    
450     default:
451     JS_ASSERT(PN_TYPE(pn) == TOK_FUNCTION);
452     }
453     }
454    
455 siliconforks 332 static JSParseNode *
456 siliconforks 460 NewOrRecycledNode(JSTreeContext *tc)
457 siliconforks 332 {
458 siliconforks 460 JSParseNode *pn, *pn2;
459 siliconforks 332
460 siliconforks 460 pn = tc->compiler->nodeList;
461 siliconforks 332 if (!pn) {
462 siliconforks 460 JSContext *cx = tc->compiler->context;
463    
464 siliconforks 332 JS_ARENA_ALLOCATE_TYPE(pn, JSParseNode, &cx->tempPool);
465     if (!pn)
466     js_ReportOutOfScriptQuota(cx);
467     } else {
468 siliconforks 460 tc->compiler->nodeList = pn->pn_next;
469 siliconforks 332
470     /* Recycle immediate descendants only, to save work and working set. */
471     switch (pn->pn_arity) {
472     case PN_FUNC:
473     RecycleTree(pn->pn_body, tc);
474     break;
475     case PN_LIST:
476 siliconforks 460 pn2 = pn->pn_head;
477     if (pn2) {
478     while (pn2 && !pn2->pn_used && !pn2->pn_defn)
479     pn2 = pn2->pn_next;
480     if (pn2) {
481     pn2 = pn->pn_head;
482     do {
483     pn2 = RecycleTree(pn2, tc);
484     } while (pn2);
485     } else {
486     *pn->pn_tail = tc->compiler->nodeList;
487     tc->compiler->nodeList = pn->pn_head;
488 siliconforks 332 #ifdef METER_PARSENODES
489 siliconforks 460 recyclednodes += pn->pn_count;
490 siliconforks 332 #endif
491 siliconforks 460 break;
492     }
493 siliconforks 332 }
494     break;
495     case PN_TERNARY:
496     RecycleTree(pn->pn_kid1, tc);
497     RecycleTree(pn->pn_kid2, tc);
498     RecycleTree(pn->pn_kid3, tc);
499     break;
500     case PN_BINARY:
501     if (pn->pn_left != pn->pn_right)
502     RecycleTree(pn->pn_left, tc);
503     RecycleTree(pn->pn_right, tc);
504     break;
505     case PN_UNARY:
506     RecycleTree(pn->pn_kid, tc);
507     break;
508     case PN_NAME:
509 siliconforks 460 if (!pn->pn_used)
510     RecycleTree(pn->pn_expr, tc);
511 siliconforks 332 break;
512     case PN_NULLARY:
513     break;
514     }
515     }
516     if (pn) {
517     #ifdef METER_PARSENODES
518     parsenodes++;
519     if (parsenodes - recyclednodes > maxparsenodes)
520     maxparsenodes = parsenodes - recyclednodes;
521     #endif
522 siliconforks 460 pn->pn_used = pn->pn_defn = false;
523 siliconforks 332 memset(&pn->pn_u, 0, sizeof pn->pn_u);
524     pn->pn_next = NULL;
525     }
526     return pn;
527     }
528    
529 siliconforks 460 static inline void
530     InitParseNode(JSParseNode *pn, JSTokenType type, JSOp op, JSParseNodeArity arity)
531     {
532     pn->pn_type = type;
533     pn->pn_op = op;
534     pn->pn_arity = arity;
535     JS_ASSERT(!pn->pn_used);
536     JS_ASSERT(!pn->pn_defn);
537     pn->pn_next = pn->pn_link = NULL;
538     }
539    
540 siliconforks 332 /*
541 siliconforks 460 * Allocate a JSParseNode from tc's node freelist or, failing that, from cx's
542     * temporary arena.
543 siliconforks 332 */
544     static JSParseNode *
545 siliconforks 460 NewParseNode(JSParseNodeArity arity, JSTreeContext *tc)
546 siliconforks 332 {
547     JSParseNode *pn;
548     JSToken *tp;
549    
550 siliconforks 460 pn = NewOrRecycledNode(tc);
551 siliconforks 332 if (!pn)
552     return NULL;
553 siliconforks 460 tp = &CURRENT_TOKEN(&tc->compiler->tokenStream);
554     InitParseNode(pn, tp->type, JSOP_NOP, arity);
555 siliconforks 332 pn->pn_pos = tp->pos;
556     return pn;
557     }
558    
559 siliconforks 460 static inline void
560     InitNameNodeCommon(JSParseNode *pn, JSTreeContext *tc)
561     {
562     pn->pn_expr = NULL;
563     pn->pn_cookie = FREE_UPVAR_COOKIE;
564     pn->pn_dflags = tc->atTopLevel() ? PND_TOPLEVEL : 0;
565     if (!tc->topStmt || tc->topStmt->type == STMT_BLOCK)
566     pn->pn_dflags |= PND_BLOCKCHILD;
567     pn->pn_blockid = tc->blockid();
568     }
569    
570 siliconforks 332 static JSParseNode *
571 siliconforks 460 NewNameNode(JSContext *cx, JSTokenStream *ts, JSAtom *atom, JSTreeContext *tc)
572     {
573     JSParseNode *pn;
574    
575     pn = NewParseNode(PN_NAME, tc);
576     if (pn) {
577     pn->pn_atom = atom;
578     InitNameNodeCommon(pn, tc);
579     }
580     return pn;
581     }
582    
583     static JSParseNode *
584     NewBinary(JSTokenType tt, JSOp op, JSParseNode *left, JSParseNode *right,
585 siliconforks 332 JSTreeContext *tc)
586     {
587     JSParseNode *pn, *pn1, *pn2;
588    
589     if (!left || !right)
590     return NULL;
591    
592     /*
593     * Flatten a left-associative (left-heavy) tree of a given operator into
594     * a list, to reduce js_FoldConstants and js_EmitTree recursion.
595     */
596 siliconforks 460 if (PN_TYPE(left) == tt &&
597     PN_OP(left) == op &&
598 siliconforks 332 (js_CodeSpec[op].format & JOF_LEFTASSOC)) {
599     if (left->pn_arity != PN_LIST) {
600     pn1 = left->pn_left, pn2 = left->pn_right;
601     left->pn_arity = PN_LIST;
602 siliconforks 460 left->initList(pn1);
603     left->append(pn2);
604 siliconforks 332 if (tt == TOK_PLUS) {
605     if (pn1->pn_type == TOK_STRING)
606 siliconforks 460 left->pn_xflags |= PNX_STRCAT;
607 siliconforks 332 else if (pn1->pn_type != TOK_NUMBER)
608 siliconforks 460 left->pn_xflags |= PNX_CANTFOLD;
609 siliconforks 332 if (pn2->pn_type == TOK_STRING)
610 siliconforks 460 left->pn_xflags |= PNX_STRCAT;
611 siliconforks 332 else if (pn2->pn_type != TOK_NUMBER)
612 siliconforks 460 left->pn_xflags |= PNX_CANTFOLD;
613 siliconforks 332 }
614     }
615 siliconforks 460 left->append(right);
616 siliconforks 332 left->pn_pos.end = right->pn_pos.end;
617     if (tt == TOK_PLUS) {
618     if (right->pn_type == TOK_STRING)
619 siliconforks 460 left->pn_xflags |= PNX_STRCAT;
620 siliconforks 332 else if (right->pn_type != TOK_NUMBER)
621 siliconforks 460 left->pn_xflags |= PNX_CANTFOLD;
622 siliconforks 332 }
623     return left;
624     }
625    
626     /*
627     * Fold constant addition immediately, to conserve node space and, what's
628     * more, so js_FoldConstants never sees mixed addition and concatenation
629     * operations with more than one leading non-string operand in a PN_LIST
630     * generated for expressions such as 1 + 2 + "pt" (which should evaluate
631     * to "3pt", not "12pt").
632     */
633     if (tt == TOK_PLUS &&
634     left->pn_type == TOK_NUMBER &&
635     right->pn_type == TOK_NUMBER) {
636     left->pn_dval += right->pn_dval;
637     left->pn_pos.end = right->pn_pos.end;
638     RecycleTree(right, tc);
639     return left;
640     }
641    
642 siliconforks 460 pn = NewOrRecycledNode(tc);
643 siliconforks 332 if (!pn)
644     return NULL;
645 siliconforks 460 InitParseNode(pn, tt, op, PN_BINARY);
646 siliconforks 332 pn->pn_pos.begin = left->pn_pos.begin;
647     pn->pn_pos.end = right->pn_pos.end;
648     pn->pn_left = left;
649     pn->pn_right = right;
650     return pn;
651     }
652    
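/*
 * Illustrative note: for a left-associative chain such as
 *
 *     a + b + c + d
 *
 * NewBinary does not leave a left-leaning binary tree (((a + b) + c) + d).
 * After the second '+' the original TOK_PLUS node is converted to PN_LIST
 * arity and each further operand is appended, so js_FoldConstants and
 * js_EmitTree walk one list of four kids instead of recursing per operator.
 */
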
653     #if JS_HAS_GETTER_SETTER
654     static JSTokenType
655     CheckGetterOrSetter(JSContext *cx, JSTokenStream *ts, JSTokenType tt)
656     {
657     JSAtom *atom;
658     JSRuntime *rt;
659     JSOp op;
660     const char *name;
661    
662     JS_ASSERT(CURRENT_TOKEN(ts).type == TOK_NAME);
663     atom = CURRENT_TOKEN(ts).t_atom;
664     rt = cx->runtime;
665     if (atom == rt->atomState.getterAtom)
666     op = JSOP_GETTER;
667     else if (atom == rt->atomState.setterAtom)
668     op = JSOP_SETTER;
669     else
670     return TOK_NAME;
671     if (js_PeekTokenSameLine(cx, ts) != tt)
672     return TOK_NAME;
673     (void) js_GetToken(cx, ts);
674     if (CURRENT_TOKEN(ts).t_op != JSOP_NOP) {
675     js_ReportCompileErrorNumber(cx, ts, NULL, JSREPORT_ERROR,
676     JSMSG_BAD_GETTER_OR_SETTER,
677     (op == JSOP_GETTER)
678     ? js_getter_str
679     : js_setter_str);
680     return TOK_ERROR;
681     }
682     CURRENT_TOKEN(ts).t_op = op;
683     if (JS_HAS_STRICT_OPTION(cx)) {
684     name = js_AtomToPrintableString(cx, atom);
685     if (!name ||
686     !js_ReportCompileErrorNumber(cx, ts, NULL,
687     JSREPORT_WARNING | JSREPORT_STRICT,
688     JSMSG_DEPRECATED_USAGE,
689     name)) {
690     return TOK_ERROR;
691     }
692     }
693     return tt;
694     }
695     #endif
696    
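/*
 * Illustrative note: CheckGetterOrSetter supports legacy SpiderMonkey syntax
 * in which 'getter' or 'setter' appears as a plain NAME token immediately
 * before the construct it modifies, for example
 *
 *     getter function color() { return this._color; }
 *
 * With the strict option set, such usage additionally draws the
 * JSMSG_DEPRECATED_USAGE warning issued above.
 */
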
697 siliconforks 460 static bool
698     GenerateBlockId(JSTreeContext *tc, uint32& blockid)
699     {
700     if (tc->blockidGen == JS_BIT(20)) {
701     JS_ReportErrorNumber(tc->compiler->context, js_GetErrorMessage, NULL,
702     JSMSG_NEED_DIET, "program");
703     return false;
704     }
705     blockid = tc->blockidGen++;
706     return true;
707     }
708    
709     static bool
710     GenerateBlockIdForStmtNode(JSParseNode *pn, JSTreeContext *tc)
711     {
712     JS_ASSERT(tc->topStmt);
713     JS_ASSERT(STMT_MAYBE_SCOPE(tc->topStmt));
714     JS_ASSERT(pn->pn_type == TOK_LC || pn->pn_type == TOK_LEXICALSCOPE);
715     if (!GenerateBlockId(tc, tc->topStmt->blockid))
716     return false;
717     pn->pn_blockid = tc->topStmt->blockid;
718     return true;
719     }
720    
721 siliconforks 332 /*
722     * Parse a top-level JS script.
723     */
724     JSParseNode *
725 siliconforks 460 JSCompiler::parse(JSObject *chain)
726 siliconforks 332 {
727     /*
728     * Protect atoms from being collected by a GC activation, which might
729     * - nest on this thread due to out of memory (the so-called "last ditch"
730     * GC attempted within js_NewGCThing), or
731     * - run for any reason on another thread if this thread is suspended on
732     * an object lock before it finishes generating bytecode into a script
733     * protected from the GC by a root or a stack frame reference.
734     */
735 siliconforks 460 JSTreeContext tc(this);
736     tc.scopeChain = chain;
737     if (!GenerateBlockId(&tc, tc.bodyid))
738     return NULL;
739    
740     JSParseNode *pn = Statements(context, TS(this), &tc);
741 siliconforks 332 if (pn) {
742 siliconforks 460 if (!js_MatchToken(context, TS(this), TOK_EOF)) {
743     js_ReportCompileErrorNumber(context, TS(this), NULL, JSREPORT_ERROR,
744 siliconforks 332 JSMSG_SYNTAX_ERROR);
745     pn = NULL;
746     } else {
747 siliconforks 460 if (!js_FoldConstants(context, pn, &tc))
748 siliconforks 332 pn = NULL;
749     }
750     }
751     return pn;
752     }
753    
754 siliconforks 460 JS_STATIC_ASSERT(FREE_STATIC_LEVEL == JS_BITMASK(JSFB_LEVEL_BITS));
755    
756     static inline bool
757     SetStaticLevel(JSTreeContext *tc, uintN staticLevel)
758     {
759     /*
760     * Reserve FREE_STATIC_LEVEL (0xffff) in order to reserve FREE_UPVAR_COOKIE
761     * (0xffffffff) and other cookies with that level.
762     *
763     * This is a lot simpler than error-checking every MAKE_UPVAR_COOKIE, and
764     * practically speaking it leaves more than enough room for upvars. In fact
765     * we might want to split cookie fields giving fewer bits for skip and more
766     * for slot, but only based on evidence.
767     */
768     if (staticLevel >= FREE_STATIC_LEVEL) {
769     JS_ReportErrorNumber(tc->compiler->context, js_GetErrorMessage, NULL,
770     JSMSG_TOO_DEEP, js_function_str);
771     return false;
772     }
773     tc->staticLevel = staticLevel;
774     return true;
775     }
776    
777 siliconforks 332 /*
778     * Compile a top-level script.
779     */
780 siliconforks 460 JSScript *
781     JSCompiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *callerFrame,
782     JSPrincipals *principals, uint32 tcflags,
783     const jschar *chars, size_t length,
784     FILE *file, const char *filename, uintN lineno,
785     JSString *source /* = NULL */)
786 siliconforks 332 {
787 siliconforks 460 JSCompiler jsc(cx, principals, callerFrame);
788 siliconforks 332 JSArenaPool codePool, notePool;
789     JSTokenType tt;
790     JSParseNode *pn;
791     uint32 scriptGlobals;
792     JSScript *script;
793     #ifdef METER_PARSENODES
794     void *sbrk(ptrdiff_t), *before = sbrk(0);
795     #endif
796    
797     JS_ASSERT(!(tcflags & ~(TCF_COMPILE_N_GO | TCF_NO_SCRIPT_RVAL |
798 siliconforks 460 TCF_STATIC_LEVEL_MASK)));
799 siliconforks 332
800     /*
801     * The scripted callerFrame can only be given for compile-and-go scripts
802 siliconforks 460 * and non-zero static level requires callerFrame.
803 siliconforks 332 */
804     JS_ASSERT_IF(callerFrame, tcflags & TCF_COMPILE_N_GO);
805 siliconforks 460 JS_ASSERT_IF(TCF_GET_STATIC_LEVEL(tcflags) != 0, callerFrame);
806 siliconforks 332
807 siliconforks 460 if (!jsc.init(chars, length, file, filename, lineno))
808 siliconforks 332 return NULL;
809    
810     JS_INIT_ARENA_POOL(&codePool, "code", 1024, sizeof(jsbytecode),
811     &cx->scriptStackQuota);
812     JS_INIT_ARENA_POOL(&notePool, "note", 1024, sizeof(jssrcnote),
813     &cx->scriptStackQuota);
814    
815 siliconforks 460 JSCodeGenerator cg(&jsc, &codePool, &notePool, jsc.tokenStream.lineno);
816    
817 siliconforks 332 MUST_FLOW_THROUGH("out");
818    
819 siliconforks 460 /* Null script early in case of error, to reduce our code footprint. */
820     script = NULL;
821    
822     cg.flags |= uint16(tcflags);
823     cg.scopeChain = scopeChain;
824     if (!SetStaticLevel(&cg, TCF_GET_STATIC_LEVEL(tcflags)))
825     goto out;
826    
827     /*
828     * If funbox is non-null after we create the new script, callerFrame->fun
829     * was saved in the 0th object table entry.
830     */
831     JSObjectBox *funbox;
832     funbox = NULL;
833    
834     if (tcflags & TCF_COMPILE_N_GO) {
835     if (source) {
836     /*
837     * Save eval program source in script->atomMap.vector[0] for the
838     * eval cache (see obj_eval in jsobj.cpp).
839     */
840     JSAtom *atom = js_AtomizeString(cx, source, 0);
841     if (!atom || !cg.atomList.add(&jsc, atom))
842     goto out;
843     }
844    
845     if (callerFrame && callerFrame->fun) {
846     /*
847     * An eval script in a caller frame needs to have its enclosing
848     * function captured in case it refers to an upvar, and someone
849     * wishes to decompile it while it's running.
850     */
851     funbox = jsc.newObjectBox(FUN_OBJECT(callerFrame->fun));
852     if (!funbox)
853     goto out;
854     funbox->emitLink = cg.objectList.lastbox;
855     cg.objectList.lastbox = funbox;
856     cg.objectList.length++;
857     }
858 siliconforks 399 }
859    
860 siliconforks 460 /*
861     * Inline Statements to emit as we go to save AST space. We must generate
862     * our script-body blockid since we aren't calling Statements.
863     */
864     uint32 bodyid;
865     if (!GenerateBlockId(&cg, bodyid))
866     goto out;
867     cg.bodyid = bodyid;
868    
869     #if JS_HAS_XML_SUPPORT
870     pn = NULL;
871     bool onlyXML;
872     onlyXML = true;
873     #endif
874    
875 siliconforks 332 for (;;) {
876 siliconforks 460 jsc.tokenStream.flags |= TSF_OPERAND;
877     tt = js_PeekToken(cx, &jsc.tokenStream);
878     jsc.tokenStream.flags &= ~TSF_OPERAND;
879 siliconforks 332 if (tt <= TOK_EOF) {
880     if (tt == TOK_EOF)
881     break;
882     JS_ASSERT(tt == TOK_ERROR);
883     goto out;
884     }
885    
886 siliconforks 460 pn = Statement(cx, &jsc.tokenStream, &cg);
887     if (!pn)
888 siliconforks 332 goto out;
889 siliconforks 460 JS_ASSERT(!cg.blockNode);
890    
891     if (!js_FoldConstants(cx, pn, &cg))
892     goto out;
893    
894     if (cg.functionList) {
895     if (!jsc.analyzeFunctions(cg.functionList, cg.flags))
896     goto out;
897     cg.functionList = NULL;
898 siliconforks 332 }
899    
900 siliconforks 460 if (!js_EmitTree(cx, &cg, pn))
901 siliconforks 332 goto out;
902 siliconforks 460 #if JS_HAS_XML_SUPPORT
903     if (PN_TYPE(pn) != TOK_SEMI ||
904     !pn->pn_kid ||
905     !TREE_TYPE_IS_XML(PN_TYPE(pn->pn_kid))) {
906     onlyXML = false;
907 siliconforks 332 }
908 siliconforks 460 #endif
909     RecycleTree(pn, &cg);
910 siliconforks 332 }
911    
912 siliconforks 460 #if JS_HAS_XML_SUPPORT
913 siliconforks 332 /*
914 siliconforks 460 * Prevent XML data theft via <script src="http://victim.com/foo.xml">.
915     * For background, see:
916     *
917     * https://bugzilla.mozilla.org/show_bug.cgi?id=336551
918     */
919     if (pn && onlyXML && (tcflags & TCF_NO_SCRIPT_RVAL)) {
920     js_ReportCompileErrorNumber(cx, &jsc.tokenStream, NULL, JSREPORT_ERROR,
921     JSMSG_XML_WHOLE_PROGRAM);
922     goto out;
923     }
924     #endif
925    
926     /*
927     * Global variables and regexps share the index space with locals. Due to
928 siliconforks 332 * incremental code generation we need to patch the bytecode to adjust the
929     * local references to skip the globals.
930     */
931 siliconforks 460 scriptGlobals = cg.ngvars + cg.regexpList.length;
932 siliconforks 332 if (scriptGlobals != 0) {
933     jsbytecode *code, *end;
934     JSOp op;
935     const JSCodeSpec *cs;
936     uintN len, slot;
937    
938     if (scriptGlobals >= SLOTNO_LIMIT)
939     goto too_many_slots;
940     code = CG_BASE(&cg);
941     for (end = code + CG_OFFSET(&cg); code != end; code += len) {
942     JS_ASSERT(code < end);
943     op = (JSOp) *code;
944     cs = &js_CodeSpec[op];
945     len = (cs->length > 0)
946     ? (uintN) cs->length
947     : js_GetVariableBytecodeLength(code);
948     if (JOF_TYPE(cs->format) == JOF_LOCAL ||
949     (JOF_TYPE(cs->format) == JOF_SLOTATOM)) {
950     /*
951     * JSOP_GETARGPROP also has JOF_SLOTATOM type, but it may be
952     * emitted only for a function.
953     */
954     JS_ASSERT((JOF_TYPE(cs->format) == JOF_SLOTATOM) ==
955     (op == JSOP_GETLOCALPROP));
956     slot = GET_SLOTNO(code);
957     slot += scriptGlobals;
958     if (slot >= SLOTNO_LIMIT)
959     goto too_many_slots;
960     SET_SLOTNO(code, slot);
961     }
962     }
963     }
964    
965     #ifdef METER_PARSENODES
966     printf("Parser growth: %d (%u nodes, %u max, %u unrecycled)\n",
967     (char *)sbrk(0) - (char *)before,
968     parsenodes,
969     maxparsenodes,
970     parsenodes - recyclednodes);
971     before = sbrk(0);
972     #endif
973    
974     /*
975     * Nowadays the threaded interpreter needs a stop instruction, so we
976     * do have to emit that here.
977     */
978 siliconforks 460 if (js_Emit1(cx, &cg, JSOP_STOP) < 0)
979 siliconforks 332 goto out;
980     #ifdef METER_PARSENODES
981     printf("Code-gen growth: %d (%u bytecodes, %u srcnotes)\n",
982 siliconforks 460 (char *)sbrk(0) - (char *)before, CG_OFFSET(&cg), cg.noteCount);
983 siliconforks 332 #endif
984     #ifdef JS_ARENAMETER
985     JS_DumpArenaStats(stdout);
986     #endif
987     script = js_NewScriptFromCG(cx, &cg);
988 siliconforks 460 if (script && funbox)
989     script->flags |= JSSF_SAVED_CALLER_FUN;
990 siliconforks 332
991     #ifdef JS_SCOPE_DEPTH_METER
992     if (script) {
993     JSObject *obj = scopeChain;
994     uintN depth = 1;
995     while ((obj = OBJ_GET_PARENT(cx, obj)) != NULL)
996     ++depth;
997     JS_BASIC_STATS_ACCUM(&cx->runtime->hostenvScopeDepthStats, depth);
998     }
999     #endif
1000    
1001     out:
1002     JS_FinishArenaPool(&codePool);
1003     JS_FinishArenaPool(&notePool);
1004     return script;
1005    
1006     too_many_slots:
1007 siliconforks 460 js_ReportCompileErrorNumber(cx, &jsc.tokenStream, NULL,
1008 siliconforks 332 JSREPORT_ERROR, JSMSG_TOO_MANY_LOCALS);
1009     script = NULL;
1010     goto out;
1011     }
1012    
1013     /*
1014     * Insist on a final return before control flows out of pn. Try to be a bit
1015     * smart about loops: do {...; return e2;} while(0) at the end of a function
1016     * that contains an early return e1 will get a strict warning. Similarly for
1017     * iloops: while (true){...} is treated as though ... returns.
1018     */
1019     #define ENDS_IN_OTHER 0
1020     #define ENDS_IN_RETURN 1
1021     #define ENDS_IN_BREAK 2
1022    
1023     static int
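/*
 * Illustrative note: with the strict option enabled, a function whose body
 * returns a value on some paths but can fall off the end on others, e.g.
 *
 *     function f(x) { if (x) return x * 2; }
 *
 * is classified ENDS_IN_OTHER by HasFinalReturn below (a TOK_IF with no else
 * arm), so CheckFinalReturn issues the JSMSG_NO_RETURN_VALUE strict warning.
 */
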
1024     HasFinalReturn(JSParseNode *pn)
1025     {
1026     JSParseNode *pn2, *pn3;
1027     uintN rv, rv2, hasDefault;
1028    
1029     switch (pn->pn_type) {
1030     case TOK_LC:
1031     if (!pn->pn_head)
1032     return ENDS_IN_OTHER;
1033 siliconforks 460 return HasFinalReturn(pn->last());
1034 siliconforks 332
1035     case TOK_IF:
1036     if (!pn->pn_kid3)
1037     return ENDS_IN_OTHER;
1038     return HasFinalReturn(pn->pn_kid2) & HasFinalReturn(pn->pn_kid3);
1039    
1040     case TOK_WHILE:
1041     pn2 = pn->pn_left;
1042     if (pn2->pn_type == TOK_PRIMARY && pn2->pn_op == JSOP_TRUE)
1043     return ENDS_IN_RETURN;
1044     if (pn2->pn_type == TOK_NUMBER && pn2->pn_dval)
1045     return ENDS_IN_RETURN;
1046     return ENDS_IN_OTHER;
1047    
1048     case TOK_DO:
1049     pn2 = pn->pn_right;
1050     if (pn2->pn_type == TOK_PRIMARY) {
1051     if (pn2->pn_op == JSOP_FALSE)
1052     return HasFinalReturn(pn->pn_left);
1053     if (pn2->pn_op == JSOP_TRUE)
1054     return ENDS_IN_RETURN;
1055     }
1056     if (pn2->pn_type == TOK_NUMBER) {
1057     if (pn2->pn_dval == 0)
1058     return HasFinalReturn(pn->pn_left);
1059     return ENDS_IN_RETURN;
1060     }
1061     return ENDS_IN_OTHER;
1062    
1063     case TOK_FOR:
1064     pn2 = pn->pn_left;
1065     if (pn2->pn_arity == PN_TERNARY && !pn2->pn_kid2)
1066     return ENDS_IN_RETURN;
1067     return ENDS_IN_OTHER;
1068    
1069     case TOK_SWITCH:
1070     rv = ENDS_IN_RETURN;
1071     hasDefault = ENDS_IN_OTHER;
1072     pn2 = pn->pn_right;
1073     if (pn2->pn_type == TOK_LEXICALSCOPE)
1074 siliconforks 460 pn2 = pn2->expr();
1075 siliconforks 332 for (pn2 = pn2->pn_head; rv && pn2; pn2 = pn2->pn_next) {
1076     if (pn2->pn_type == TOK_DEFAULT)
1077     hasDefault = ENDS_IN_RETURN;
1078     pn3 = pn2->pn_right;
1079     JS_ASSERT(pn3->pn_type == TOK_LC);
1080     if (pn3->pn_head) {
1081 siliconforks 460 rv2 = HasFinalReturn(pn3->last());
1082 siliconforks 332 if (rv2 == ENDS_IN_OTHER && pn2->pn_next)
1083     /* Falling through to next case or default. */;
1084     else
1085     rv &= rv2;
1086     }
1087     }
1088     /* If a final switch has no default case, we judge it harshly. */
1089     rv &= hasDefault;
1090     return rv;
1091    
1092     case TOK_BREAK:
1093     return ENDS_IN_BREAK;
1094    
1095     case TOK_WITH:
1096     return HasFinalReturn(pn->pn_right);
1097    
1098     case TOK_RETURN:
1099     return ENDS_IN_RETURN;
1100    
1101     case TOK_COLON:
1102     case TOK_LEXICALSCOPE:
1103 siliconforks 460 return HasFinalReturn(pn->expr());
1104 siliconforks 332
1105     case TOK_THROW:
1106     return ENDS_IN_RETURN;
1107    
1108     case TOK_TRY:
1109     /* If we have a finally block that returns, we are done. */
1110     if (pn->pn_kid3) {
1111     rv = HasFinalReturn(pn->pn_kid3);
1112     if (rv == ENDS_IN_RETURN)
1113     return rv;
1114     }
1115    
1116     /* Else check the try block and any and all catch statements. */
1117     rv = HasFinalReturn(pn->pn_kid1);
1118     if (pn->pn_kid2) {
1119     JS_ASSERT(pn->pn_kid2->pn_arity == PN_LIST);
1120     for (pn2 = pn->pn_kid2->pn_head; pn2; pn2 = pn2->pn_next)
1121     rv &= HasFinalReturn(pn2);
1122     }
1123     return rv;
1124    
1125     case TOK_CATCH:
1126     /* Check this catch block's body. */
1127     return HasFinalReturn(pn->pn_kid3);
1128    
1129     case TOK_LET:
1130     /* Non-binary let statements are let declarations. */
1131     if (pn->pn_arity != PN_BINARY)
1132     return ENDS_IN_OTHER;
1133     return HasFinalReturn(pn->pn_right);
1134    
1135     default:
1136     return ENDS_IN_OTHER;
1137     }
1138     }
1139    
1140     static JSBool
1141     ReportBadReturn(JSContext *cx, JSTreeContext *tc, uintN flags, uintN errnum,
1142     uintN anonerrnum)
1143     {
1144     const char *name;
1145    
1146     JS_ASSERT(tc->flags & TCF_IN_FUNCTION);
1147 siliconforks 460 if (tc->fun->atom) {
1148     name = js_AtomToPrintableString(cx, tc->fun->atom);
1149 siliconforks 332 } else {
1150     errnum = anonerrnum;
1151     name = NULL;
1152     }
1153 siliconforks 460 return js_ReportCompileErrorNumber(cx, TS(tc->compiler), NULL, flags,
1154 siliconforks 332 errnum, name);
1155     }
1156    
1157     static JSBool
1158     CheckFinalReturn(JSContext *cx, JSTreeContext *tc, JSParseNode *pn)
1159     {
1160     JS_ASSERT(tc->flags & TCF_IN_FUNCTION);
1161     return HasFinalReturn(pn) == ENDS_IN_RETURN ||
1162     ReportBadReturn(cx, tc, JSREPORT_WARNING | JSREPORT_STRICT,
1163     JSMSG_NO_RETURN_VALUE, JSMSG_ANON_NO_RETURN_VALUE);
1164     }
1165    
1166     static JSParseNode *
1167     FunctionBody(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc)
1168     {
1169     JSStmtInfo stmtInfo;
1170     uintN oldflags, firstLine;
1171     JSParseNode *pn;
1172    
1173     JS_ASSERT(tc->flags & TCF_IN_FUNCTION);
1174     js_PushStatement(tc, &stmtInfo, STMT_BLOCK, -1);
1175     stmtInfo.flags = SIF_BODY_BLOCK;
1176    
1177     oldflags = tc->flags;
1178     tc->flags &= ~(TCF_RETURN_EXPR | TCF_RETURN_VOID);
1179    
1180     /*
1181     * Save the body's first line, and store it in pn->pn_pos.begin.lineno
1182     * later, because we may have not peeked in ts yet, so Statements won't
1183     * acquire a valid pn->pn_pos.begin from the current token.
1184     */
1185     firstLine = ts->lineno;
1186     #if JS_HAS_EXPR_CLOSURES
1187     if (CURRENT_TOKEN(ts).type == TOK_LC) {
1188     pn = Statements(cx, ts, tc);
1189     } else {
1190 siliconforks 460 pn = NewParseNode(PN_UNARY, tc);
1191 siliconforks 332 if (pn) {
1192     pn->pn_kid = AssignExpr(cx, ts, tc);
1193     if (!pn->pn_kid) {
1194     pn = NULL;
1195     } else {
1196     if (tc->flags & TCF_FUN_IS_GENERATOR) {
1197     ReportBadReturn(cx, tc, JSREPORT_ERROR,
1198     JSMSG_BAD_GENERATOR_RETURN,
1199     JSMSG_BAD_ANON_GENERATOR_RETURN);
1200     pn = NULL;
1201     } else {
1202     pn->pn_type = TOK_RETURN;
1203     pn->pn_op = JSOP_RETURN;
1204     pn->pn_pos.end = pn->pn_kid->pn_pos.end;
1205     }
1206     }
1207     }
1208     }
1209     #else
1210     pn = Statements(cx, ts, tc);
1211     #endif
1212    
1213     if (pn) {
1214 siliconforks 460 JS_ASSERT(!(tc->topStmt->flags & SIF_SCOPE));
1215 siliconforks 332 js_PopStatement(tc);
1216     pn->pn_pos.begin.lineno = firstLine;
1217    
1218     /* Check for falling off the end of a function that returns a value. */
1219     if (JS_HAS_STRICT_OPTION(cx) && (tc->flags & TCF_RETURN_EXPR) &&
1220     !CheckFinalReturn(cx, tc, pn)) {
1221     pn = NULL;
1222     }
1223     }
1224    
1225 siliconforks 460 tc->flags = oldflags | (tc->flags & TCF_FUN_FLAGS);
1226 siliconforks 332 return pn;
1227     }
1228    
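/*
 * Illustrative note: the JS_HAS_EXPR_CLOSURES branch above handles
 * JavaScript 1.8 expression closures, e.g.
 *
 *     var square = function (x) x * x;
 *
 * whose body is a single AssignExpr that FunctionBody rewrites into an
 * implicit TOK_RETURN node; a generator body in this form is reported as an
 * error via ReportBadReturn.
 */
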
1229 siliconforks 460 static JSAtomListElement *
1230     MakePlaceholder(JSParseNode *pn, JSTreeContext *tc)
1231     {
1232     JSAtomListElement *ale = tc->lexdeps.add(tc->compiler, pn->pn_atom);
1233     if (!ale)
1234     return NULL;
1235    
1236     JSDefinition *dn = (JSDefinition *)
1237     NewNameNode(tc->compiler->context, TS(tc->compiler), pn->pn_atom, tc);
1238     if (!dn)
1239     return NULL;
1240    
1241     ALE_SET_DEFN(ale, dn);
1242     dn->pn_defn = true;
1243     dn->pn_dflags |= PND_PLACEHOLDER;
1244     return ale;
1245     }
1246    
1247     static bool
1248     Define(JSParseNode *pn, JSAtom *atom, JSTreeContext *tc, bool let = false)
1249     {
1250     JS_ASSERT(!pn->pn_used);
1251     JS_ASSERT_IF(pn->pn_defn, pn->isPlaceholder());
1252    
1253     JSHashEntry **hep;
1254     JSAtomListElement *ale = NULL;
1255     JSAtomList *list = NULL;
1256    
1257     if (let)
1258     ale = (list = &tc->decls)->rawLookup(atom, hep);
1259     if (!ale)
1260     ale = (list = &tc->lexdeps)->rawLookup(atom, hep);
1261    
1262     if (ale) {
1263     JSDefinition *dn = ALE_DEFN(ale);
1264     if (dn != pn) {
1265     JSParseNode **pnup = &dn->dn_uses;
1266     JSParseNode *pnu;
1267     uintN start = let ? pn->pn_blockid : tc->bodyid;
1268    
1269     while ((pnu = *pnup) != NULL && pnu->pn_blockid >= start) {
1270     JS_ASSERT(pnu->pn_used);
1271     pnu->pn_lexdef = (JSDefinition *) pn;
1272     pn->pn_dflags |= pnu->pn_dflags & (PND_ASSIGNED | PND_FUNARG);
1273     pnup = &pnu->pn_link;
1274     }
1275    
1276     if (pnu != dn->dn_uses) {
1277     *pnup = pn->dn_uses;
1278     pn->dn_uses = dn->dn_uses;
1279     dn->dn_uses = pnu;
1280    
1281     if ((!pnu || pnu->pn_blockid < tc->bodyid) && list != &tc->decls)
1282     list->rawRemove(tc->compiler, ale, hep);
1283     }
1284     }
1285     }
1286    
1287     ale = tc->decls.add(tc->compiler, atom, let ? JSAtomList::SHADOW : JSAtomList::UNIQUE);
1288     if (!ale)
1289     return false;
1290     ALE_SET_DEFN(ale, pn);
1291     pn->pn_defn = true;
1292     pn->pn_dflags &= ~PND_PLACEHOLDER;
1293     return true;
1294     }
1295    
1296     static void
1297     LinkUseToDef(JSParseNode *pn, JSDefinition *dn, JSTreeContext *tc)
1298     {
1299     JS_ASSERT(!pn->pn_used);
1300     JS_ASSERT(!pn->pn_defn);
1301     JS_ASSERT(pn != dn->dn_uses);
1302     pn->pn_link = dn->dn_uses;
1303     dn->dn_uses = pn;
1304     pn->pn_used = true;
1305     pn->pn_lexdef = dn;
1306     }
1307    
1308     static void
1309     ForgetUse(JSParseNode *pn)
1310     {
1311     if (!pn->pn_used) {
1312     JS_ASSERT(!pn->pn_defn);
1313     return;
1314     }
1315    
1316     JSParseNode **pnup = &pn->lexdef()->dn_uses;
1317     JSParseNode *pnu;
1318     while ((pnu = *pnup) != pn)
1319     pnup = &pnu->pn_link;
1320     *pnup = pn->pn_link;
1321     pn->pn_used = false;
1322     }
1323    
1324     static JSParseNode *
1325     MakeAssignment(JSParseNode *pn, JSParseNode *rhs, JSTreeContext *tc)
1326     {
1327     JSParseNode *lhs = NewOrRecycledNode(tc);
1328     if (!lhs)
1329     return NULL;
1330     *lhs = *pn;
1331    
1332     if (pn->pn_used) {
1333     JSDefinition *dn = pn->pn_lexdef;
1334     JSParseNode **pnup = &dn->dn_uses;
1335    
1336     while (*pnup != pn)
1337     pnup = &(*pnup)->pn_link;
1338     *pnup = lhs;
1339     lhs->pn_link = pn->pn_link;
1340     pn->pn_link = NULL;
1341     }
1342    
1343     pn->pn_type = TOK_ASSIGN;
1344     pn->pn_op = JSOP_NOP;
1345     pn->pn_arity = PN_BINARY;
1346     pn->pn_used = pn->pn_defn = false;
1347     pn->pn_left = lhs;
1348     pn->pn_right = rhs;
1349     return lhs;
1350     }
1351    
1352     static JSParseNode *
1353     MakeDefIntoUse(JSDefinition *dn, JSParseNode *pn, JSAtom *atom, JSTreeContext *tc)
1354     {
1355     /*
1356     * If dn is var, const, or let, and it has an initializer, then we must
1357     * rewrite it to be an assignment node, whose freshly allocated left-hand
1358     * side becomes a use of pn.
1359     */
1360     if (dn->isBindingForm()) {
1361     JSParseNode *rhs = dn->expr();
1362     if (rhs) {
1363     JSParseNode *lhs = MakeAssignment(dn, rhs, tc);
1364     if (!lhs)
1365     return NULL;
1366     //pn->dn_uses = lhs;
1367     dn = (JSDefinition *) lhs;
1368     }
1369    
1370     dn->pn_op = (js_CodeSpec[dn->pn_op].format & JOF_SET) ? JSOP_SETNAME : JSOP_NAME;
1371     } else if (dn->kind() == JSDefinition::FUNCTION) {
1372     JS_ASSERT(dn->isTopLevel());
1373     JS_ASSERT(dn->pn_op == JSOP_NOP);
1374    
1375     dn->pn_u.name.funbox2 = dn->pn_funbox;
1376     dn->pn_u.name.expr2 = dn->pn_expr;
1377    
1378     dn->pn_type = TOK_NAME;
1379     dn->pn_arity = PN_NAME;
1380     dn->pn_atom = atom;
1381     }
1382    
1383     /* Now make dn no longer a definition, rather a use of pn. */
1384     JS_ASSERT(dn->pn_type == TOK_NAME);
1385     JS_ASSERT(dn->pn_arity == PN_NAME);
1386     JS_ASSERT(dn->pn_atom == atom);
1387    
1388     for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
1389     JS_ASSERT(pnu->pn_used);
1390     JS_ASSERT(!pnu->pn_defn);
1391     pnu->pn_lexdef = (JSDefinition *) pn;
1392     pn->pn_dflags |= pnu->pn_dflags & (PND_ASSIGNED | PND_FUNARG);
1393     }
1394     pn->pn_dflags |= dn->pn_dflags & (PND_ASSIGNED | PND_FUNARG);
1395     pn->dn_uses = dn;
1396    
1397     dn->pn_defn = false;
1398     dn->pn_used = true;
1399     dn->pn_lexdef = (JSDefinition *) pn;
1400     dn->pn_cookie = FREE_UPVAR_COOKIE;
1401     dn->pn_dflags &= ~PND_BOUND;
1402     return dn;
1403     }
1404    
1405     static bool
1406     DefineArg(JSParseNode *pn, JSAtom *atom, uintN i, JSTreeContext *tc)
1407     {
1408     JSParseNode *argpn, *argsbody;
1409    
1410     /* Flag tc so we don't have to lookup arguments on every use. */
1411     if (atom == tc->compiler->context->runtime->atomState.argumentsAtom)
1412     tc->flags |= TCF_FUN_PARAM_ARGUMENTS;
1413    
1414     /*
1415     * Make an argument definition node, distinguished by being in tc->decls
1416     * but having TOK_NAME type and JSOP_NOP op. Insert it in a TOK_ARGSBODY
1417     * list node returned via pn->pn_body.
1418     */
1419     argpn = NewNameNode(tc->compiler->context, TS(tc->compiler), atom, tc);
1420     if (!argpn)
1421     return false;
1422     JS_ASSERT(PN_TYPE(argpn) == TOK_NAME && PN_OP(argpn) == JSOP_NOP);
1423    
1424     /* Arguments are initialized by definition. */
1425     argpn->pn_dflags |= PND_INITIALIZED;
1426     if (!Define(argpn, atom, tc))
1427     return false;
1428    
1429     argsbody = pn->pn_body;
1430     if (!argsbody) {
1431     argsbody = NewParseNode(PN_LIST, tc);
1432     if (!argsbody)
1433     return false;
1434     argsbody->pn_type = TOK_ARGSBODY;
1435     argsbody->pn_op = JSOP_NOP;
1436     argsbody->makeEmpty();
1437     pn->pn_body = argsbody;
1438     }
1439     argsbody->append(argpn);
1440    
1441     argpn->pn_op = JSOP_GETARG;
1442     argpn->pn_cookie = MAKE_UPVAR_COOKIE(tc->staticLevel, i);
1443     argpn->pn_dflags |= PND_BOUND;
1444     return true;
1445     }
1446    
1447 siliconforks 332 /*
1448     * Compile a JS function body, which might appear as the value of an event
1449     * handler attribute in an HTML <INPUT> tag.
1450     */
1451 siliconforks 460 bool
1452     JSCompiler::compileFunctionBody(JSContext *cx, JSFunction *fun, JSPrincipals *principals,
1453     const jschar *chars, size_t length,
1454     const char *filename, uintN lineno)
1455 siliconforks 332 {
1456 siliconforks 460 JSCompiler jsc(cx, principals);
1457 siliconforks 332
1458 siliconforks 460 if (!jsc.init(chars, length, NULL, filename, lineno))
1459     return false;
1460 siliconforks 332
1461 siliconforks 460 /* No early return from after here until the js_FinishArenaPool calls. */
1462     JSArenaPool codePool, notePool;
1463 siliconforks 332 JS_INIT_ARENA_POOL(&codePool, "code", 1024, sizeof(jsbytecode),
1464     &cx->scriptStackQuota);
1465     JS_INIT_ARENA_POOL(&notePool, "note", 1024, sizeof(jssrcnote),
1466     &cx->scriptStackQuota);
1467    
1468 siliconforks 460 JSCodeGenerator funcg(&jsc, &codePool, &notePool, jsc.tokenStream.lineno);
1469     funcg.flags |= TCF_IN_FUNCTION;
1470     funcg.fun = fun;
1471     if (!GenerateBlockId(&funcg, funcg.bodyid))
1472     return NULL;
1473    
1474     /* FIXME: make Function format the source for a function definition. */
1475     jsc.tokenStream.tokens[0].type = TOK_NAME;
1476     JSParseNode *fn = NewParseNode(PN_FUNC, &funcg);
1477     if (fn) {
1478     fn->pn_body = NULL;
1479     fn->pn_cookie = FREE_UPVAR_COOKIE;
1480    
1481     uintN nargs = fun->nargs;
1482     if (nargs) {
1483     jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool);
1484     if (!names) {
1485     fn = NULL;
1486     } else {
1487     for (uintN i = 0; i < nargs; i++) {
1488     JSAtom *name = JS_LOCAL_NAME_TO_ATOM(names[i]);
1489     if (!DefineArg(fn, name, i, &funcg)) {
1490     fn = NULL;
1491     break;
1492     }
1493     }
1494     }
1495     }
1496     }
1497    
1498 siliconforks 332 /*
1499     * Farble the body so that it looks like a block statement to js_EmitTree,
1500 siliconforks 460 * which is called from js_EmitFunctionBody (see jsemit.cpp). After we're
1501     * done parsing, we must fold constants, analyze any nested functions, and
1502     * generate code for this function, including a stop opcode at the end.
1503 siliconforks 332 */
1504 siliconforks 460 CURRENT_TOKEN(&jsc.tokenStream).type = TOK_LC;
1505     JSParseNode *pn = fn ? FunctionBody(cx, &jsc.tokenStream, &funcg) : NULL;
1506 siliconforks 332 if (pn) {
1507 siliconforks 460 if (!js_MatchToken(cx, &jsc.tokenStream, TOK_EOF)) {
1508     js_ReportCompileErrorNumber(cx, &jsc.tokenStream, NULL,
1509 siliconforks 332 JSREPORT_ERROR, JSMSG_SYNTAX_ERROR);
1510     pn = NULL;
1511 siliconforks 460 } else if (!js_FoldConstants(cx, pn, &funcg)) {
1512     /* js_FoldConstants reported the error already. */
1513     pn = NULL;
1514     } else if (funcg.functionList &&
1515     !jsc.analyzeFunctions(funcg.functionList, funcg.flags)) {
1516     pn = NULL;
1517 siliconforks 332 } else {
1518 siliconforks 460 if (fn->pn_body) {
1519     JS_ASSERT(PN_TYPE(fn->pn_body) == TOK_ARGSBODY);
1520     fn->pn_body->append(pn);
1521     fn->pn_body->pn_pos = pn->pn_pos;
1522     pn = fn->pn_body;
1523     }
1524    
1525     if (!js_EmitFunctionScript(cx, &funcg, pn))
1526 siliconforks 332 pn = NULL;
1527     }
1528     }
1529    
1530     /* Restore saved state and release code generation arenas. */
1531     JS_FinishArenaPool(&codePool);
1532     JS_FinishArenaPool(&notePool);
1533     return pn != NULL;
1534     }
1535    
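/*
 * Illustrative sketch, compiled out: compileFunctionBody above is the engine
 * half of compiling a bare function body, as an embedding does for an HTML
 * event-handler attribute. This assumes the public JSAPI of this engine
 * version; CompileClickHandler, the argument name, and the body text are
 * hypothetical.
 */
#if 0
static JSFunction *
CompileClickHandler(JSContext *cx, JSObject *global)
{
    static const char *argnames[] = { "event" };
    const char *body = "return event.type == 'click';";

    /* Compiles the body text into a function of one argument named 'event'. */
    return JS_CompileFunction(cx, global, "onclick", 1, argnames,
                              body, strlen(body), "inline-handler", 1);
}
#endif
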
1536     /*
1537     * Parameter block types for the several Binder functions. We use a common
1538     * helper function signature in order to share code among destructuring and
1539     * simple variable declaration parsers. In the destructuring case, the binder
1540     * function is called indirectly from the variable declaration parser by way
1541     * of CheckDestructuring and its friends.
1542     */
1543     typedef struct BindData BindData;
1544    
1545     typedef JSBool
1546     (*Binder)(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc);
1547    
1548     struct BindData {
1549 siliconforks 460 BindData() : fresh(true) {}
1550    
1551     JSParseNode *pn; /* name node for definition processing and
1552     error source coordinates */
1553     JSOp op; /* prolog bytecode or nop */
1554     Binder binder; /* binder, discriminates u */
1555 siliconforks 332 union {
1556     struct {
1557 siliconforks 460 uintN overflow;
1558 siliconforks 332 } let;
1559 siliconforks 460 };
1560     bool fresh;
1561 siliconforks 332 };
1562    
1563     static JSBool
1564     BindLocalVariable(JSContext *cx, JSFunction *fun, JSAtom *atom,
1565     JSLocalKind localKind)
1566     {
1567     JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
1568    
1569     /*
1570     * Don't bind a variable with the hidden name 'arguments', per ECMA-262.
1571     * Instead 'var arguments' always restates the predefined property of the
1572 siliconforks 460 * activation objects whose name is 'arguments'. Assignment to such a
1573     * variable must be handled specially.
1574 siliconforks 332 */
1575     if (atom == cx->runtime->atomState.argumentsAtom)
1576     return JS_TRUE;
1577    
1578     return js_AddLocal(cx, fun, atom, localKind);
1579     }
1580    
1581     #if JS_HAS_DESTRUCTURING
1582     /*
1583     * Forward declaration to maintain top-down presentation.
1584     */
1585     static JSParseNode *
1586     DestructuringExpr(JSContext *cx, BindData *data, JSTreeContext *tc,
1587     JSTokenType tt);
1588    
1589     static JSBool
1590     BindDestructuringArg(JSContext *cx, BindData *data, JSAtom *atom,
1591     JSTreeContext *tc)
1592     {
1593     JSAtomListElement *ale;
1594 siliconforks 460 JSParseNode *pn;
1595 siliconforks 332
1596 siliconforks 460 /* Flag tc so we don't have to lookup arguments on every use. */
1597     if (atom == tc->compiler->context->runtime->atomState.argumentsAtom)
1598     tc->flags |= TCF_FUN_PARAM_ARGUMENTS;
1599    
1600 siliconforks 332 JS_ASSERT(tc->flags & TCF_IN_FUNCTION);
1601 siliconforks 460 ale = tc->decls.lookup(atom);
1602     pn = data->pn;
1603     if (!ale && !Define(pn, atom, tc))
1604     return JS_FALSE;
1605    
1606     JSLocalKind localKind = js_LookupLocal(cx, tc->fun, atom, NULL);
1607     if (localKind != JSLOCAL_NONE) {
1608     js_ReportCompileErrorNumber(cx, TS(tc->compiler), NULL,
1609     JSREPORT_ERROR, JSMSG_DESTRUCT_DUP_ARG);
1610     return JS_FALSE;
1611 siliconforks 332 }
1612    
1613 siliconforks 460 uintN index = tc->fun->u.i.nvars;
1614     if (!BindLocalVariable(cx, tc->fun, atom, JSLOCAL_VAR))
1615     return JS_FALSE;
1616     pn->pn_op = JSOP_SETLOCAL;
1617     pn->pn_cookie = MAKE_UPVAR_COOKIE(tc->staticLevel, index);
1618     pn->pn_dflags |= PND_BOUND;
1619 siliconforks 332 return JS_TRUE;
1620     }
1621     #endif /* JS_HAS_DESTRUCTURING */
1622    
1623 siliconforks 460 JSFunction *
1624     JSCompiler::newFunction(JSTreeContext *tc, JSAtom *atom, uintN lambda)
1625 siliconforks 332 {
1626     JSObject *parent;
1627     JSFunction *fun;
1628    
1629     JS_ASSERT((lambda & ~JSFUN_LAMBDA) == 0);
1630 siliconforks 460
1631     /*
1632     * Find the global compilation context in order to pre-set the newborn
1633     * function's parent slot to tc->scopeChain. If the global context is a
1634     * compile-and-go one, we leave the pre-set parent intact; otherwise we
1635     * clear parent and proto.
1636     */
1637     while (tc->parent)
1638     tc = tc->parent;
1639     parent = (tc->flags & TCF_IN_FUNCTION) ? NULL : tc->scopeChain;
1640    
1641     fun = js_NewFunction(context, NULL, NULL, 0, JSFUN_INTERPRETED | lambda,
1642 siliconforks 332 parent, atom);
1643 siliconforks 460
1644 siliconforks 332 if (fun && !(tc->flags & TCF_COMPILE_N_GO)) {
1645     STOBJ_CLEAR_PARENT(FUN_OBJECT(fun));
1646     STOBJ_CLEAR_PROTO(FUN_OBJECT(fun));
1647     }
1648     return fun;
1649     }
1650    
1651 siliconforks 460 static JSBool
1652     MatchOrInsertSemicolon(JSContext *cx, JSTokenStream *ts)
1653     {
1654     JSTokenType tt;
1655    
1656     ts->flags |= TSF_OPERAND;
1657     tt = js_PeekTokenSameLine(cx, ts);
1658     ts->flags &= ~TSF_OPERAND;
1659     if (tt == TOK_ERROR)
1660     return JS_FALSE;
1661     if (tt != TOK_EOF && tt != TOK_EOL && tt != TOK_SEMI && tt != TOK_RC) {
1662     js_ReportCompileErrorNumber(cx, ts, NULL, JSREPORT_ERROR,
1663     JSMSG_SEMI_BEFORE_STMNT);
1664     return JS_FALSE;
1665     }
1666     (void) js_MatchToken(cx, ts, TOK_SEMI);
1667     return JS_TRUE;
1668     }
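/*
 * For illustration (hypothetical script): MatchOrInsertSemicolon accepts EOF,
 * a line break, ';', or '}' as a statement terminator, so both statements
 * below parse without explicit semicolons:
 *
 *     var a = 1           // terminated by the line break
 *     if (a) { a += 1 }   // the closing brace terminates the inner statement
 */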
1669    
1670     bool
1671     JSCompiler::analyzeFunctions(JSFunctionBox *funbox, uint16& tcflags)
1672     {
1673     if (!markFunArgs(funbox, tcflags))
1674     return false;
1675     setFunctionKinds(funbox, tcflags);
1676     return true;
1677     }
1678    
1679     /*
1680     * Mark as funargs any functions that reach up to one or more upvars across an
1681     * already-known funarg. The parser will flag the o_m lambda as a funarg in:
1682     *
1683     * function f(o, p) {
1684     * o.m = function o_m(a) {
1685     * function g() { return p; }
1686     * function h() { return a; }
1687     * return g() + h();
1688     * }
1689     * }
1690     *
1691     * but without this extra marking phase, function g will not be marked as a
1692     * funarg since it is called from within its parent scope. But g reaches up to
1693     * f's parameter p, so if o_m escapes f's activation scope, g does too and
1694     * cannot use JSOP_GETUPVAR to reach p. In contast function h neither escapes
1695     * cannot use JSOP_GETUPVAR to reach p. In contrast, function h neither escapes
1696     *
1697     * If function g itself contained lambdas that contained non-lambdas reaching
1698     * up above its level, those non-lambdas would have to be marked too. This
1699     * process is potentially exponential in the number of functions, but it is
1700     * rarely expensive in practice. It cannot be done during a single recursive
1701     * traversal of the funbox tree, so we must use a work queue.
1702     *
1703     * Return the minimal "skipmin" for funbox and its siblings. This is the delta
1704     * between the static level of the bodies of funbox and its peers (which must
1705     * be funbox->level + 1), and the static level of the nearest upvar among all
1706     * the upvars contained by funbox and its peers. If there are no upvars, return
1707     * FREE_STATIC_LEVEL. Thus this function never returns 0.
1708     */
1709     static uintN
1710     FindFunArgs(JSFunctionBox *funbox, int level, JSFunctionBoxQueue *queue)
1711     {
1712     uintN allskipmin = FREE_STATIC_LEVEL;
1713    
1714     do {
1715     JSParseNode *fn = funbox->node;
1716     JSFunction *fun = (JSFunction *) funbox->object;
1717     int fnlevel = level;
1718    
1719     /*
1720     * An eval can leak funbox, functions along its ancestor line, and its
1721     * immediate kids. Since FindFunArgs uses DFS and the parser propagates
1722     * TCF_FUN_HEAVYWEIGHT bottom up, funbox's ancestor function nodes have
1723     * already been marked as funargs by this point. Therefore we have to
1724     * flag only funbox->node and funbox->kids' nodes here.
1725     */
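        /*
         * For illustration (hypothetical script): in
         *
         *     function f() {
         *         function g() {
         *             eval("0");           // flags g (and, bottom up, f) heavyweight
         *             function kid() {}
         *         }
         *     }
         *
         * the eval can leak g and its immediate kid, so g's node and kid's node
         * are flagged as funargs here; f's node was already flagged when its
         * funbox was visited earlier in this depth-first walk.
         */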
1726     if (funbox->tcflags & TCF_FUN_HEAVYWEIGHT) {
1727     fn->setFunArg();
1728     for (JSFunctionBox *kid = funbox->kids; kid; kid = kid->siblings)
1729     kid->node->setFunArg();
1730     }
1731    
1732     /*
1733     * Compute in skipmin the least distance from fun's static level up to
1734     * an upvar, whether used directly by fun, or indirectly by a function
1735     * nested in fun.
1736     */
1737     uintN skipmin = FREE_STATIC_LEVEL;
1738     JSParseNode *pn = fn->pn_body;
1739    
1740     if (pn->pn_type == TOK_UPVARS) {
1741     JSAtomList upvars(pn->pn_names);
1742     JS_ASSERT(upvars.count != 0);
1743    
1744     JSAtomListIterator iter(&upvars);
1745     JSAtomListElement *ale;
1746    
1747     while ((ale = iter()) != NULL) {
1748     JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
1749    
1750     if (!lexdep->isFreeVar()) {
1751     uintN upvarLevel = lexdep->frameLevel();
1752    
1753     if (int(upvarLevel) <= fnlevel)
1754     fn->setFunArg();
1755    
1756     uintN skip = (funbox->level + 1) - upvarLevel;
1757     if (skip < skipmin)
1758     skipmin = skip;
1759     }
1760     }
1761     }
1762    
1763     /*
1764     * If this function escapes, whether directly (the parser detects such
1765     * escapes) or indirectly (because this non-escaping function uses an
1766     * upvar that reaches across an outer function boundary where the outer
1767     * function escapes), enqueue it for further analysis, and bump fnlevel
1768     * to trap any non-escaping children.
1769     */
1770     if (fn->isFunArg()) {
1771     queue->push(funbox);
1772     fnlevel = int(funbox->level);
1773     }
1774    
1775     /*
1776     * Now process the current function's children, and recalibrate their
1777     * cumulative skipmin to be relative to the current static level.
1778     */
1779     if (funbox->kids) {
1780     uintN kidskipmin = FindFunArgs(funbox->kids, fnlevel, queue);
1781    
1782     JS_ASSERT(kidskipmin != 0);
1783     if (kidskipmin != FREE_STATIC_LEVEL) {
1784     --kidskipmin;
1785     if (kidskipmin != 0 && kidskipmin < skipmin)
1786     skipmin = kidskipmin;
1787     }
1788     }
1789    
1790     /*
1791     * Finally, after we've traversed all of the current function's kids,
1792     * minimize fun's skipmin against our accumulated skipmin. Do likewise
1793     * with allskipmin, but minimize across funbox and all of its siblings,
1794     * to compute our return value.
1795     */
1796     if (skipmin != FREE_STATIC_LEVEL) {
1797     fun->u.i.skipmin = skipmin;
1798     if (skipmin < allskipmin)
1799     allskipmin = skipmin;
1800     }
1801     } while ((funbox = funbox->siblings) != NULL);
1802    
1803     return allskipmin;
1804     }
1805    
1806     bool
1807     JSCompiler::markFunArgs(JSFunctionBox *funbox, uintN tcflags)
1808     {
1809     JSFunctionBoxQueue queue;
1810     if (!queue.init(functionCount))
1811     return false;
1812    
1813     FindFunArgs(funbox, -1, &queue);
1814     while ((funbox = queue.pull()) != NULL) {
1815     JSParseNode *fn = funbox->node;
1816     JS_ASSERT(fn->isFunArg());
1817    
1818     JSParseNode *pn = fn->pn_body;
1819     if (pn->pn_type == TOK_UPVARS) {
1820     JSAtomList upvars(pn->pn_names);
1821     JS_ASSERT(upvars.count != 0);
1822    
1823     JSAtomListIterator iter(&upvars);
1824     JSAtomListElement *ale;
1825    
1826     while ((ale = iter()) != NULL) {
1827     JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
1828    
1829     if (!lexdep->isFreeVar() &&
1830     !lexdep->isFunArg() &&
1831     lexdep->kind() == JSDefinition::FUNCTION) {
1832     /*
1833     * Mark this formerly-Algol-like function as an escaping
1834     * function (i.e., as a funarg), because it is used from a
1835     * funarg and therefore cannot use JSOP_{GET,CALL}UPVAR to
1836     * access upvars.
1837     *
1838     * Progress is guaranteed because we set the funarg flag
1839     * here, which suppresses revisiting this function (thanks
1840     * to the !lexdep->isFunArg() test just above).
1841     */
1842     lexdep->setFunArg();
1843    
1844     JSFunctionBox *afunbox = lexdep->pn_funbox;
1845     queue.push(afunbox);
1846    
1847     /*
1848     * Walk over nested functions again, now that we have
1849     * changed the level across which it is unsafe to access
1850     * upvars using the runtime dynamic link (frame chain).
1851     */
1852     if (afunbox->kids)
1853     FindFunArgs(afunbox->kids, afunbox->level, &queue);
1854     }
1855     }
1856     }
1857     }
1858     return true;
1859     }
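/*
 * For illustration (hypothetical script): in
 *
 *     function outer(v) {
 *         function reader() { return v; }   // Algol-like: never escapes directly
 *         return function () {              // parser-detected funarg
 *             return reader();              // ...but it uses reader,
 *         };                                // so reader can escape too
 *     }
 *
 * the loop above marks reader as a funarg because it is used from a funarg and
 * therefore cannot rely on JSOP_{GET,CALL}UPVAR to reach v.
 */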
1860    
1861     static bool
1862     MinBlockId(JSParseNode *fn, uint32 id)
1863     {
1864     if (fn->pn_blockid < id)
1865     return false;
1866     if (fn->pn_defn) {
1867     for (JSParseNode *pn = fn->dn_uses; pn; pn = pn->pn_link) {
1868     if (pn->pn_blockid < id)
1869     return false;
1870     }
1871     }
1872     return true;
1873     }
1874    
1875     static bool
1876     OneBlockId(JSParseNode *fn, uint32 id)
1877     {
1878     if (fn->pn_blockid != id)
1879     return false;
1880     if (fn->pn_defn) {
1881     for (JSParseNode *pn = fn->dn_uses; pn; pn = pn->pn_link) {
1882     if (pn->pn_blockid != id)
1883     return false;
1884     }
1885     }
1886     return true;
1887     }
1888    
1889     void
1890     JSCompiler::setFunctionKinds(JSFunctionBox *funbox, uint16& tcflags)
1891     {
1892     #ifdef JS_FUNCTION_METERING
1893     # define FUN_METER(x) JS_RUNTIME_METER(context->runtime, functionMeter.x)
1894     #else
1895     # define FUN_METER(x) ((void)0)
1896     #endif
1897     JSFunctionBox *parent = funbox->parent;
1898    
1899     for (;;) {
1900     JSParseNode *fn = funbox->node;
1901    
1902     if (funbox->kids)
1903     setFunctionKinds(funbox->kids, tcflags);
1904    
1905     JSParseNode *pn = fn->pn_body;
1906     JSFunction *fun = (JSFunction *) funbox->object;
1907    
1908     FUN_METER(allfun);
1909     if (funbox->tcflags & TCF_FUN_HEAVYWEIGHT) {
1910     FUN_METER(heavy);
1911     JS_ASSERT(FUN_KIND(fun) == JSFUN_INTERPRETED);
1912     } else if (pn->pn_type != TOK_UPVARS) {
1913     /*
1914     * No lexical dependencies => null closure, for best performance.
1915     * A null closure needs no scope chain, but alas we've coupled
1916     * principals-finding to scope (for good fundamental reasons, but
1917     * the implementation overloads the parent slot and we should fix
1918     * that). See, e.g., the JSOP_LAMBDA case in jsinterp.cpp.
1919     *
1920     * In more detail: the ES3 spec allows the implementation to create
1921     * "joined function objects", or not, at its discretion. But real-
1922     * world implementations always create unique function objects for
1923     * closures, and this can be detected via mutation. Open question:
1924     * do popular implementations create unique function objects for
1925     * null closures?
1926     *
1927     * FIXME: bug 476950.
1928     */
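            /*
             * For illustration (hypothetical script): the nested function in
             *
             *     function outer() {
             *         return function add1(x) { return x + 1; };
             *     }
             *
             * uses only its own parameter, so it has no TOK_UPVARS node and
             * becomes a null closure here.
             */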
1929     FUN_METER(nofreeupvar);
1930     FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
1931     } else {
1932     JSAtomList upvars(pn->pn_names);
1933     JS_ASSERT(upvars.count != 0);
1934    
1935     JSAtomListIterator iter(&upvars);
1936     JSAtomListElement *ale;
1937    
1938     if (!fn->isFunArg()) {
1939     /*
1940     * This function is Algol-like, it never escapes. So long as it
1941     * does not assign to outer variables, it needs only an upvars
1942     * array in its script and JSOP_{GET,CALL}UPVAR opcodes in its
1943     * bytecode to reach up the frame stack at runtime based on
1944     * those upvars' cookies.
1945     *
1946     * Any assignments to upvars from functions called by this one
1947     * will be coherent because of the JSOP_{GET,CALL}UPVAR ops,
1948     * which load from stack homes when interpreting or from native
1949     * stack slots when executing a trace.
1950     *
1951     * We could add JSOP_SETUPVAR, etc., but it is uncommon for a
1952     * nested function to assign to an outer lexical variable, so
1953     * we defer adding yet more code footprint in the absence of
1954     * evidence motivating these opcodes.
1955     */
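                /*
                 * For illustration (hypothetical script): in
                 *
                 *     function f(n) {
                 *         function g() { return n + 1; }   // reads upvar n
                 *         return g();                       // g never escapes
                 *     }
                 *
                 * g stays a null closure and reaches n at runtime through
                 * JSOP_GETUPVAR, with no environment to entrain.
                 */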
1956     bool mutation = !!(funbox->tcflags & TCF_FUN_SETS_OUTER_NAME);
1957     uintN nupvars = 0;
1958    
1959     /*
1960     * Check that at least one outer lexical binding was assigned
1961     * to (global variables don't count). This is conservative: we
1962     * could limit assignments to those in the current function,
1963     * but that's too much work. As with flat closures (handled
1964     * below), we optimize for the case where outer bindings are
1965     * not reassigned anywhere.
1966     */
1967     while ((ale = iter()) != NULL) {
1968     JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
1969    
1970     if (!lexdep->isFreeVar()) {
1971     JS_ASSERT(lexdep->frameLevel() <= funbox->level);
1972     ++nupvars;
1973     if (lexdep->isAssigned())
1974     break;
1975     }
1976     }
1977     if (!ale)
1978     mutation = false;
1979    
1980     if (nupvars == 0) {
1981     FUN_METER(onlyfreevar);
1982     FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
1983     } else if (!mutation && !(funbox->tcflags & TCF_FUN_IS_GENERATOR)) {
1984     /*
1985     * Algol-like functions can read upvars using the dynamic
1986     * link (cx->fp/fp->down). They do not need to entrain and
1987     * search their environment.
1988     */
1989     FUN_METER(display);
1990     FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
1991     } else {
1992     if (!(funbox->tcflags & TCF_FUN_IS_GENERATOR))
1993     FUN_METER(setupvar);
1994     }
1995     } else {
1996     uintN nupvars = 0;
1997    
1998     /*
1999     * For each lexical dependency from this closure to an outer
2000     * binding, analyze whether it is safe to copy the binding's
2001     * value into a flat closure slot when the closure is formed.
2002     */
2003     while ((ale = iter()) != NULL) {
2004     JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
2005    
2006     if (!lexdep->isFreeVar()) {
2007     ++nupvars;
2008    
2009     /*
2010     * Consider the current function (the lambda, innermost
2011     * below) using a var x defined two static levels up:
2012     *
2013     * function f() {
2014     * // z = g();
2015     * var x = 42;
2016     * function g() {
2017     * return function () { return x; };
2018     * }
2019     * return g();
2020     * }
2021     *
2022     * So long as (1) the initialization in 'var x = 42'
2023     * dominates all uses of g and (2) x is not reassigned,
2024     * it is safe to optimize the lambda to a flat closure.
2025     * Uncommenting the early call to g makes that optimization
2026     * unsafe (z could name a global setter that calls
2027     * its argument).
2028     */
2029     JSFunctionBox *afunbox = funbox;
2030     uintN lexdepLevel = lexdep->frameLevel();
2031    
2032     JS_ASSERT(lexdepLevel <= funbox->level);
2033     while (afunbox->level != lexdepLevel) {
2034     afunbox = afunbox->parent;
2035    
2036     /*
2037     * afunbox can't be null because we are sure
2038     * to find a function box whose level == lexdepLevel
2039     * before walking off the top of the funbox tree.
2040     * See bug 493260 comments 16-18.
2041     *
2042     * Assert but check anyway, to guard against future changes
2043     * that bind eval upvars in the parser.
2044     */
2045     JS_ASSERT(afunbox);
2046    
2047     /*
2048     * If this function is reaching up across an
2049     * enclosing funarg, we cannot make a flat
2050     * closure. The display stops working once the
2051     * funarg escapes.
2052     */
2053     if (!afunbox || afunbox->node->isFunArg())
2054     goto break2;
2055     }
2056    
2057     /*
2058     * If afunbox's function (which is at the same level as
2059     * lexdep) is in a loop, pessimistically assume the
2060     * variable initializer may be in the same loop. A flat
2061     * closure would then be unsafe, as the captured
2062     * variable could be assigned after the closure is
2063     * created. See bug 493232.
2064     */
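                        /*
                         * For illustration (hypothetical script):
                         *
                         *     function f(a) {
                         *         var fs = [];
                         *         for (var i = 0; i < a.length; i++) {
                         *             var x = a[i];
                         *             fs.push(function () { return x; });
                         *         }
                         *         return fs;
                         *     }
                         *
                         * x is reassigned on later iterations after each
                         * closure is created, so snapshotting x into a flat
                         * closure slot would capture a stale value.
                         */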
2065     if (afunbox->inLoop)
2066     break;
2067    
2068     /*
2069     * with and eval defeat lexical scoping; eval anywhere
2070     * in a variable's scope can assign to it. Both defeat
2071     * the flat closure optimization. The parser detects
2072     * these cases and flags the function heavyweight.
2073     */
2074     if ((afunbox->parent ? afunbox->parent->tcflags : tcflags)
2075     & TCF_FUN_HEAVYWEIGHT) {
2076     break;
2077     }
2078    
2079     /*
2080     * If afunbox's function is not a lambda, it will be
2081     * hoisted, so it could capture the undefined value
2082     * that by default initializes var/let/const
2083     * bindings. And if lexdep is a function that comes at the
2084     * same position as afunbox (meaning the function refers
2085     * to its own name) or strictly after it, we also break
2086     * to defeat the flat closure optimization.
2087     */
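                        /*
                         * For illustration (hypothetical script): in
                         *
                         *     function f() {
                         *         function g() {
                         *             return function () { return x; };
                         *         }
                         *         var h = g();   // runs before x is initialized
                         *         var x = 42;
                         *     }
                         *
                         * g is hoisted, so the inner lambda can be created
                         * while x still holds undefined; a flat closure formed
                         * there would snapshot that undefined value.
                         */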
2088     JSFunction *afun = (JSFunction *) afunbox->object;
2089     if (!(afun->flags & JSFUN_LAMBDA)) {
2090     if (lexdep->isBindingForm())
2091     break;
2092     if (lexdep->pn_pos >= afunbox->node->pn_pos)
2093     break;
2094     }
2095    
2096     if (!lexdep->isInitialized())
2097     break;
2098    
2099     JSDefinition::Kind lexdepKind = lexdep->kind();
2100     if (lexdepKind != JSDefinition::CONST) {
2101     if (lexdep->isAssigned())
2102     break;
2103    
2104     /*
2105     * Any formal could be mutated behind our back via
2106     * the arguments object, so deoptimize if the outer
2107     * function uses arguments.
2108     *
2109     * In a Function constructor call where the final
2110     * argument -- the body source for the function to
2111     * create -- contains a nested function definition
2112     * or expression, afunbox->parent will be null. The
2113     * body source might use |arguments| outside of any
2114     * nested functions it may contain, so we have to
2115     * check the tcflags parameter that was passed in
2116     * from JSCompiler::compileFunctionBody.
2117     */
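                            /*
                             * For illustration (hypothetical, non-strict
                             * script):
                             *
                             *     function f(a) {
                             *         var g = function () { return a; };
                             *         arguments[0] = 2;   // mutates formal a
                             *         return g;
                             *     }
                             *
                             * a value of a copied into g's flat closure slot
                             * at creation time would be stale by the time g
                             * is called.
                             */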
2118     if (lexdepKind == JSDefinition::ARG &&
2119     ((afunbox->parent ? afunbox->parent->tcflags : tcflags) &
2120     TCF_FUN_USES_ARGUMENTS)) {
2121     break;
2122     }
2123     }
2124    
2125     /*
2126     * Check quick-and-dirty dominance relation. Function
2127     * definitions dominate their uses thanks to hoisting.
2128     * Other binding forms hoist as undefined, of course,
2129     * so check forward-reference and blockid relations.
2130     */
2131     if (lexdepKind != JSDefinition::FUNCTION) {
2132     /*
2133     * Watch out for code such as
2134     *
2135     * (function () {
2136     * ...
2137     * var jQuery = ... = function (...) {
2138     * return new jQuery.foo.bar(baz);
2139     * }
2140     * ...
2141     * })();
2142     *
2143     * where the jQuery var is not reassigned, but of
2144     * course is not initialized at the time that the
2145     * would-be-flat closure containing the jQuery
2146     * upvar is formed.
2147     */
2148     if (lexdep->pn_pos.end >= afunbox->node->pn_pos.end)
2149     break;
2150    
2151     if (lexdep->isTopLevel()
2152     ? !MinBlockId(afunbox->node, lexdep->pn_blockid)
2153     : !lexdep->isBlockChild() ||
2154     !afunbox->node->isBlockChild() ||
2155     !OneBlockId(afunbox->node, lexdep->pn_blockid)) {
2156     break;
2157     }
2158     }
2159     }
2160     }
2161    
2162     break2:
2163     if (nupvars == 0) {
2164     FUN_METER(onlyfreevar);
2165     FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
2166     } else if (!ale) {
2167     /*
2168     * We made it all the way through the upvar loop, so it's
2169     * safe to optimize to a flat closure.
2170     */
2171     FUN_METER(flat);
2172     FUN_SET_KIND(fun, JSFUN_FLAT_CLOSURE);
2173     switch (PN_OP(fn)) {
2174     case JSOP_DEFFUN:
2175     fn->pn_op = JSOP_DEFFUN_FC;
2176     break;
2177     case JSOP_DEFLOCALFUN:
2178     fn->pn_op = JSOP_DEFLOCALFUN_FC;
2179     break;
2180     case JSOP_LAMBDA:
2181     fn->pn_op = JSOP_LAMBDA_FC;
2182     break;
2183     default:
2184     /* js_EmitTree's case TOK_FUNCTION: will select op. */
2185     JS_ASSERT(PN_OP(fn) == JSOP_NOP);
2186     }
2187     } else {
2188     FUN_METER(badfunarg);
2189     }
2190     }
2191     }
2192    
2193     if (FUN_KIND(fun) == JSFUN_INTERPRETED) {
2194     if (pn->pn_type != TOK_UPVARS) {
2195     if (parent)
2196     parent->tcflags |= TCF_FUN_HEAVYWEIGHT;
2197     } else {
2198     JSAtomList upvars(pn->pn_names);
2199     JS_ASSERT(upvars.count != 0);
2200    
2201     JSAtomListIterator iter(&upvars);
2202     JSAtomListElement *ale;
2203    
2204     /*
2205     * One or more upvars cannot be safely snapshot into a flat
2206     * closure's dslot (see JSOP_GETDSLOT), so we loop again over
2207     * all upvars, and for each non-free upvar, ensure that its
2208     * containing function has been flagged as heavyweight.
2209     *
2210     * The emitter must see TCF_FUN_HEAVYWEIGHT accurately before
2211     * generating any code for a tree of nested functions.
2212     */
2213     while ((ale = iter()) != NULL) {
2214     JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
2215    
2216     if (!lexdep->isFreeVar()) {
2217     JSFunctionBox *afunbox = funbox->parent;
2218     uintN lexdepLevel = lexdep->frameLevel();
2219    
2220     while (afunbox) {
2221     /*
2222     * NB: afunbox->level is the static level of
2223     * the definition or expression of the function
2224     * parsed into afunbox, not the static level of
2225     * its body. Therefore we must add 1 to match
2226     * lexdep's level to find the afunbox whose
2227     * body contains the lexdep definition.
2228     */
2229     if (afunbox->level + 1U == lexdepLevel ||
2230     (lexdepLevel == 0 && lexdep->isLet())) {
2231     afunbox->tcflags |= TCF_FUN_HEAVYWEIGHT;
2232     break;
2233     }
2234     afunbox = afunbox->parent;
2235     }
2236     if (!afunbox && (tcflags & TCF_IN_FUNCTION))
2237     tcflags |= TCF_FUN_HEAVYWEIGHT;
2238     }
2239     }
2240     }
2241     }
2242    
2243     funbox = funbox->siblings;
2244     if (!funbox)
2245     break;
2246     JS_ASSERT(funbox->parent == parent);
2247     }
2248     #undef FUN_METER
2249     }
2250    
2251     const char js_argument_str[] = "argument";
2252     const char js_variable_str[] = "variable";
2253     const char js_unknown_str[] = "unknown";
2254    
2255     const char *
2256     JSDefinition::kindString(Kind kind)
2257     {
2258     static const char *table[] = {
2259     js_var_str, js_const_str, js_let_str,
2260     js_function_str, js_argument_str, js_unknown_str
2261     };
2262    
2263     JS_ASSERT(unsigned(kind) <= unsigned(ARG));
2264     return table[kind];
2265     }
2266    
2267     static JSFunctionBox *
2268     EnterFunction(JSParseNode *fn, JSTreeContext *tc, JSTreeContext *funtc,
2269     JSAtom *funAtom = NULL, uintN lambda = JSFUN_LAMBDA)
2270     {
2271     JSFunction *fun = tc->compiler->newFunction(tc, funAtom, lambda);
2272     if (!fun)
2273     return NULL;
2274    
2275     /* Create box for fun->object early to protect against last-ditch GC. */
2276     JSFunctionBox *funbox = tc->compiler->newFunctionBox(FUN_OBJECT(fun), fn, tc);
2277     if (!funbox)
2278     return NULL;
2279    
2280     /* Initialize non-default members of funtc. */
2281     funtc->flags |= funbox->tcflags;
2282     funtc->blockidGen = tc->blockidGen;
2283     if (!GenerateBlockId(funtc, funtc->bodyid))
2284     return NULL;
2285     funtc->fun = fun;
2286     funtc->funbox = funbox;
2287     funtc->parent = tc;
2288     if (!SetStaticLevel(funtc, tc->staticLevel + 1))
2289     return NULL;
2290    
2291     return funbox;
2292     }
2293    
2294     static bool
2295     LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSTreeContext *tc,
2296     JSAtom *funAtom = NULL, uintN lambda = JSFUN_LAMBDA)
2297     {
2298     tc->blockidGen = funtc->blockidGen;
2299    
2300     fn->pn_funbox->tcflags |= funtc->flags & (TCF_FUN_FLAGS | TCF_COMPILE_N_GO);
2301    
2302     fn->pn_dflags |= PND_INITIALIZED;
2303     JS_ASSERT_IF(tc->atTopLevel() && lambda == 0 && funAtom,
2304     fn->pn_dflags & PND_TOPLEVEL);
2305     if (!tc->topStmt || tc->topStmt->type == STMT_BLOCK)
2306     fn->pn_dflags |= PND_BLOCKCHILD;
2307    
2308     /*
2309     * Propagate unresolved lexical names up to tc->lexdeps, and save a copy
2310     * of funtc->lexdeps in a TOK_UPVARS node wrapping the function's formal
2311     * params and body. We do this only if there are lexical dependencies not
2312     * satisfied by the function's declarations, to avoid penalizing functions
2313     * that use only their arguments and other local bindings.
2314     */
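    /*
     * For illustration (hypothetical script): in
     *
     *     function outer() {
     *         var y = 1;
     *         function inner() { return y + z; }
     *         return inner();
     *     }
     *
     * both y and z are free inside inner and end up in inner's TOK_UPVARS
     * name set; y is then linked to outer's declaration of y, while z has no
     * definition here and propagates onward into outer's lexdeps.
     */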
2315     if (funtc->lexdeps.count != 0) {
2316     JSAtomListIterator iter(&funtc->lexdeps);
2317     JSAtomListElement *ale;
2318     int foundCallee = 0;
2319    
2320     while ((ale = iter()) != NULL) {
2321     JSAtom *atom = ALE_ATOM(ale);
2322     JSDefinition *dn = ALE_DEFN(ale);
2323     JS_ASSERT(dn->isPlaceholder());
2324    
2325     if (atom == funAtom && lambda != 0) {
2326     dn->pn_op = JSOP_CALLEE;
2327     dn->pn_cookie = MAKE_UPVAR_COOKIE(funtc->staticLevel, CALLEE_UPVAR_SLOT);
2328     dn->pn_dflags |= PND_BOUND;
2329    
2330     /*
2331     * If this named function expression uses its own name other
2332     * than to call itself, flag this function as using arguments,
2333     * as if it had used arguments.callee instead of its own name.
2334     *
2335     * This abuses the plain sense of TCF_FUN_USES_ARGUMENTS, but
2336     * we are out of tcflags bits at the moment. If it deoptimizes
2337     * code unfairly (see JSCompiler::setFunctionKinds, where this
2338     * flag is interpreted in its broader sense, not only to mean
2339     * "this function might leak arguments.callee"), we can perhaps
2340     * try to work harder to add a TCF_FUN_LEAKS_ITSELF flag and
2341     * use that more precisely, both here and for unnamed function
2342     * expressions.
2343     */
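                /*
                 * For illustration (hypothetical script):
                 *
                 *     var fac = function fact(n) {
                 *         return n > 1 ? n * fact(n - 1) : 1;
                 *     };
                 *     var leak = function self() { return self; };
                 *
                 * fact uses its own name only to call itself, but self hands
                 * its own name out as a value, so the second function is
                 * flagged here as if it had touched arguments.callee.
                 */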
2344     if (dn->isFunArg())
2345     fn->pn_funbox->tcflags |= TCF_FUN_USES_ARGUMENTS;
2346     foundCallee = 1;
2347     continue;
2348     }
2349    
2350     if (!(fn->pn_funbox->tcflags & TCF_FUN_SETS_OUTER_NAME) &&
2351     dn->isAssigned()) {
2352     /*
2353     * Make sure we do not fail to set TCF_FUN_SETS_OUTER_NAME if
2354     * any use of dn in funtc assigns. See NoteLValue for the easy
2355     * backward-reference case; this is the hard forward-reference
2356     * case where we pay a higher price.
2357     */
2358     for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
2359     if (pnu->isAssigned() && pnu->pn_blockid >= funtc->bodyid) {
2360     fn->pn_funbox->tcflags |= TCF_FUN_SETS_OUTER_NAME;
2361     break;
2362     }
2363     }
2364     }
2365    
2366     JSAtomListElement *outer_ale = tc->decls.lookup(atom);
2367     if (!outer_ale)
2368     outer_ale = tc->lexdeps.lookup(atom);
2369     if (outer_ale) {
2370     /*
2371     * Insert dn's uses list at the front of outer_dn's list.
2372     *
2373     * Without loss of generality or correctness, we allow a dn to
2374     * be in inner and outer lexdeps, since the purpose of lexdeps
2375     * is one-pass coordination of name use and definition across
2376     * functions, and if different dn's are used we'll merge lists
2377     * when leaving the inner function.
2378     *
2379     * The dn == outer_dn case arises with generator expressions
2380     * (see CompExprTransplanter::transplant, the PN_FUNC/PN_NAME
2381     * case), and nowhere else, currently.
2382     */
2383     JSDefinition *outer_dn = ALE_DEFN(outer_ale);
2384    
2385     if (dn != outer_dn) {
2386     JSParseNode **pnup = &dn->dn_uses;
2387     JSParseNode *pnu;
2388    
2389     while ((pnu = *pnup) != NULL) {
2390     pnu->pn_lexdef = outer_dn;
2391     pnup = &pnu->pn_link;
2392     }
2393    
2394     /*
2395     * Make dn be a use that redirects to outer_dn, because we
2396     * can't replace dn with outer_dn in all the pn_namesets in
2397     * the AST where it may be. Instead we make it forward to
2398     * outer_dn. See JSDefinition::resolve.
2399     */
2400     *pnup = outer_dn->dn_uses;
2401     outer_dn->dn_uses = dn;
2402     outer_dn->pn_dflags |= dn->pn_dflags & ~PND_PLACEHOLDER;
2403     dn->pn_defn = false;
2404     dn->pn_used = true;
2405     dn->pn_lexdef = outer_dn;
2406     }
2407     } else {
2408     /* Add an outer lexical dependency for ale's definition. */
2409     outer_ale = tc->lexdeps.add(tc->compiler, atom);
2410     if (!outer_ale)
2411     return false;
2412     ALE_SET_DEFN(outer_ale, ALE_DEFN(ale));
2413     }
2414     }
2415    
2416     if (funtc->lexdeps.count - foundCallee != 0) {
2417     JSParseNode *body = fn->pn_body;
2418    
2419     fn->pn_body = NewParseNode(PN_NAMESET, tc);
2420     if (!fn->pn_body)
2421     return false;
2422    
2423     fn->pn_body->pn_type = TOK_UPVARS;
2424     fn->pn_body->pn_pos = body->pn_pos;
2425     if (foundCallee)
2426     funtc->lexdeps.remove(tc->compiler, funAtom);
2427     fn->pn_body->pn_names = funtc->lexdeps;
2428     fn->pn_body->pn_tree = body;
2429     }
2430    
2431     funtc->lexdeps.clear();
2432     }
2433    
2434     return true;
2435     }
2436    
2437 siliconforks 332 static JSParseNode *
2438     FunctionDef(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
2439     uintN lambda)
2440     {
2441 siliconforks 460 JSOp op;
2442 siliconforks 332 JSParseNode *pn, *body, *result;
2443     JSTokenType tt;
2444     JSAtom *funAtom;
2445     JSAtomListElement *ale;
2446     #if JS_HAS_DESTRUCTURING
2447     JSParseNode *item, *list = NULL;
2448 siliconforks 460 bool destructuringArg = false, duplicatedArg = false;
2449 siliconforks 332 #endif
2450    
2451     /* Make a TOK_FUNCTION node. */
2452     #if JS_HAS_GETTER_SETTER
2453     op = CURRENT_TOKEN(ts).t_op;
2454     #endif
2455 siliconforks 460 pn = NewParseNode(PN_FUNC, tc);
2456 siliconforks 332 if (!pn)
2457     return NULL;
2458 siliconforks 460 pn->pn_body = NULL;
2459     pn->pn_cookie = FREE_UPVAR_COOKIE;
2460 siliconforks 332
2461 siliconforks 460 /*
2462     * If a lambda, give up on JSOP_{GET,CALL}UPVAR usage unless this function
2463     * is immediately applied (we clear PND_FUNARG if so -- see MemberExpr).
2464     *
2465     * Also treat function sub-statements (non-lambda, non-top-level functions)
2466     * as escaping funargs, since we can't statically analyze their definitions
2467     * and uses.
2468     */
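    /*
     * For illustration (hypothetical script):
     *
     *     var n = (function () { return 1; })();   // immediately applied lambda:
     *                                              // PND_FUNARG is cleared later
     *     if (n)
     *         function g() {}                      // function sub-statement:
     *                                              // kept flagged as a funarg
     */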
2469     bool topLevel = tc->atTopLevel();
2470     pn->pn_dflags = (lambda || !topLevel) ? PND_FUNARG : 0;
2471    
2472 siliconforks 332 /* Scan the optional function name into funAtom. */
2473     ts->flags |= TSF_KEYWORD_IS_NAME;
2474     tt = js_GetToken(cx, ts);
2475     ts->flags &= ~TSF_KEYWORD_IS_NAME;
2476     if (tt == TOK_NAME) {
2477     funAtom = CURRENT_TOKEN(ts).t_atom;
2478     } else {
2479     if (lambda == 0 && (cx->options & JSOPTION_ANONFUNFIX)) {
2480     js_ReportCompileErrorNumber(cx, ts, NULL, JSREPORT_ERROR,
2481     JSMSG_SYNTAX_ERROR);
2482     return NULL;
2483     }
2484     funAtom = NULL;
2485     js_UngetToken(ts);
2486     }
2487    
2488     /*
2489     * Record names for function statements in tc->decls so we know when to
2490     * avoid optimizing variable references that might name a function.
2491     */
2492     if (lambda == 0 && funAtom) {
2493 siliconforks 460 ale = tc->decls.lookup(funAtom);
2494 siliconforks 332 if (ale) {
2495 siliconforks 460 JSDefinition *dn = ALE_DEFN(ale);
2496     JSDefinition::Kind dn_kind = dn->kind();
2497    
2498     JS_ASSERT(!dn->pn_used);
2499     JS_ASSERT(dn->pn_defn);
2500    
2501     if (JS_HAS_STRICT_OPTION(cx) || dn_kind == JSDefinition::CONST) {
2502 siliconforks 332 const char *name = js_AtomToPrintableString(cx, funAtom);
2503     if (!name ||
2504     !js_ReportCompileErrorNumber(cx, ts, NULL,
2505 siliconforks 460 (dn_kind != JSDefinition::CONST)
2506     ? JSREPORT_WARNING | JSREPORT_STRICT
2507 siliconforks 332 : JSREPORT_ERROR,
2508     JSMSG_REDECLARED_VAR,
2509 siliconforks 460 JSDefinition::kindString(dn_kind),
2510 siliconforks 332 name)) {
2511     return NULL;
2512     }
2513     }
2514 siliconforks 460
2515     if (topLevel) {
2516     ALE_SET_DEFN(ale, pn);
2517     pn->pn_defn = true;
2518     pn->dn_uses = dn; /* dn->dn_uses is now pn_link */
2519    
2520     if (!MakeDefIntoUse(dn, pn, funAtom, tc))
2521     return NULL;
2522     }
2523     } else if (topLevel) {
2524     /*
2525     * If this function was used before it was defined, claim the
2526     * pre-created definition node for this function that PrimaryExpr
2527     * put in tc->lexdeps on first forward reference, and recycle pn.
2528     */
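            /*
             * For illustration (hypothetical script):
             *
             *     g();              // creates a placeholder for g in lexdeps
             *     function g() {}   // claims that placeholder here
             *
             * The placeholder node becomes the function's definition node and
             * the TOK_FUNCTION node made above is recycled.
             */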
2529     JSHashEntry **hep;
2530    
2531     ale = tc->lexdeps.rawLookup(funAtom, hep);
2532     if (ale) {
2533     JSDefinition *fn = ALE_DEFN(ale);
2534    
2535     JS_ASSERT(fn->pn_defn);
2536     fn->pn_type = TOK_FUNCTION;
2537     fn->pn_arity = PN_FUNC;
2538     fn->pn_pos.begin = pn->pn_pos.begin;
2539     fn->pn_body = NULL;
2540     fn->pn_cookie = FREE_UPVAR_COOKIE;
2541    
2542     tc->lexdeps.rawRemove(tc->compiler, ale, hep);
2543     RecycleTree(pn, tc);
2544     pn = fn;
2545     }
2546    
2547     if (!Define(pn, funAtom, tc))
2548 siliconforks 332 return NULL;
2549     }
2550    
2551     /*
2552     * A function nested at top level inside another's body needs only a
2553     * local variable to bind its name to its value, and not an activation
2554     * object property (it might also need the activation property, if the
2555     * outer function contains with statements, e.g., but the stack slot
2556     * wins when jsemit.c's BindNameToSlot can optimize a JSOP_NAME into a
2557     * JSOP_GETLOCAL bytecode).
2558     */
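    /*
     * For illustration (hypothetical script): in
     *
     *     function outer() {
     *         function helper() { return 1; }
     *         return helper();
     *     }
     *
     * helper's name is bound to a local slot of outer, so the call site can be
     * optimized from JSOP_NAME to JSOP_GETLOCAL.
     */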
2559 siliconforks 460 if (topLevel) {
2560     pn->pn_dflags |= PND_TOPLEVEL;
2561 siliconforks 332
2562 siliconforks 460 if (tc->flags & TCF_IN_FUNCTION) {
2563     JSLocalKind localKind;
2564     uintN index;
2565    
2566     /*
2567     * Define a local in the outer function so that BindNameToSlot
2568     * can properly optimize accesses. Note that we need a local
2569     * variable, not an argument, for the function statement. Thus
2570     * we add a variable even if a parameter with the given name
2571     * already exists.
2572     */
2573     localKind = js_LookupLocal(cx, tc->fun, funAtom, &index);
2574     switch (localKind) {
2575     case JSLOCAL_NONE:
2576     case JSLOCAL_ARG:
2577     index = tc->fun->u.i.nvars;
2578     if (!js_AddLocal<