1 |
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
2 |
* vim: set ts=8 sw=4 et tw=78: |
* vim: set ts=8 sw=4 et tw=78: |
3 |
* |
* |
4 |
* ***** BEGIN LICENSE BLOCK ***** |
* ***** BEGIN LICENSE BLOCK ***** |
56 |
#include "jspubtd.h" |
#include "jspubtd.h" |
57 |
#include "jsregexp.h" |
#include "jsregexp.h" |
58 |
#include "jsutil.h" |
#include "jsutil.h" |
59 |
|
#include "jsarray.h" |
60 |
JS_BEGIN_EXTERN_C |
#include "jstask.h" |
61 |
|
|
62 |
/* |
/* |
63 |
* js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a |
* js_GetSrcNote cache to avoid O(n^2) growth in finding a source note for a |
93 |
|
|
94 |
#ifdef __cplusplus |
#ifdef __cplusplus |
95 |
namespace nanojit { |
namespace nanojit { |
96 |
|
class Assembler; |
97 |
|
class CodeAlloc; |
98 |
class Fragment; |
class Fragment; |
|
class Fragmento; |
|
99 |
class LirBuffer; |
class LirBuffer; |
100 |
|
#ifdef DEBUG |
101 |
|
class LabelMap; |
102 |
|
#endif |
103 |
|
extern "C++" { |
104 |
|
template<typename K> class DefaultHash; |
105 |
|
template<typename K, typename V, typename H> class HashMap; |
106 |
|
template<typename T> class Seq; |
107 |
|
} |
108 |
} |
} |
109 |
|
#if defined(JS_JIT_SPEW) || defined(DEBUG) |
110 |
|
struct FragPI; |
111 |
|
typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap; |
112 |
|
#endif |
113 |
class TraceRecorder; |
class TraceRecorder; |
114 |
|
class VMAllocator; |
115 |
extern "C++" { template<typename T> class Queue; } |
extern "C++" { template<typename T> class Queue; } |
116 |
typedef Queue<uint16> SlotList; |
typedef Queue<uint16> SlotList; |
117 |
|
|
123 |
#define FRAGMENT_TABLE_SIZE 512 |
#define FRAGMENT_TABLE_SIZE 512 |
124 |
struct VMFragment; |
struct VMFragment; |
125 |
|
|
126 |
|
#ifdef __cplusplus |
127 |
|
struct REHashKey; |
128 |
|
struct REHashFn; |
129 |
|
typedef nanojit::HashMap<REHashKey, nanojit::Fragment*, REHashFn> REHashMap; |
130 |
|
#endif |
131 |
|
|
132 |
#define MONITOR_N_GLOBAL_STATES 4 |
#define MONITOR_N_GLOBAL_STATES 4 |
133 |
struct GlobalState { |
struct GlobalState { |
134 |
JSObject* globalObj; |
JSObject* globalObj; |
148 |
* last-ditch GC and suppress calls to JS_ReportOutOfMemory. |
* last-ditch GC and suppress calls to JS_ReportOutOfMemory. |
149 |
* |
* |
150 |
* !tracecx && !recorder: not on trace |
* !tracecx && !recorder: not on trace |
151 |
* !tracecx && !recorder && prohibitFlush: deep-bailed |
* !tracecx && recorder: recording |
|
* !tracecx && recorder && !recorder->deepAborted: recording |
|
|
* !tracecx && recorder && recorder->deepAborted: deep aborted |
|
152 |
* tracecx && !recorder: executing a trace |
* tracecx && !recorder: executing a trace |
153 |
* tracecx && recorder: executing inner loop, recording outer loop |
* tracecx && recorder: executing inner loop, recording outer loop |
154 |
*/ |
*/ |
155 |
JSContext *tracecx; |
JSContext *tracecx; |
156 |
|
|
157 |
|
CLS(VMAllocator) dataAlloc; /* A chunk allocator for LIR. */ |
158 |
|
CLS(VMAllocator) tempAlloc; /* A temporary chunk allocator. */ |
159 |
|
CLS(nanojit::CodeAlloc) codeAlloc; /* An allocator for native code. */ |
160 |
|
CLS(nanojit::Assembler) assembler; |
161 |
CLS(nanojit::LirBuffer) lirbuf; |
CLS(nanojit::LirBuffer) lirbuf; |
162 |
CLS(nanojit::Fragmento) fragmento; |
CLS(nanojit::LirBuffer) reLirBuf; |
163 |
|
#ifdef DEBUG |
164 |
|
CLS(nanojit::LabelMap) labels; |
165 |
|
#endif |
166 |
|
|
167 |
CLS(TraceRecorder) recorder; |
CLS(TraceRecorder) recorder; |
168 |
jsval *reservedDoublePool; |
jsval *reservedDoublePool; |
169 |
jsval *reservedDoublePoolPtr; |
jsval *reservedDoublePoolPtr; |
182 |
* If nonzero, do not flush the JIT cache after a deep bail. That would |
* If nonzero, do not flush the JIT cache after a deep bail. That would |
183 |
* free JITted code pages that we will later return to. Instead, set the |
* free JITted code pages that we will later return to. Instead, set the |
184 |
* needFlush flag so that it can be flushed later. |
* needFlush flag so that it can be flushed later. |
|
* |
|
|
* NB: needFlush and useReservedObjects are packed together. |
|
185 |
*/ |
*/ |
186 |
uintN prohibitFlush; |
JSBool needFlush; |
|
JSPackedBool needFlush; |
|
187 |
|
|
188 |
/* |
/* |
189 |
* reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects. |
* reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects. |
190 |
* The JIT uses this to ensure that leaving a trace tree can't fail. |
* The JIT uses this to ensure that leaving a trace tree can't fail. |
191 |
*/ |
*/ |
192 |
JSPackedBool useReservedObjects; |
JSBool useReservedObjects; |
193 |
JSObject *reservedObjects; |
JSObject *reservedObjects; |
194 |
|
|
195 |
/* Fragmento for the regular expression compiler. This is logically |
/* |
196 |
* a distinct compiler but needs to be managed in exactly the same |
* Fragment map for the regular expression compiler. |
197 |
* way as the real tracing Fragmento. */ |
*/ |
198 |
CLS(nanojit::LirBuffer) reLirBuf; |
CLS(REHashMap) reFragments; |
199 |
CLS(nanojit::Fragmento) reFragmento; |
|
200 |
|
/* |
201 |
|
* A temporary allocator for RE recording. |
202 |
|
*/ |
203 |
|
CLS(VMAllocator) reTempAlloc; |
204 |
|
|
205 |
|
#ifdef __cplusplus /* Allow inclusion from LiveConnect C files. */ |
206 |
|
|
207 |
|
#ifdef DEBUG |
208 |
|
/* Fields needed for fragment/guard profiling. */ |
209 |
|
CLS(nanojit::Seq<nanojit::Fragment*>) branches; |
210 |
|
uint32 lastFragID; |
211 |
|
/* |
212 |
|
* profAlloc has a lifetime which spans exactly from js_InitJIT to |
213 |
|
* js_FinishJIT. |
214 |
|
*/ |
215 |
|
CLS(VMAllocator) profAlloc; |
216 |
|
CLS(FragStatsMap) profTab; |
217 |
|
#endif |
218 |
|
|
219 |
|
/* Flush the JIT cache. */ |
220 |
|
void flush(); |
221 |
|
|
222 |
/* Keep a list of recorders we need to abort on cache flush. */ |
/* Mark all objects baked into native code in the code cache. */ |
223 |
CLS(TraceRecorder) abortStack; |
void mark(JSTracer *trc); |
224 |
|
|
225 |
|
#endif |
226 |
}; |
}; |
227 |
|
|
228 |
typedef struct InterpStruct InterpStruct; |
typedef struct InterpStruct InterpStruct; |
284 |
/* Property cache for faster call/get/set invocation. */ |
/* Property cache for faster call/get/set invocation. */ |
285 |
JSPropertyCache propertyCache; |
JSPropertyCache propertyCache; |
286 |
|
|
287 |
|
/* Random number generator state, used by jsmath.cpp. */ |
288 |
|
int64 rngSeed; |
289 |
|
|
290 |
#ifdef JS_TRACER |
#ifdef JS_TRACER |
291 |
/* Trace-tree JIT recorder/interpreter state. */ |
/* Trace-tree JIT recorder/interpreter state. */ |
292 |
JSTraceMonitor traceMonitor; |
JSTraceMonitor traceMonitor; |
298 |
#ifdef JS_EVAL_CACHE_METERING |
#ifdef JS_EVAL_CACHE_METERING |
299 |
JSEvalCacheMeter evalCacheMeter; |
JSEvalCacheMeter evalCacheMeter; |
300 |
#endif |
#endif |
301 |
|
|
302 |
|
/* |
303 |
|
* Thread-local version of JSRuntime.gcMallocBytes to avoid taking |
304 |
|
* locks on each JS_malloc. |
305 |
|
*/ |
306 |
|
size_t gcMallocBytes; |
307 |
|
|
308 |
|
#ifdef __cplusplus /* Allow inclusion from LiveConnect C files. */ |
309 |
|
|
310 |
|
/* |
311 |
|
* Cache of reusable JSNativeEnumerators mapped by shape identifiers (as |
312 |
|
* stored in scope->shape). This cache is nulled by the GC and protected |
313 |
|
* by gcLock. |
314 |
|
*/ |
315 |
|
#define NATIVE_ENUM_CACHE_LOG2 8 |
316 |
|
#define NATIVE_ENUM_CACHE_MASK JS_BITMASK(NATIVE_ENUM_CACHE_LOG2) |
317 |
|
#define NATIVE_ENUM_CACHE_SIZE JS_BIT(NATIVE_ENUM_CACHE_LOG2) |
318 |
|
|
319 |
|
#define NATIVE_ENUM_CACHE_HASH(shape) \ |
320 |
|
((((shape) >> NATIVE_ENUM_CACHE_LOG2) ^ (shape)) & NATIVE_ENUM_CACHE_MASK) |
321 |
|
|
322 |
|
jsuword nativeEnumCache[NATIVE_ENUM_CACHE_SIZE]; |
323 |
|
|
324 |
|
#ifdef JS_THREADSAFE |
325 |
|
/* |
326 |
|
* Deallocator task for this thread. |
327 |
|
*/ |
328 |
|
JSFreePointerListTask *deallocatorTask; |
329 |
|
#endif |
330 |
|
|
331 |
|
void mark(JSTracer *trc) { |
332 |
|
#ifdef JS_TRACER |
333 |
|
traceMonitor.mark(trc); |
334 |
|
#endif |
335 |
|
} |
336 |
|
|
337 |
|
#endif /* __cplusplus */ |
338 |
}; |
}; |
339 |
|
|
340 |
#ifdef JS_THREADSAFE |
#ifdef JS_THREADSAFE |
350 |
/* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */ |
/* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */ |
351 |
jsword id; |
jsword id; |
352 |
|
|
|
/* |
|
|
* Thread-local version of JSRuntime.gcMallocBytes to avoid taking |
|
|
* locks on each JS_malloc. |
|
|
*/ |
|
|
uint32 gcMallocBytes; |
|
|
|
|
353 |
/* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */ |
/* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */ |
354 |
JSTitle *titleToShare; |
JSTitle *titleToShare; |
355 |
|
|
356 |
|
JSGCThing *gcFreeLists[GC_NUM_FREELISTS]; |
357 |
|
|
358 |
/* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */ |
/* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */ |
359 |
JSThreadData data; |
JSThreadData data; |
360 |
}; |
}; |
366 |
JSThread *thread; |
JSThread *thread; |
367 |
}; |
}; |
368 |
|
|
369 |
|
extern JSThread * |
370 |
|
js_CurrentThread(JSRuntime *rt); |
371 |
|
|
372 |
/* |
/* |
373 |
* The function takes the GC lock and does not release in successful return. |
* The function takes the GC lock and does not release in successful return. |
374 |
* On error (out of memory) the function releases the lock but delegates |
* On error (out of memory) the function releases the lock but delegates |
402 |
typedef enum JSBuiltinFunctionId { |
typedef enum JSBuiltinFunctionId { |
403 |
JSBUILTIN_ObjectToIterator, |
JSBUILTIN_ObjectToIterator, |
404 |
JSBUILTIN_CallIteratorNext, |
JSBUILTIN_CallIteratorNext, |
|
JSBUILTIN_GetProperty, |
|
|
JSBUILTIN_GetElement, |
|
|
JSBUILTIN_SetProperty, |
|
|
JSBUILTIN_SetElement, |
|
405 |
JSBUILTIN_LIMIT |
JSBUILTIN_LIMIT |
406 |
} JSBuiltinFunctionId; |
} JSBuiltinFunctionId; |
407 |
|
|
445 |
JSGCChunkInfo *gcChunkList; |
JSGCChunkInfo *gcChunkList; |
446 |
JSGCArenaList gcArenaList[GC_NUM_FREELISTS]; |
JSGCArenaList gcArenaList[GC_NUM_FREELISTS]; |
447 |
JSGCDoubleArenaList gcDoubleArenaList; |
JSGCDoubleArenaList gcDoubleArenaList; |
|
JSGCFreeListSet *gcFreeListsPool; |
|
448 |
JSDHashTable gcRootsHash; |
JSDHashTable gcRootsHash; |
449 |
JSDHashTable *gcLocksHash; |
JSDHashTable *gcLocksHash; |
450 |
jsrefcount gcKeepAtoms; |
jsrefcount gcKeepAtoms; |
451 |
uint32 gcBytes; |
size_t gcBytes; |
452 |
uint32 gcLastBytes; |
size_t gcLastBytes; |
453 |
uint32 gcMaxBytes; |
size_t gcMaxBytes; |
454 |
uint32 gcMaxMallocBytes; |
size_t gcMaxMallocBytes; |
455 |
uint32 gcEmptyArenaPoolLifespan; |
uint32 gcEmptyArenaPoolLifespan; |
456 |
uint32 gcLevel; |
uint32 gcLevel; |
457 |
uint32 gcNumber; |
uint32 gcNumber; |
458 |
JSTracer *gcMarkingTracer; |
JSTracer *gcMarkingTracer; |
459 |
uint32 gcTriggerFactor; |
uint32 gcTriggerFactor; |
460 |
|
size_t gcTriggerBytes; |
461 |
volatile JSBool gcIsNeeded; |
volatile JSBool gcIsNeeded; |
462 |
|
volatile JSBool gcFlushCodeCaches; |
463 |
|
|
464 |
/* |
/* |
465 |
* NB: do not pack another flag here by claiming gcPadding unless the new |
* NB: do not pack another flag here by claiming gcPadding unless the new |
469 |
*/ |
*/ |
470 |
JSPackedBool gcPoke; |
JSPackedBool gcPoke; |
471 |
JSPackedBool gcRunning; |
JSPackedBool gcRunning; |
472 |
uint16 gcPadding; |
JSPackedBool gcRegenShapes; |
473 |
|
|
474 |
|
/* |
475 |
|
* During gc, if rt->gcRegenShapes && |
476 |
|
* (scope->flags & JSScope::SHAPE_REGEN) == rt->gcRegenShapesScopeFlag, |
477 |
|
* then the scope's shape has already been regenerated during this GC. |
478 |
|
* To avoid having to sweep JSScopes, the bit's meaning toggles with each |
479 |
|
* shape-regenerating GC. |
480 |
|
* |
481 |
|
* FIXME Once scopes are GC'd (bug 505004), this will be obsolete. |
482 |
|
*/ |
483 |
|
uint8 gcRegenShapesScopeFlag; |
484 |
|
|
485 |
#ifdef JS_GC_ZEAL |
#ifdef JS_GC_ZEAL |
486 |
jsrefcount gcZeal; |
jsrefcount gcZeal; |
487 |
#endif |
#endif |
488 |
|
|
489 |
JSGCCallback gcCallback; |
JSGCCallback gcCallback; |
490 |
uint32 gcMallocBytes; |
size_t gcMallocBytes; |
491 |
JSGCArenaInfo *gcUntracedArenaStackTop; |
JSGCArenaInfo *gcUntracedArenaStackTop; |
492 |
#ifdef DEBUG |
#ifdef DEBUG |
493 |
size_t gcTraceLaterCount; |
size_t gcTraceLaterCount; |
514 |
*/ |
*/ |
515 |
JSSetSlotRequest *setSlotRequests; |
JSSetSlotRequest *setSlotRequests; |
516 |
|
|
|
/* Random number generator state, used by jsmath.c. */ |
|
|
JSBool rngInitialized; |
|
|
int64 rngMultiplier; |
|
|
int64 rngAddend; |
|
|
int64 rngMask; |
|
|
int64 rngSeed; |
|
|
jsdouble rngDscale; |
|
|
|
|
517 |
/* Well-known numbers held for use by this runtime's contexts. */ |
/* Well-known numbers held for use by this runtime's contexts. */ |
518 |
jsdouble *jsNaN; |
jsdouble *jsNaN; |
519 |
jsdouble *jsNegativeInfinity; |
jsdouble *jsNegativeInfinity; |
527 |
uint32 deflatedStringCacheBytes; |
uint32 deflatedStringCacheBytes; |
528 |
#endif |
#endif |
529 |
|
|
|
/* |
|
|
* Empty and unit-length strings held for use by this runtime's contexts. |
|
|
* The unitStrings array and its elements are created on demand. |
|
|
*/ |
|
530 |
JSString *emptyString; |
JSString *emptyString; |
|
JSString **unitStrings; |
|
531 |
|
|
532 |
/* |
/* |
533 |
* Builtin functions, lazily created and held for use by the trace recorder. |
* Builtin functions, lazily created and held for use by the trace recorder. |
543 |
/* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */ |
/* Per runtime debug hooks -- see jsprvtd.h and jsdbgapi.h. */ |
544 |
JSDebugHooks globalDebugHooks; |
JSDebugHooks globalDebugHooks; |
545 |
|
|
546 |
|
#ifdef JS_TRACER |
547 |
|
/* True if any debug hooks not supported by the JIT are enabled. */ |
548 |
|
bool debuggerInhibitsJIT() const { |
549 |
|
return (globalDebugHooks.interruptHandler || |
550 |
|
globalDebugHooks.callHook || |
551 |
|
globalDebugHooks.objectHook); |
552 |
|
} |
553 |
|
#endif |
554 |
|
|
555 |
/* More debugging state, see jsdbgapi.c. */ |
/* More debugging state, see jsdbgapi.c. */ |
556 |
JSCList trapList; |
JSCList trapList; |
557 |
JSCList watchPointList; |
JSCList watchPointList; |
618 |
|
|
619 |
/* |
/* |
620 |
* Shared scope property tree, and arena-pool for allocating its nodes. |
* Shared scope property tree, and arena-pool for allocating its nodes. |
621 |
* The propertyRemovals counter is incremented for every js_ClearScope, |
* The propertyRemovals counter is incremented for every JSScope::clear, |
622 |
* and for each js_RemoveScopeProperty that frees a slot in an object. |
* and for each JSScope::remove method call that frees a slot in an object. |
623 |
* See js_NativeGet and js_NativeSet in jsobj.c. |
* See js_NativeGet and js_NativeSet in jsobj.c. |
624 |
*/ |
*/ |
625 |
JSDHashTable propertyTreeHash; |
JSDHashTable propertyTreeHash; |
649 |
JSObject *anynameObject; |
JSObject *anynameObject; |
650 |
JSObject *functionNamespaceObject; |
JSObject *functionNamespaceObject; |
651 |
|
|
|
/* |
|
|
* A helper list for the GC, so it can mark native iterator states. See |
|
|
* js_TraceNativeEnumerators for details. |
|
|
*/ |
|
|
JSNativeEnumerator *nativeEnumerators; |
|
|
|
|
652 |
#ifndef JS_THREADSAFE |
#ifndef JS_THREADSAFE |
653 |
JSThreadData threadData; |
JSThreadData threadData; |
654 |
|
|
674 |
JSAtomState atomState; |
JSAtomState atomState; |
675 |
|
|
676 |
/* |
/* |
|
* Cache of reusable JSNativeEnumerators mapped by shape identifiers (as |
|
|
* stored in scope->shape). This cache is nulled by the GC and protected |
|
|
* by gcLock. |
|
|
*/ |
|
|
#define NATIVE_ENUM_CACHE_LOG2 8 |
|
|
#define NATIVE_ENUM_CACHE_MASK JS_BITMASK(NATIVE_ENUM_CACHE_LOG2) |
|
|
#define NATIVE_ENUM_CACHE_SIZE JS_BIT(NATIVE_ENUM_CACHE_LOG2) |
|
|
|
|
|
#define NATIVE_ENUM_CACHE_HASH(shape) \ |
|
|
((((shape) >> NATIVE_ENUM_CACHE_LOG2) ^ (shape)) & NATIVE_ENUM_CACHE_MASK) |
|
|
|
|
|
jsuword nativeEnumCache[NATIVE_ENUM_CACHE_SIZE]; |
|
|
|
|
|
/* |
|
677 |
* Various metering fields are defined at the end of JSRuntime. In this |
* Various metering fields are defined at the end of JSRuntime. In this |
678 |
* way there is no need to recompile all the code that refers to other |
* way there is no need to recompile all the code that refers to other |
679 |
* fields of JSRuntime after enabling the corresponding metering macro. |
* fields of JSRuntime after enabling the corresponding metering macro. |
751 |
JSFunctionMeter functionMeter; |
JSFunctionMeter functionMeter; |
752 |
char lastScriptFilename[1024]; |
char lastScriptFilename[1024]; |
753 |
#endif |
#endif |
754 |
|
|
755 |
|
#ifdef __cplusplus /* Allow inclusion from LiveConnect C files. */ |
756 |
|
|
757 |
|
void setGCTriggerFactor(uint32 factor); |
758 |
|
void setGCLastBytes(size_t lastBytes); |
759 |
|
|
760 |
|
inline void* malloc(size_t bytes) { |
761 |
|
return ::js_malloc(bytes); |
762 |
|
} |
763 |
|
|
764 |
|
inline void* calloc(size_t bytes) { |
765 |
|
return ::js_calloc(bytes); |
766 |
|
} |
767 |
|
|
768 |
|
inline void* realloc(void* p, size_t bytes) { |
769 |
|
return ::js_realloc(p, bytes); |
770 |
|
} |
771 |
|
|
772 |
|
inline void free(void* p) { |
773 |
|
::js_free(p); |
774 |
|
} |
775 |
|
|
776 |
|
#ifdef JS_THREADSAFE |
777 |
|
JSBackgroundThread *deallocatorThread; |
778 |
|
#endif |
779 |
|
|
780 |
|
#endif /* __cplusplus */ |
781 |
}; |
}; |
782 |
|
|
783 |
/* Common macros to access thread-local caches in JSThread or JSRuntime. */ |
/* Common macros to access thread-local caches in JSThread or JSRuntime. */ |
880 |
* the following constants: |
* the following constants: |
881 |
*/ |
*/ |
882 |
#define JSTVU_SINGLE (-1) /* u.value or u.<gcthing> is single jsval |
#define JSTVU_SINGLE (-1) /* u.value or u.<gcthing> is single jsval |
883 |
or GC-thing */ |
or non-JSString GC-thing pointer */ |
884 |
#define JSTVU_TRACE (-2) /* u.trace is a hook to trace a custom |
#define JSTVU_TRACE (-2) /* u.trace is a hook to trace a custom |
885 |
* structure */ |
* structure */ |
886 |
#define JSTVU_SPROP (-3) /* u.sprop roots property tree node */ |
#define JSTVU_SPROP (-3) /* u.sprop roots property tree node */ |
887 |
#define JSTVU_WEAK_ROOTS (-4) /* u.weakRoots points to saved weak roots */ |
#define JSTVU_WEAK_ROOTS (-4) /* u.weakRoots points to saved weak roots */ |
888 |
#define JSTVU_COMPILER (-5) /* u.compiler roots JSCompiler* */ |
#define JSTVU_COMPILER (-5) /* u.compiler roots JSCompiler* */ |
889 |
#define JSTVU_SCRIPT (-6) /* u.script roots JSScript* */ |
#define JSTVU_SCRIPT (-6) /* u.script roots JSScript* */ |
890 |
|
#define JSTVU_ENUMERATOR (-7) /* a pointer to JSTempValueRooter points |
891 |
|
to an instance of JSAutoEnumStateRooter |
892 |
|
with u.object storing the enumeration |
893 |
|
object */ |
894 |
|
|
895 |
/* |
/* |
896 |
* Here single JSTVU_SINGLE covers both jsval and pointers to any GC-thing via |
* Here single JSTVU_SINGLE covers both jsval and pointers to almost (see note |
897 |
* reinterpreting the thing as JSVAL_OBJECT. It works because the GC-thing is |
* below) any GC-thing via reinterpreting the thing as JSVAL_OBJECT. This works |
898 |
* aligned on a 0 mod 8 boundary, and object has the 0 jsval tag. So any |
* because the GC-thing is aligned on a 0 mod 8 boundary, and object has the 0 |
899 |
* GC-thing may be tagged as if it were an object and untagged, if it's then |
* jsval tag. So any GC-heap-allocated thing pointer may be tagged as if it |
900 |
* used only as an opaque pointer until discriminated by other means than tag |
* were an object and untagged, if it's then used only as an opaque pointer |
901 |
* bits. This is how, for example, js_GetGCThingTraceKind uses its |thing| |
* until discriminated by other means than tag bits. This is how, for example, |
902 |
* parameter -- it consults GC-thing flags stored separately from the thing to |
* js_GetGCThingTraceKind uses its |thing| parameter -- it consults GC-thing |
903 |
* decide the kind of thing. |
* flags stored separately from the thing to decide the kind of thing. |
904 |
|
* |
905 |
|
* Note well that JSStrings may be statically allocated (see the intStringTable |
906 |
|
* and unitStringTable static arrays), so this hack does not work for arbitrary |
907 |
|
* GC-thing pointers. |
908 |
*/ |
*/ |
909 |
#define JS_PUSH_TEMP_ROOT_COMMON(cx,x,tvr,cnt,kind) \ |
#define JS_PUSH_TEMP_ROOT_COMMON(cx,x,tvr,cnt,kind) \ |
910 |
JS_BEGIN_MACRO \ |
JS_BEGIN_MACRO \ |
934 |
JS_PUSH_TEMP_ROOT_COMMON(cx, obj, tvr, JSTVU_SINGLE, object) |
JS_PUSH_TEMP_ROOT_COMMON(cx, obj, tvr, JSTVU_SINGLE, object) |
935 |
|
|
936 |
#define JS_PUSH_TEMP_ROOT_STRING(cx,str,tvr) \ |
#define JS_PUSH_TEMP_ROOT_STRING(cx,str,tvr) \ |
937 |
JS_PUSH_TEMP_ROOT_COMMON(cx, str, tvr, JSTVU_SINGLE, string) |
JS_PUSH_SINGLE_TEMP_ROOT(cx, str ? STRING_TO_JSVAL(str) : JSVAL_NULL, tvr) |
938 |
|
|
939 |
#define JS_PUSH_TEMP_ROOT_XML(cx,xml_,tvr) \ |
#define JS_PUSH_TEMP_ROOT_XML(cx,xml_,tvr) \ |
940 |
JS_PUSH_TEMP_ROOT_COMMON(cx, xml_, tvr, JSTVU_SINGLE, xml) |
JS_PUSH_TEMP_ROOT_COMMON(cx, xml_, tvr, JSTVU_SINGLE, xml) |
954 |
#define JS_PUSH_TEMP_ROOT_SCRIPT(cx,script_,tvr) \ |
#define JS_PUSH_TEMP_ROOT_SCRIPT(cx,script_,tvr) \ |
955 |
JS_PUSH_TEMP_ROOT_COMMON(cx, script_, tvr, JSTVU_SCRIPT, script) |
JS_PUSH_TEMP_ROOT_COMMON(cx, script_, tvr, JSTVU_SCRIPT, script) |
956 |
|
|
|
|
|
957 |
#define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */ |
#define JSRESOLVE_INFER 0xffff /* infer bits from current bytecode */ |
958 |
|
|
959 |
struct JSContext { |
struct JSContext { |
1035 |
size_t scriptStackQuota; |
size_t scriptStackQuota; |
1036 |
|
|
1037 |
/* Data shared by threads in an address space. */ |
/* Data shared by threads in an address space. */ |
1038 |
JSRuntime *runtime; |
JSRuntime * const runtime; |
1039 |
|
|
1040 |
|
explicit JSContext(JSRuntime *rt) : runtime(rt) {} |
1041 |
|
|
1042 |
/* Stack arena pool and frame pointer register. */ |
/* Stack arena pool and frame pointer register. */ |
1043 |
JS_REQUIRES_STACK |
JS_REQUIRES_STACK |
1060 |
|
|
1061 |
/* State for object and array toSource conversion. */ |
/* State for object and array toSource conversion. */ |
1062 |
JSSharpObjectMap sharpObjectMap; |
JSSharpObjectMap sharpObjectMap; |
1063 |
|
JSHashTable *busyArrayTable; |
1064 |
|
|
1065 |
/* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */ |
/* Argument formatter support for JS_{Convert,Push}Arguments{,VA}. */ |
1066 |
JSArgumentFormatMap *argumentFormatMap; |
JSArgumentFormatMap *argumentFormatMap; |
1109 |
/* Stack of thread-stack-allocated temporary GC roots. */ |
/* Stack of thread-stack-allocated temporary GC roots. */ |
1110 |
JSTempValueRooter *tempValueRooters; |
JSTempValueRooter *tempValueRooters; |
1111 |
|
|
|
#ifdef JS_THREADSAFE |
|
|
JSGCFreeListSet *gcLocalFreeLists; |
|
|
#endif |
|
|
|
|
1112 |
/* List of pre-allocated doubles. */ |
/* List of pre-allocated doubles. */ |
1113 |
JSGCDoubleCell *doubleFreeList; |
JSGCDoubleCell *doubleFreeList; |
1114 |
|
|
1115 |
/* Debug hooks associated with the current context. */ |
/* Debug hooks associated with the current context. */ |
1116 |
JSDebugHooks *debugHooks; |
const JSDebugHooks *debugHooks; |
1117 |
|
|
1118 |
/* Security callbacks that override any defined on the runtime. */ |
/* Security callbacks that override any defined on the runtime. */ |
1119 |
JSSecurityCallbacks *securityCallbacks; |
JSSecurityCallbacks *securityCallbacks; |
1133 |
InterpState *interpState; |
InterpState *interpState; |
1134 |
VMSideExit *bailExit; |
VMSideExit *bailExit; |
1135 |
|
|
1136 |
/* Used when calling natives from trace to root the vp vector. */ |
/* |
1137 |
uintN nativeVpLen; |
* True if traces may be executed. Invariant: The value of jitEnabled is |
1138 |
jsval *nativeVp; |
* always equal to the expression in updateJITEnabled below. |
1139 |
|
* |
1140 |
|
* This flag and the fields accessed by updateJITEnabled are written only |
1141 |
|
* in runtime->gcLock, to avoid race conditions that would leave the wrong |
1142 |
|
* value in jitEnabled. (But the interpreter reads this without |
1143 |
|
* locking. That can race against another thread setting debug hooks, but |
1144 |
|
* we always read cx->debugHooks without locking anyway.) |
1145 |
|
*/ |
1146 |
|
bool jitEnabled; |
1147 |
|
#endif |
1148 |
|
|
1149 |
|
#ifdef __cplusplus /* Allow inclusion from LiveConnect C files, */ |
1150 |
|
|
1151 |
|
/* Caller must be holding runtime->gcLock. */ |
1152 |
|
void updateJITEnabled() { |
1153 |
|
#ifdef JS_TRACER |
1154 |
|
jitEnabled = ((options & JSOPTION_JIT) && |
1155 |
|
!runtime->debuggerInhibitsJIT() && |
1156 |
|
debugHooks == &runtime->globalDebugHooks); |
1157 |
|
#endif |
1158 |
|
} |
1159 |
|
|
1160 |
|
|
1161 |
|
#ifdef JS_THREADSAFE |
1162 |
|
inline void createDeallocatorTask() { |
1163 |
|
JSThreadData* tls = JS_THREAD_DATA(this); |
1164 |
|
JS_ASSERT(!tls->deallocatorTask); |
1165 |
|
if (runtime->deallocatorThread && !runtime->deallocatorThread->busy()) |
1166 |
|
tls->deallocatorTask = new JSFreePointerListTask(); |
1167 |
|
} |
1168 |
|
|
1169 |
|
inline void submitDeallocatorTask() { |
1170 |
|
JSThreadData* tls = JS_THREAD_DATA(this); |
1171 |
|
if (tls->deallocatorTask) { |
1172 |
|
runtime->deallocatorThread->schedule(tls->deallocatorTask); |
1173 |
|
tls->deallocatorTask = NULL; |
1174 |
|
} |
1175 |
|
} |
1176 |
|
#endif |
1177 |
|
|
1178 |
|
/* Call this after succesful malloc of memory for GC-related things. */ |
1179 |
|
inline void updateMallocCounter(size_t nbytes) { |
1180 |
|
size_t *pbytes, bytes; |
1181 |
|
|
1182 |
|
pbytes = &JS_THREAD_DATA(this)->gcMallocBytes; |
1183 |
|
bytes = *pbytes; |
1184 |
|
*pbytes = (size_t(-1) - bytes <= nbytes) ? size_t(-1) : bytes + nbytes; |
1185 |
|
} |
1186 |
|
|
1187 |
|
inline void* malloc(size_t bytes) { |
1188 |
|
JS_ASSERT(bytes != 0); |
1189 |
|
void *p = runtime->malloc(bytes); |
1190 |
|
if (!p) { |
1191 |
|
JS_ReportOutOfMemory(this); |
1192 |
|
return NULL; |
1193 |
|
} |
1194 |
|
updateMallocCounter(bytes); |
1195 |
|
return p; |
1196 |
|
} |
1197 |
|
|
1198 |
|
inline void* mallocNoReport(size_t bytes) { |
1199 |
|
JS_ASSERT(bytes != 0); |
1200 |
|
void *p = runtime->malloc(bytes); |
1201 |
|
if (!p) |
1202 |
|
return NULL; |
1203 |
|
updateMallocCounter(bytes); |
1204 |
|
return p; |
1205 |
|
} |
1206 |
|
|
1207 |
|
inline void* calloc(size_t bytes) { |
1208 |
|
JS_ASSERT(bytes != 0); |
1209 |
|
void *p = runtime->calloc(bytes); |
1210 |
|
if (!p) { |
1211 |
|
JS_ReportOutOfMemory(this); |
1212 |
|
return NULL; |
1213 |
|
} |
1214 |
|
updateMallocCounter(bytes); |
1215 |
|
return p; |
1216 |
|
} |
1217 |
|
|
1218 |
|
inline void* realloc(void* p, size_t bytes) { |
1219 |
|
void *orig = p; |
1220 |
|
p = runtime->realloc(p, bytes); |
1221 |
|
if (!p) { |
1222 |
|
JS_ReportOutOfMemory(this); |
1223 |
|
return NULL; |
1224 |
|
} |
1225 |
|
if (!orig) |
1226 |
|
updateMallocCounter(bytes); |
1227 |
|
return p; |
1228 |
|
} |
1229 |
|
|
1230 |
|
#ifdef JS_THREADSAFE |
1231 |
|
inline void free(void* p) { |
1232 |
|
if (!p) |
1233 |
|
return; |
1234 |
|
if (thread) { |
1235 |
|
JSFreePointerListTask* task = JS_THREAD_DATA(this)->deallocatorTask; |
1236 |
|
if (task) { |
1237 |
|
task->add(p); |
1238 |
|
return; |
1239 |
|
} |
1240 |
|
} |
1241 |
|
runtime->free(p); |
1242 |
|
} |
1243 |
|
#else |
1244 |
|
inline void free(void* p) { |
1245 |
|
if (!p) |
1246 |
|
return; |
1247 |
|
runtime->free(p); |
1248 |
|
} |
1249 |
#endif |
#endif |
1250 |
|
|
1251 |
|
/* |
1252 |
|
* In the common case that we'd like to allocate the memory for an object |
1253 |
|
* with cx->malloc/free, we cannot use overloaded C++ operators (no |
1254 |
|
* placement delete). Factor the common workaround into one place. |
1255 |
|
*/ |
1256 |
|
#define CREATE_BODY(parms) \ |
1257 |
|
void *memory = this->malloc(sizeof(T)); \ |
1258 |
|
if (!memory) { \ |
1259 |
|
JS_ReportOutOfMemory(this); \ |
1260 |
|
return NULL; \ |
1261 |
|
} \ |
1262 |
|
return new(memory) T parms; |
1263 |
|
|
1264 |
|
template <class T> |
1265 |
|
JS_ALWAYS_INLINE T *create() { |
1266 |
|
CREATE_BODY(()) |
1267 |
|
} |
1268 |
|
|
1269 |
|
template <class T, class P1> |
1270 |
|
JS_ALWAYS_INLINE T *create(const P1 &p1) { |
1271 |
|
CREATE_BODY((p1)) |
1272 |
|
} |
1273 |
|
|
1274 |
|
template <class T, class P1, class P2> |
1275 |
|
JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2) { |
1276 |
|
CREATE_BODY((p1, p2)) |
1277 |
|
} |
1278 |
|
|
1279 |
|
template <class T, class P1, class P2, class P3> |
1280 |
|
JS_ALWAYS_INLINE T *create(const P1 &p1, const P2 &p2, const P3 &p3) { |
1281 |
|
CREATE_BODY((p1, p2, p3)) |
1282 |
|
} |
1283 |
|
#undef CREATE_BODY |
1284 |
|
|
1285 |
|
template <class T> |
1286 |
|
JS_ALWAYS_INLINE void destroy(T *p) { |
1287 |
|
p->~T(); |
1288 |
|
this->free(p); |
1289 |
|
} |
1290 |
|
|
1291 |
|
#endif /* __cplusplus */ |
1292 |
}; |
}; |
1293 |
|
|
1294 |
#ifdef JS_THREADSAFE |
#ifdef JS_THREADSAFE |
1309 |
class JSAutoTempValueRooter |
class JSAutoTempValueRooter |
1310 |
{ |
{ |
1311 |
public: |
public: |
1312 |
JSAutoTempValueRooter(JSContext *cx, size_t len, jsval *vec) |
JSAutoTempValueRooter(JSContext *cx, size_t len, jsval *vec |
1313 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1314 |
: mContext(cx) { |
: mContext(cx) { |
1315 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1316 |
JS_PUSH_TEMP_ROOT(mContext, len, vec, &mTvr); |
JS_PUSH_TEMP_ROOT(mContext, len, vec, &mTvr); |
1317 |
} |
} |
1318 |
explicit JSAutoTempValueRooter(JSContext *cx, jsval v = JSVAL_NULL) |
explicit JSAutoTempValueRooter(JSContext *cx, jsval v = JSVAL_NULL |
1319 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1320 |
: mContext(cx) { |
: mContext(cx) { |
1321 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1322 |
JS_PUSH_SINGLE_TEMP_ROOT(mContext, v, &mTvr); |
JS_PUSH_SINGLE_TEMP_ROOT(mContext, v, &mTvr); |
1323 |
} |
} |
1324 |
JSAutoTempValueRooter(JSContext *cx, JSString *str) |
JSAutoTempValueRooter(JSContext *cx, JSString *str |
1325 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1326 |
: mContext(cx) { |
: mContext(cx) { |
1327 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1328 |
JS_PUSH_TEMP_ROOT_STRING(mContext, str, &mTvr); |
JS_PUSH_TEMP_ROOT_STRING(mContext, str, &mTvr); |
1329 |
} |
} |
1330 |
JSAutoTempValueRooter(JSContext *cx, JSObject *obj) |
JSAutoTempValueRooter(JSContext *cx, JSObject *obj |
1331 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1332 |
: mContext(cx) { |
: mContext(cx) { |
1333 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1334 |
JS_PUSH_TEMP_ROOT_OBJECT(mContext, obj, &mTvr); |
JS_PUSH_TEMP_ROOT_OBJECT(mContext, obj, &mTvr); |
1335 |
} |
} |
1336 |
|
|
1351 |
#endif |
#endif |
1352 |
|
|
1353 |
JSTempValueRooter mTvr; |
JSTempValueRooter mTvr; |
1354 |
|
JS_DECL_USE_GUARD_OBJECT_NOTIFIER |
1355 |
}; |
}; |
1356 |
|
|
1357 |
class JSAutoTempIdRooter |
class JSAutoTempIdRooter |
1358 |
{ |
{ |
1359 |
public: |
public: |
1360 |
explicit JSAutoTempIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0)) |
explicit JSAutoTempIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0) |
1361 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1362 |
: mContext(cx) { |
: mContext(cx) { |
1363 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1364 |
JS_PUSH_SINGLE_TEMP_ROOT(mContext, ID_TO_VALUE(id), &mTvr); |
JS_PUSH_SINGLE_TEMP_ROOT(mContext, ID_TO_VALUE(id), &mTvr); |
1365 |
} |
} |
1366 |
|
|
1371 |
jsid id() { return (jsid) mTvr.u.value; } |
jsid id() { return (jsid) mTvr.u.value; } |
1372 |
jsid * addr() { return (jsid *) &mTvr.u.value; } |
jsid * addr() { return (jsid *) &mTvr.u.value; } |
1373 |
|
|
1374 |
private: |
private: |
1375 |
JSContext *mContext; |
JSContext *mContext; |
1376 |
JSTempValueRooter mTvr; |
JSTempValueRooter mTvr; |
1377 |
|
JS_DECL_USE_GUARD_OBJECT_NOTIFIER |
1378 |
|
}; |
1379 |
|
|
1380 |
|
/* The auto-root for enumeration object and its state. */ |
1381 |
|
class JSAutoEnumStateRooter : public JSTempValueRooter |
1382 |
|
{ |
1383 |
|
public: |
1384 |
|
JSAutoEnumStateRooter(JSContext *cx, JSObject *obj, jsval *statep |
1385 |
|
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1386 |
|
: mContext(cx), mStatep(statep) |
1387 |
|
{ |
1388 |
|
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1389 |
|
JS_ASSERT(obj); |
1390 |
|
JS_ASSERT(statep); |
1391 |
|
JS_PUSH_TEMP_ROOT_COMMON(cx, obj, this, JSTVU_ENUMERATOR, object); |
1392 |
|
} |
1393 |
|
|
1394 |
|
~JSAutoEnumStateRooter() { |
1395 |
|
JS_POP_TEMP_ROOT(mContext, this); |
1396 |
|
} |
1397 |
|
|
1398 |
|
void mark(JSTracer *trc) { |
1399 |
|
JS_CALL_OBJECT_TRACER(trc, u.object, "enumerator_obj"); |
1400 |
|
js_MarkEnumeratorState(trc, u.object, *mStatep); |
1401 |
|
} |
1402 |
|
|
1403 |
|
private: |
1404 |
|
JSContext *mContext; |
1405 |
|
jsval *mStatep; |
1406 |
|
JS_DECL_USE_GUARD_OBJECT_NOTIFIER |
1407 |
}; |
}; |
1408 |
|
|
1409 |
/*
 * RAII guard for cx->resolveFlags: the constructor saves the current flags
 * in mSaved and installs |flags| for the dynamic extent of this object.
 */
class JSAutoResolveFlags |
class JSAutoResolveFlags |
1410 |
{ |
{ |
1411 |
public: |
public: |
1412 |
JSAutoResolveFlags(JSContext *cx, uintN flags) |
JSAutoResolveFlags(JSContext *cx, uintN flags |
1413 |
| 
JS_GUARD_OBJECT_NOTIFIER_PARAM) |
1414 |
: mContext(cx), mSaved(cx->resolveFlags) { |
: mContext(cx), mSaved(cx->resolveFlags) { |
1415 |
| 
JS_GUARD_OBJECT_NOTIFIER_INIT; |
1416 |
cx->resolveFlags = flags; |
cx->resolveFlags = flags; |
1417 |
} |
} |
1418 |
/*
 * NOTE(review): original lines 1419-1420 are elided in this diff view --
 * presumably the destructor restoring mSaved into cx->resolveFlags;
 * confirm upstream.
 */
| 
| 
1421 |
private: |
private: |
1422 |
JSContext *mContext; |
JSContext *mContext; |
1423 |
uintN mSaved; |
uintN mSaved; |
1424 |
| 
JS_DECL_USE_GUARD_OBJECT_NOTIFIER |
1425 |
}; |
}; |
1426 |
|
|
1427 |
#endif /* __cplusplus */ |
#endif /* __cplusplus */ |
1428 |
|
|
1429 |
/* |
/* |
1430 |
* Slightly more readable macros for testing per-context option settings (also |
* Slightly more readable macros for testing per-context option settings (also |
1469 |
#define JS_HAS_XML_OPTION(cx) ((cx)->version & JSVERSION_HAS_XML || \ |
#define JS_HAS_XML_OPTION(cx) ((cx)->version & JSVERSION_HAS_XML || \ |
1470 |
JSVERSION_NUMBER(cx) >= JSVERSION_1_6) |
JSVERSION_NUMBER(cx) >= JSVERSION_1_6) |
1471 |
|
|
1472 |
|
extern JSThreadData * |
1473 |
|
js_CurrentThreadData(JSRuntime *rt); |
1474 |
|
|
1475 |
extern JSBool |
extern JSBool |
1476 |
js_InitThreads(JSRuntime *rt); |
js_InitThreads(JSRuntime *rt); |
1477 |
|
|
1481 |
extern void |
extern void |
1482 |
js_PurgeThreads(JSContext *cx); |
js_PurgeThreads(JSContext *cx); |
1483 |
|
|
1484 |
|
extern void |
1485 |
|
js_TraceThreads(JSRuntime *rt, JSTracer *trc); |
1486 |
|
|
1487 |
/* |
/* |
1488 |
* Ensures the JSOPTION_XML and JSOPTION_ANONFUNFIX bits of cx->options are |
* Ensures the JSOPTION_XML and JSOPTION_ANONFUNFIX bits of cx->options are |
1489 |
* reflected in cx->version, since each bit must travel with a script that has |
* reflected in cx->version, since each bit must travel with a script that has |
1765 |
extern jsbytecode* |
extern jsbytecode* |
1766 |
js_GetCurrentBytecodePC(JSContext* cx); |
js_GetCurrentBytecodePC(JSContext* cx); |
1767 |
|
|
1768 |
|
#ifdef __cplusplus /* Allow inclusion from LiveConnect C files. */ |
1769 |
|
extern bool |
1770 |
|
#else |
1771 |
|
extern JSBool |
1772 |
|
#endif |
1773 |
|
js_CurrentPCIsInImacro(JSContext *cx); |
1774 |
|
|
1775 |
#ifdef JS_TRACER |
#ifdef JS_TRACER |
1776 |
/* |
/* |
1777 |
* Reconstruct the JS stack and clear cx->tracecx. We must be currently in a |
* Reconstruct the JS stack and clear cx->tracecx. We must be currently in a |
1833 |
/*
 * Produce the next object-shape generation number while the GC is running.
 *
 * Unlike js_GenerateShape, no atomic increment is required here (both GC
 * preconditions are asserted below), but the overflow bit must stay sticky:
 * once a shape has overflowed, every regenerated shape keeps the
 * SHAPE_OVERFLOW_BIT set.
 */
static JS_INLINE uint32
js_RegenerateShapeForGC(JSContext *cx)
{
    JS_ASSERT(cx->runtime->gcRunning);
    JS_ASSERT(cx->runtime->gcRegenShapes);

    const uint32 prev = cx->runtime->shapeGen;
    uint32 next = (prev + 1) | (prev & SHAPE_OVERFLOW_BIT);
    cx->runtime->shapeGen = next;
    return next;
}
1849 |
|
|
|
JS_END_EXTERN_C |
|
|
|
|
1850 |
#endif /* jscntxt_h___ */ |
#endif /* jscntxt_h___ */ |