Merge tracemonkey to mozilla-central.

Robert Sayre 2009-01-28 10:56:08 -08:00
commit 78d557da18
17 changed files with 474 additions and 206 deletions

View file

@@ -177,7 +177,7 @@ static PRUint32 sCCollectCount;
 static PRBool sUserIsActive;
 static PRTime sPreviousCCTime;
 static PRUint32 sCollectedObjectsCounts;
-static PRUint32 sGCCount;
+static PRUint32 sSavedGCCount;
 static PRUint32 sCCSuspectChanges;
 static PRUint32 sCCSuspectedCount;
 static nsITimer *sGCTimer;
@ -868,7 +868,6 @@ MaybeGC(JSContext *cx)
|| cx->runtime->gcZeal > 0 || cx->runtime->gcZeal > 0
#endif #endif
) { ) {
++sGCCount;
JS_GC(cx); JS_GC(cx);
} }
} }
@@ -3409,12 +3408,12 @@ nsJSContext::CC()
 #endif
   sPreviousCCTime = PR_Now();
   sDelayedCCollectCount = 0;
-  sGCCount = 0;
   sCCSuspectChanges = 0;
   // nsCycleCollector_collect() will run a ::JS_GC() indirectly, so
   // we do not explicitly call ::JS_GC() here.
   sCollectedObjectsCounts = nsCycleCollector_collect();
   sCCSuspectedCount = nsCycleCollector_suspectedCount();
+  sSavedGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER);
 #ifdef DEBUG_smaug
   printf("Collected %u objects, %u suspected objects, took %lldms\n",
          sCollectedObjectsCounts, sCCSuspectedCount,
@@ -3422,6 +3421,21 @@ nsJSContext::CC()
 #endif
 }
 
+static inline uint32
+GetGCRunsSinceLastCC()
+{
+  // To avoid crash if nsJSRuntime is not properly initialized.
+  // See the bug 474586
+  if (!nsJSRuntime::sRuntime)
+    return 0;
+
+  // Since JS_GetGCParameter() and sSavedGCCount are unsigned, the following
+  // gives the correct result even when the GC counter wraps around
+  // UINT32_MAX since the last call to JS_GetGCParameter().
+  return JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER) -
+         sSavedGCCount;
+}
+
 //static
 PRBool
 nsJSContext::MaybeCC(PRBool aHigherProbability)
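The new GetGCRunsSinceLastCC() above leans on modular unsigned arithmetic: both the current JSGC_NUMBER value and sSavedGCCount are uint32, so their difference is computed mod 2^32 and stays correct across counter wraparound. A minimal stand-alone C++ check of that arithmetic claim (not SpiderMonkey code):

```cpp
#include <cassert>
#include <cstdint>

int main() {
    // GC number sampled at the last cycle collection, close to wrapping.
    uint32_t saved = UINT32_MAX - 1;
    // GC number read later, after the counter wrapped past UINT32_MAX.
    uint32_t current = 3;
    // Unsigned subtraction is performed mod 2^32, so the run count is
    // still right: 2 increments to reach 0, then 3 more.
    assert(current - saved == 5);
    return 0;
}
```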
@@ -3430,7 +3444,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability)
 
   // Don't check suspected count if CC will be called anyway.
   if (sCCSuspectChanges <= NS_MIN_SUSPECT_CHANGES ||
-      sGCCount <= NS_MAX_GC_COUNT) {
+      GetGCRunsSinceLastCC() <= NS_MAX_GC_COUNT) {
 #ifdef DEBUG_smaug
     PRTime now = PR_Now();
 #endif
@ -3447,8 +3461,8 @@ nsJSContext::MaybeCC(PRBool aHigherProbability)
} }
} }
#ifdef DEBUG_smaug #ifdef DEBUG_smaug
printf("sCCSuspectChanges %u, sGCCount %u\n", printf("sCCSuspectChanges %u, GC runs %u\n",
sCCSuspectChanges, sGCCount); sCCSuspectChanges, GetGCRunsSinceLastCC());
#endif #endif
// Increase the probability also if the previous call to cycle collector // Increase the probability also if the previous call to cycle collector
@@ -3461,7 +3475,7 @@ nsJSContext::MaybeCC(PRBool aHigherProbability)
   if (!sGCTimer &&
       (sDelayedCCollectCount > NS_MAX_DELAYED_CCOLLECT) &&
       ((sCCSuspectChanges > NS_MIN_SUSPECT_CHANGES &&
-        sGCCount > NS_MAX_GC_COUNT) ||
+        GetGCRunsSinceLastCC() > NS_MAX_GC_COUNT) ||
        (sCCSuspectChanges > NS_MAX_SUSPECT_CHANGES))) {
     if ((PR_Now() - sPreviousCCTime) >=
         PRTime(NS_MIN_CC_INTERVAL * PR_USEC_PER_MSEC)) {
@@ -3691,7 +3705,7 @@ nsJSRuntime::Startup()
   sUserIsActive = PR_FALSE;
   sPreviousCCTime = 0;
   sCollectedObjectsCounts = 0;
-  sGCCount = 0;
+  sSavedGCCount = 0;
   sCCSuspectChanges = 0;
   sCCSuspectedCount = 0;
   sGCTimer = nsnull;
@@ -3798,6 +3812,8 @@ nsJSRuntime::Init()
   NS_ASSERTION(!gOldJSGCCallback,
                "nsJSRuntime initialized more than once");
 
+  sSavedGCCount = JS_GetGCParameter(nsJSRuntime::sRuntime, JSGC_NUMBER);
+
   // Save the old GC callback to chain to it, for GC-observing generality.
   gOldJSGCCallback = ::JS_SetGCCallbackRT(sRuntime, DOMGCCallback);

View file

@@ -81,8 +81,6 @@ BUILTIN3(extern, JSVAL, js_Any_getprop, CONTEXT, OBJECT, STRING,
 BUILTIN4(extern, BOOL, js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL, 0, 0)
 BUILTIN3(extern, JSVAL, js_Any_getelem, CONTEXT, OBJECT, INT32, 0, 0)
 BUILTIN4(extern, BOOL, js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL, 0, 0)
-BUILTIN3(extern, OBJECT, js_FastValueToIterator, CONTEXT, UINT32, JSVAL, 0, 0)
-BUILTIN2(extern, JSVAL, js_FastCallIteratorNext, CONTEXT, OBJECT, 0, 0)
 BUILTIN2(FRIEND, BOOL, js_CloseIterator, CONTEXT, JSVAL, 0, 0)
 BUILTIN2(extern, SIDEEXIT, js_CallTree, INTERPSTATE, FRAGMENT, 0, 0)
 BUILTIN2(extern, OBJECT, js_FastNewObject, CONTEXT, OBJECT, 0, 0)

View file

@@ -98,22 +98,42 @@ function formatoffset(n, w) {
 function immediate(op) {
     let info = op.info;
+    let imm1Expr = /^\(/.test(op.imm1);
     if (info.flags.indexOf("JOF_ATOM") >= 0) {
         if (/^(?:void|object|function|string|number|boolean)$/.test(op.imm1))
             return "0, COMMON_TYPE_ATOM_INDEX(JSTYPE_" + op.imm1.toUpperCase() + ")";
         return "0, COMMON_ATOM_INDEX(" + op.imm1 + ")";
     }
-    if (info.flags.indexOf("JOF_JUMP") >= 0)
+    if (info.flags.indexOf("JOF_JUMP") >= 0) {
+        ASSERT(!imm1Expr);
         return ((op.target >> 8) & 0xff) + ", " + (op.target & 0xff);
+    }
     if (info.flags.indexOf("JOF_UINT8") >= 0 ||
         info.flags.indexOf("JOF_INT8") >= 0) {
+        if (imm1Expr)
+            return op.imm1;
+        if (isNaN(Number(op.imm1)) || Number(op.imm1) != parseInt(op.imm1))
+            throw new Error("invalid 8-bit operand: " + op.imm1);
         return (op.imm1 & 0xff);
     }
-    if (info.flags.indexOf("JOF_UINT16") >= 0)
+    if (info.flags.indexOf("JOF_UINT16") >= 0) {
+        if (imm1Expr)
+            return '(_ & 0xff00) >> 8, (_ & 0xff)'.replace(/_/g, op.imm1);
         return ((op.imm1 & 0xff00) >> 8) + ", " + (op.imm1 & 0xff);
+    }
     throw new Error(info.jsop + " format not yet implemented");
 }
 
+const line_regexp_parts = [
+    "^(?:(\\w+):)?",
+    "\\s*(\\.?\\w+)",
+    "(?:\\s+(\\w+|\\([^)]*\\)))?",
+    "(?:\\s+([\\w-]+|\\([^)]*\\)))?",
+    "(?:\\s*(?:#.*))?$"
+];
+
+const line_regexp = new RegExp(line_regexp_parts.join(""));
+
 /*
  * Syntax (spaces are significant only to delimit tokens):
  *
@@ -121,10 +141,13 @@ function immediate(op) {
  *   Directive ::= (name ':')? Operation
  *   Operation ::= opname Operands?
  *   Operands  ::= Operand (',' Operand)*
- *   Operand   ::= name | number
+ *   Operand   ::= name | number | '(' Expr ')'
+ *   Expr      ::= a constant-expression in the C++ language
+ *                 containing no parentheses
  *
  * We simplify given line structure and the maximum of one immediate operand,
- * by parsing using split and regexps.
+ * by parsing using split and regexps. For ease of parsing, parentheses are
+ * banned in an Expr for now, even in quotes or a C++ comment.
  *
  * Pseudo-ops start with . and include .igroup and .imacro, terminated by .end.
  * .imacro must nest in .igroup, neither nests in itself. See imacros.jsasm for
@@ -141,7 +164,7 @@ function assemble(filename) {
     for (let i = 0; i < a.length; i++) {
         if (/^\s*(?:#.*)?$/.test(a[i]))
             continue;
-        let m = /(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+))?(?:\s+([\w-]+))?(?:\s*(?:#.*))?$/.exec(a[i]);
+        let m = line_regexp.exec(a[i]);
         if (!m)
             throw new Error(a[i]);
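The line_regexp built from line_regexp_parts above accepts an optional label, an opcode, and up to two operands that are each either a word or a parenthesized expression, followed by an optional '#' comment. A hypothetical stand-alone demo of the same pattern, ported to C++ std::regex purely for illustration (the assembler itself is the JavaScript above):

```cpp
#include <iostream>
#include <regex>
#include <string>

int main() {
    // Same pattern as line_regexp_parts.join("") in the hunk above.
    const std::regex line_regexp(
        R"(^(?:(\w+):)?\s*(\.?\w+)(?:\s+(\w+|\([^)]*\)))?(?:\s+([\w-]+|\([^)]*\)))?(?:\s*(?:#.*))?$)");
    const std::string line = "    int8 (JSITER_ENUMERATE|JSITER_FOREACH) # fun obj flags";
    std::smatch m;
    if (std::regex_match(line, m, line_regexp))
        std::cout << "opcode=" << m[2] << " imm1=" << m[3] << '\n';
    // Prints: opcode=int8 imm1=(JSITER_ENUMERATE|JSITER_FOREACH)
    return 0;
}
```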

View file

@@ -536,6 +536,64 @@ static struct {
 /* 6*/ JSOP_STOP,
     },
 };
 
+static struct {
+    jsbytecode for_in[10];
+    jsbytecode for_each[10];
+    jsbytecode for_in_native[10];
+    jsbytecode for_each_native[10];
+} iter_imacros = {
+    {
+/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator),
+/* 3*/ JSOP_INT8, (JSITER_ENUMERATE),
+/* 5*/ JSOP_CALL, 0, 1,
+/* 8*/ JSOP_PUSH,
+/* 9*/ JSOP_STOP,
+    },
+    {
+/* 0*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator),
+/* 3*/ JSOP_INT8, (JSITER_ENUMERATE|JSITER_FOREACH),
+/* 5*/ JSOP_CALL, 0, 1,
+/* 8*/ JSOP_PUSH,
+/* 9*/ JSOP_STOP,
+    },
+    {
+/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff),
+/* 3*/ JSOP_INT8, (JSITER_ENUMERATE),
+/* 5*/ JSOP_CALL, 0, 1,
+/* 8*/ JSOP_PUSH,
+/* 9*/ JSOP_STOP,
+    },
+    {
+/* 0*/ JSOP_CALLBUILTIN, ((JSBUILTIN_ObjectToIterator) & 0xff00) >> 8, ((JSBUILTIN_ObjectToIterator) & 0xff),
+/* 3*/ JSOP_INT8, (JSITER_ENUMERATE|JSITER_FOREACH),
+/* 5*/ JSOP_CALL, 0, 1,
+/* 8*/ JSOP_PUSH,
+/* 9*/ JSOP_STOP,
+    },
+};
+
+static struct {
+    jsbytecode custom_iter_next[10];
+    jsbytecode native_iter_next[12];
+} nextiter_imacros = {
+    {
+/* 0*/ JSOP_POP,
+/* 1*/ JSOP_DUP,
+/* 2*/ JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next),
+/* 5*/ JSOP_CALL, 0, 0,
+/* 8*/ JSOP_TRUE,
+/* 9*/ JSOP_STOP,
+    },
+    {
+/* 0*/ JSOP_POP,
+/* 1*/ JSOP_DUP,
+/* 2*/ JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff),
+/* 5*/ JSOP_CALL, 0, 0,
+/* 8*/ JSOP_DUP,
+/* 9*/ JSOP_HOLE,
+/*10*/ JSOP_STRICTNE,
+/*11*/ JSOP_STOP,
+    },
+};
+
 uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_NOP */
     0,  /* JSOP_PUSH */
@@ -612,8 +670,8 @@ uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_STRICTEQ */
     0,  /* JSOP_STRICTNE */
     0,  /* JSOP_NULLTHIS */
-    0,  /* JSOP_ITER */
-    0,  /* JSOP_NEXTITER */
+    3,  /* JSOP_ITER */
+    2,  /* JSOP_NEXTITER */
     0,  /* JSOP_ENDITER */
     7,  /* JSOP_APPLY */
     0,  /* JSOP_SWAP */
@@ -763,7 +821,7 @@ uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_CALLGVAR */
     0,  /* JSOP_CALLLOCAL */
     0,  /* JSOP_CALLARG */
-    0,  /* JSOP_UNUSED226 */
+    0,  /* JSOP_CALLBUILTIN */
     0,  /* JSOP_INT8 */
     0,  /* JSOP_INT32 */
     0,  /* JSOP_LENGTH */

View file

@@ -575,3 +575,63 @@
         .end    #
     .end
 .end
+
+.igroup iter JSOP_ITER
+
+    .imacro for_in                               # obj
+        callprop iterator                        # fun obj
+        int8 (JSITER_ENUMERATE)                  # fun obj flags
+        call 1                                   # iterobj
+        push                                     # iterobj undef
+        stop
+    .end
+
+    .imacro for_each                             # obj
+        callprop iterator                        # fun obj
+        int8 (JSITER_ENUMERATE|JSITER_FOREACH)   # fun obj flags
+        call 1                                   # iterobj
+        push                                     # iterobj undef
+        stop
+    .end
+
+    .imacro for_in_native                        # obj
+        callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj
+        int8 (JSITER_ENUMERATE)                  # fun obj flags
+        call 1                                   # iterobj
+        push                                     # iterobj undef
+        stop
+    .end
+
+    .imacro for_each_native                      # obj
+        callbuiltin (JSBUILTIN_ObjectToIterator) # fun obj
+        int8 (JSITER_ENUMERATE|JSITER_FOREACH)   # fun obj flags
+        call 1                                   # iterobj
+        push                                     # iterobj undef
+        stop
+    .end
+
+.end
+
+.igroup nextiter JSOP_NEXTITER
+
+    .imacro custom_iter_next                     # iterobj prevval
+        pop                                      # iterobj
+        dup                                      # iterobj iterobj
+        callprop next                            # iterobj fun iterobj
+        call 0                                   # iterobj nextval
+        true                                     # iterobj nextval true
+        stop
+    .end
+
+    .imacro native_iter_next                     # iterobj prevval
+        pop                                      # iterobj
+        dup                                      # iterobj iterobj
+        callbuiltin (JSBUILTIN_CallIteratorNext) # iterobj fun iterobj
+        call 0                                   # iterobj nextval?
+        dup                                      # iterobj nextval? nextval?
+        hole                                     # iterobj nextval? nextval? hole
+        strictne                                 # iterobj nextval? boolean
+        stop
+    .end
+
+.end

View file

@@ -243,23 +243,6 @@ js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v)
     return OBJ_SET_PROPERTY(cx, obj, id, &v);
 }
 
-JSObject* FASTCALL
-js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v)
-{
-    if (!js_ValueToIterator(cx, flags, &v))
-        return NULL;
-    return JSVAL_TO_OBJECT(v);
-}
-
-jsval FASTCALL
-js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj)
-{
-    jsval v;
-    if (!js_CallIteratorNext(cx, iterobj, &v))
-        return JSVAL_ERROR_COOKIE;
-    return v;
-}
-
 SideExit* FASTCALL
 js_CallTree(InterpState* state, Fragment* f)
 {

View file

@@ -153,10 +153,8 @@ typedef struct JSTraceMonitor {
 
 #ifdef JS_TRACER
 # define JS_ON_TRACE(cx)   (JS_TRACE_MONITOR(cx).onTrace)
-# define JS_EXECUTING_TRACE(cx) (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder)
 #else
 # define JS_ON_TRACE(cx)   JS_FALSE
-# define JS_EXECUTING_TRACE(cx) JS_FALSE
 #endif
 
 #ifdef DEBUG
@ -256,6 +254,12 @@ typedef enum JSRuntimeState {
JSRTS_LANDING JSRTS_LANDING
} JSRuntimeState; } JSRuntimeState;
typedef enum JSBuiltinFunctionId {
JSBUILTIN_ObjectToIterator,
JSBUILTIN_CallIteratorNext,
JSBUILTIN_LIMIT
} JSBuiltinFunctionId;
typedef struct JSPropertyTreeEntry { typedef struct JSPropertyTreeEntry {
JSDHashEntryHdr hdr; JSDHashEntryHdr hdr;
JSScopeProperty *child; JSScopeProperty *child;
@@ -365,6 +369,14 @@ struct JSRuntime {
     JSString            *emptyString;
     JSString            **unitStrings;
 
+    /*
+     * Builtin functions, lazily created and held for use by the trace recorder.
+     *
+     * This field would be #ifdef JS_TRACER, but XPConnect is compiled without
+     * -DJS_TRACER and includes this header.
+     */
+    JSObject            *builtinFunctions[JSBUILTIN_LIMIT];
+
     /* List of active contexts sharing this runtime; protected by gcLock. */
     JSCList             contextList;

View file

@@ -304,7 +304,7 @@ js_fileBaseName(JSContext *cx, const char *pathname)
 
     index = strlen(pathname)-1;
 
-    /* Chop off trailing seperators. */
+    /* Chop off trailing separators. */
     while (index > 0 && (pathname[index]==FILESEPARATOR ||
                          pathname[index]==FILESEPARATOR2)) {
         --index;

View file

@@ -3131,6 +3131,11 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
         rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);
 
 #ifdef JS_TRACER
+    for (int i = 0; i < JSBUILTIN_LIMIT; i++) {
+        if (rt->builtinFunctions[i])
+            JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function");
+    }
+
 #ifdef JS_THREADSAFE
     /* Trace the loop table(s) which can contain pointers to code objects. */
     while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {

View file

@@ -2574,22 +2574,16 @@ js_Interpret(JSContext *cx)
 
 #ifdef JS_TRACER
     /* We had better not be entering the interpreter from JIT-compiled code. */
-    TraceRecorder *tr = NULL;
-    if (JS_ON_TRACE(cx)) {
-        tr = TRACE_RECORDER(cx);
-        SET_TRACE_RECORDER(cx, NULL);
-        JS_TRACE_MONITOR(cx).onTrace = JS_FALSE;
-        /*
-         * ON_TRACE means either recording or coming from traced code.
-         * If there's no recorder (the latter case), don't care.
-         */
-        if (tr) {
-            if (tr->wasDeepAborted())
-                tr->removeFragmentoReferences();
-            else
-                tr->pushAbortStack();
-        }
-    }
+    TraceRecorder *tr = TRACE_RECORDER(cx);
+    SET_TRACE_RECORDER(cx, NULL);
+    /* If a recorder is pending and we try to re-enter the interpreter, flag
+       the recorder to be destroyed when we return. */
+    if (tr) {
+        if (tr->wasDeepAborted())
+            tr->removeFragmentoReferences();
+        else
+            tr->pushAbortStack();
+    }
 #endif
 
     /* Check for too deep of a native thread stack. */
@ -3221,7 +3215,6 @@ js_Interpret(JSContext *cx)
CHECK_INTERRUPT_HANDLER(); CHECK_INTERRUPT_HANDLER();
rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE);
PUSH(rval); PUSH(rval);
TRACE_0(IteratorNextComplete);
END_CASE(JSOP_NEXTITER) END_CASE(JSOP_NEXTITER)
BEGIN_CASE(JSOP_ENDITER) BEGIN_CASE(JSOP_ENDITER)
@@ -5616,6 +5609,10 @@ js_Interpret(JSContext *cx)
              * JSOP_SETGVAR has arity 1: [rval], not arity 2: [obj, rval]
              * as JSOP_SETNAME does, where [obj] is due to JSOP_BINDNAME.
              */
+#ifdef JS_TRACER
+            if (TRACE_RECORDER(cx))
+                js_AbortRecording(cx, "SETGVAR with NULL slot");
+#endif
             LOAD_ATOM(0);
             id = ATOM_TO_JSID(atom);
             if (!OBJ_SET_PROPERTY(cx, obj, id, &rval))
@@ -6726,6 +6723,19 @@ js_Interpret(JSContext *cx)
             }
           END_CASE(JSOP_LEAVEBLOCK)
 
+          BEGIN_CASE(JSOP_CALLBUILTIN)
+#ifdef JS_TRACER
+            obj = js_GetBuiltinFunction(cx, GET_INDEX(regs.pc));
+            if (!obj)
+                goto error;
+            rval = FETCH_OPND(-1);
+            PUSH_OPND(rval);
+            STORE_OPND(-2, OBJECT_TO_JSVAL(obj));
+#else
+            goto bad_opcode;  /* This is an imacro-only opcode. */
+#endif
+          END_CASE(JSOP_CALLBUILTIN)
+
 #if JS_HAS_GENERATORS
           BEGIN_CASE(JSOP_GENERATOR)
             ASSERT_NOT_THROWING(cx);
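The new JSOP_CALLBUILTIN case rewrites the operand stack from [... obj] to [... funobj, obj], so the JSOP_CALL that follows in the imacro sees the builtin as the callee and the original object as |this|. A small model of just that stack effect (illustrative only, not SpiderMonkey's operand macros):

```cpp
#include <cassert>
#include <vector>

// Mirrors FETCH_OPND(-1) / PUSH_OPND(rval) / STORE_OPND(-2, funobj).
static void callbuiltin(std::vector<int>& stack, int funobj) {
    int obj = stack.back();            // FETCH_OPND(-1)
    stack.push_back(obj);              // PUSH_OPND: duplicate obj on top
    stack[stack.size() - 2] = funobj;  // STORE_OPND(-2): builtin under it
}

int main() {
    std::vector<int> s = {42};  // 42 stands in for the object being iterated
    callbuiltin(s, 7);          // 7 stands in for the builtin function object
    assert(s.size() == 2 && s[0] == 7 && s[1] == 42);
    return 0;
}
```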
@@ -6835,10 +6845,12 @@ js_Interpret(JSContext *cx)
           L_JSOP_UNUSED208:
           L_JSOP_UNUSED209:
           L_JSOP_UNUSED219:
-          L_JSOP_UNUSED226:
 
 #else /* !JS_THREADED_INTERP */
           default:
+#endif
+#ifndef JS_TRACER
+  bad_opcode:
 #endif
           {
             char numBuf[12];
@ -6857,7 +6869,8 @@ js_Interpret(JSContext *cx)
if (fp->imacpc && cx->throwing) { if (fp->imacpc && cx->throwing) {
// To keep things simple, we hard-code imacro exception handlers here. // To keep things simple, we hard-code imacro exception handlers here.
if (*fp->imacpc == JSOP_NEXTITER) { if (*fp->imacpc == JSOP_NEXTITER) {
JS_ASSERT(*regs.pc == JSOP_CALL); // pc may point to JSOP_DUP here due to bug 474854.
JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP);
if (js_ValueIsStopIteration(cx->exception)) { if (js_ValueIsStopIteration(cx->exception)) {
cx->throwing = JS_FALSE; cx->throwing = JS_FALSE;
cx->exception = JSVAL_VOID; cx->exception = JSVAL_VOID;
@@ -7089,7 +7102,6 @@ js_Interpret(JSContext *cx)
 
 #ifdef JS_TRACER
     if (tr) {
-        JS_TRACE_MONITOR(cx).onTrace = JS_TRUE;
         SET_TRACE_RECORDER(cx, tr);
         if (!tr->wasDeepAborted()) {
             tr->popAbortStack();

View file

@ -3706,7 +3706,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSObject **objp,
JSProperty *prop; JSProperty *prop;
JSScopeProperty *sprop; JSScopeProperty *sprop;
JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx)); JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx));
obj = js_GetTopStackFrame(cx)->scopeChain; obj = js_GetTopStackFrame(cx)->scopeChain;
shape = OBJ_SHAPE(obj); shape = OBJ_SHAPE(obj);
for (scopeIndex = 0; ; scopeIndex++) { for (scopeIndex = 0; ; scopeIndex++) {

View file

@@ -545,7 +545,7 @@ OPDEF(JSOP_INDEXBASE3, 222,"atombase3", NULL, 1, 0, 0, 0, JOF_BYTE |
 OPDEF(JSOP_CALLGVAR,  223, "callgvar",    NULL,  3,  0,  2, 19,  JOF_ATOM|JOF_NAME|JOF_CALLOP)
 OPDEF(JSOP_CALLLOCAL, 224, "calllocal",   NULL,  3,  0,  2, 19,  JOF_LOCAL|JOF_NAME|JOF_CALLOP)
 OPDEF(JSOP_CALLARG,   225, "callarg",     NULL,  3,  0,  2, 19,  JOF_QARG |JOF_NAME|JOF_CALLOP)
-OPDEF(JSOP_UNUSED226, 226, "unused226",   NULL,  1,  0,  1,  1,  JOF_BYTE)
+OPDEF(JSOP_CALLBUILTIN, 226, "callbuiltin", NULL,  3,  0,  2,  0,  JOF_UINT16)
 
 /*
  * Opcodes to hold 8-bit and 32-bit immediate integer operands.

View file

@@ -3831,11 +3831,16 @@ MatchRegExp(REGlobalData *gData, REMatchState *x)
         (native = GetNativeRegExp(gData->cx, gData->regexp))) {
         gData->skipped = (ptrdiff_t) x->cp;
 
+#ifdef JS_JIT_SPEW
+        {
+            JSStackFrame *caller = js_GetScriptedCaller(gData->cx, NULL);
         debug_only_v(printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
-                            gData->cx->fp->script->filename,
-                            js_FramePCToLineNumber(gData->cx, gData->cx->fp),
-                            FramePCOffset(gData->cx->fp),
-                            native););
+                            caller ? caller->script->filename : "<unknown>",
+                            caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
+                            caller ? FramePCOffset(caller) : 0,
+                            (void *) native););
+        }
+#endif
 
 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
         SIMULATE_FASTCALL(result, x, gData, native);

View file

@@ -55,14 +55,14 @@ inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {}
 inline JS_FORCES_STACK void
 JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx)
 {
-    JS_ASSERT(!JS_EXECUTING_TRACE(cx));
+    JS_ASSERT(!JS_ON_TRACE(cx));
 }
 
 #else
 
 #define MUST_FLOW_THROUGH(label)            ((void) 0)
 #define MUST_FLOW_LABEL(label)
 #define VOUCH_DOES_NOT_REQUIRE_STACK()      ((void) 0)
-#define JS_ASSERT_NOT_EXECUTING_TRACE(cx)   JS_ASSERT(!JS_EXECUTING_TRACE(cx))
+#define JS_ASSERT_NOT_EXECUTING_TRACE(cx)   JS_ASSERT(!JS_ON_TRACE(cx))
 
 #endif
 
 #endif /* jsstaticcheck_h___ */

View file

@@ -1722,6 +1722,7 @@ TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
             ins = lir->insLoad(LIR_ldp, base, offset);
         }
     }
+    checkForGlobalObjectReallocation();
    tracker.set(p, ins);
 #ifdef DEBUG
     char name[64];
@@ -1809,7 +1810,7 @@ TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
     if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
         return false;
     jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
-    if (tracker.has(vp))
+    if (known(vp))
         return true; /* we already have it */
     unsigned index = traceMonitor->globalSlots->length();
     /* If this the first global we are adding, remember the shape of the global object. */
@ -1841,7 +1842,8 @@ TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
JS_REQUIRES_STACK void JS_REQUIRES_STACK void
TraceRecorder::set(jsval* p, LIns* i, bool initializing) TraceRecorder::set(jsval* p, LIns* i, bool initializing)
{ {
JS_ASSERT(initializing || tracker.has(p)); JS_ASSERT(initializing || known(p));
checkForGlobalObjectReallocation();
tracker.set(p, i); tracker.set(p, i);
/* If we are writing to this location for the first time, calculate the offset into the /* If we are writing to this location for the first time, calculate the offset into the
native frame manually, otherwise just look up the last load or store associated with native frame manually, otherwise just look up the last load or store associated with
@@ -1873,11 +1875,43 @@ TraceRecorder::set(jsval* p, LIns* i, bool initializing)
 }
 
 JS_REQUIRES_STACK LIns*
-TraceRecorder::get(jsval* p) const
+TraceRecorder::get(jsval* p)
 {
+    checkForGlobalObjectReallocation();
     return tracker.get(p);
 }
 
+JS_REQUIRES_STACK bool
+TraceRecorder::known(jsval* p)
+{
+    checkForGlobalObjectReallocation();
+    return tracker.has(p);
+}
+
+/*
+ * The dslots of the global object are sometimes reallocated by the interpreter.
+ * This function check for that condition and re-maps the entries of the tracker
+ * accordingly.
+ */
+JS_REQUIRES_STACK void
+TraceRecorder::checkForGlobalObjectReallocation()
+{
+    if (global_dslots != globalObj->dslots) {
+        debug_only_v(printf("globalObj->dslots relocated, updating tracker\n");)
+        jsval* src = global_dslots;
+        jsval* dst = globalObj->dslots;
+        jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
+        LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
+        for (jsuint n = 0; n < length; ++n) {
+            map[n] = tracker.get(src);
+            tracker.set(src++, NULL);
+        }
+        for (jsuint n = 0; n < length; ++n)
+            tracker.set(dst++, map[n]);
+        global_dslots = globalObj->dslots;
+    }
+}
+
 /* Determine whether the current branch instruction terminates the loop. */
 static bool
 js_IsLoopExit(jsbytecode* pc, jsbytecode* header)
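checkForGlobalObjectReallocation() is needed because the tracker is keyed by slot address: once the interpreter reallocates globalObj->dslots, every tracked global that lived at old_base + n must next be found at new_base + n. The two-pass save-then-reinsert below has the same shape as the code above, reduced to standard containers (illustrative only, not SpiderMonkey's Tracker):

```cpp
#include <unordered_map>
#include <vector>

// Re-key every tracked entry from old_base + n to new_base + n. Values are
// all saved in a first pass, as in the patch, so the second pass cannot
// clobber entries even if the old and new ranges overlap.
static void remap(std::unordered_map<const int*, int>& tracker,
                  const int* old_base, const int* new_base, size_t length) {
    std::vector<int> saved(length);
    for (size_t n = 0; n < length; ++n) {
        saved[n] = tracker[old_base + n];  // 0 if the slot was untracked
        tracker.erase(old_base + n);
    }
    for (size_t n = 0; n < length; ++n)
        tracker[new_base + n] = saved[n];
}
```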
@@ -1997,7 +2031,7 @@ TraceRecorder::adjustCallerTypes(Fragment* f)
 }
 
 JS_REQUIRES_STACK uint8
-TraceRecorder::determineSlotType(jsval* vp) const
+TraceRecorder::determineSlotType(jsval* vp)
 {
     uint8 m;
     LIns* i = get(vp);
@ -2072,7 +2106,7 @@ TraceRecorder::snapshot(ExitType exitType)
bool resumeAfter = (pendingTraceableNative && bool resumeAfter = (pendingTraceableNative &&
JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL); JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL);
if (resumeAfter) { if (resumeAfter) {
JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER); JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY);
pc += cs.length; pc += cs.length;
regs->pc = pc; regs->pc = pc;
MUST_FLOW_THROUGH("restore_pc"); MUST_FLOW_THROUGH("restore_pc");
@@ -2099,11 +2133,10 @@ TraceRecorder::snapshot(ExitType exitType)
     );
     JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
 
-    /* If we are capturing the stack state on a specific instruction, the value on or near
-       the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we
-       want one below top of stack, or else it's JSOP_CALL and we want top of stack. */
+    /* If we are capturing the stack state on a specific instruction, the value on
+       the top of the stack is a boxed value. */
     if (resumeAfter) {
-        typemap[stackSlots + ((pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1)] = JSVAL_BOXED;
+        typemap[stackSlots - 1] = JSVAL_BOXED;
 
         /* Now restore the the original pc (after which early returns are ok). */
         MUST_FLOW_LABEL(restore_pc);
@ -2298,8 +2331,6 @@ TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer,
if (stable_peer) if (stable_peer)
*stable_peer = NULL; *stable_peer = NULL;
demote = false;
/* /*
* Rather than calculate all of this stuff twice, it gets cached locally. The "stage" buffers * Rather than calculate all of this stuff twice, it gets cached locally. The "stage" buffers
* are for calls to set() that will change the exit types. * are for calls to set() that will change the exit types.
@ -2935,9 +2966,6 @@ js_DeleteRecorder(JSContext* cx)
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
/* Aborting and completing a trace end up here. */ /* Aborting and completing a trace end up here. */
JS_ASSERT(tm->onTrace);
tm->onTrace = false;
delete tm->recorder; delete tm->recorder;
tm->recorder = NULL; tm->recorder = NULL;
} }
@ -2965,15 +2993,6 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
{ {
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx); JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
/*
* Emulate on-trace semantics and avoid rooting headaches while recording,
* by suppressing last-ditch GC attempts while recording a trace. This does
* means that trace recording must not nest or the following assertion will
* botch.
*/
JS_ASSERT(!tm->onTrace);
tm->onTrace = true;
/* start recording if no exception during construction */ /* start recording if no exception during construction */
tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti, tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
stackSlots, ngslots, typeMap, stackSlots, ngslots, typeMap,
@ -3477,7 +3496,7 @@ js_CloseLoop(JSContext* cx)
return false; return false;
} }
bool demote; bool demote = false;
Fragment* f = r->getFragment(); Fragment* f = r->getFragment();
r->closeLoop(tm, demote); r->closeLoop(tm, demote);
js_DeleteRecorder(cx); js_DeleteRecorder(cx);
@@ -3867,15 +3886,12 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
 #endif
 #endif
 
-    /*
-     * We may be called from js_MonitorLoopEdge while not recording, or while
-     * recording. Rather than over-generalize by using a counter instead of a
-     * flag, we simply sample and update tm->onTrace if necessary.
-     */
-    bool onTrace = tm->onTrace;
-    if (!onTrace)
-        tm->onTrace = true;
-    VMSideExit* lr;
+    /* Set a flag that indicates to the runtime system that we are running in native code
+       now and we don't want automatic GC to happen. Instead we will get a silent failure,
+       which will cause a trace exit at which point the interpreter re-tries the operation
+       and eventually triggers the GC. */
+    JS_ASSERT(!tm->onTrace);
+    tm->onTrace = true;
 
     debug_only(fflush(NULL);)
     GuardRecord* rec;
@@ -3884,13 +3900,13 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
 #else
     rec = u.func(&state, NULL);
 #endif
-    lr = (VMSideExit*)rec->exit;
+    VMSideExit* lr = (VMSideExit*)rec->exit;
 
     AUDIT(traceTriggered);
 
     JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
 
-    tm->onTrace = onTrace;
+    tm->onTrace = false;
 
     /* Except if we find that this is a nested bailout, the guard the call returned is the
        one we have to use to adjust pc and sp. */
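With the recorder no longer toggling onTrace, js_ExecuteTree can treat the flag as strictly bracketing native execution: assert it is clear on entry, set it for the duration of the run, and clear it unconditionally afterwards. An RAII sketch of that discipline (hypothetical helper, not part of the patch):

```cpp
#include <cassert>

// Brackets a native trace run: asserts non-reentry, then sets and clears
// the monitor's onTrace flag around the run, as the patched
// js_ExecuteTree now does by hand.
struct OnTraceGuard {
    bool& onTrace;
    explicit OnTraceGuard(bool& flag) : onTrace(flag) {
        assert(!onTrace);  // trace execution must not nest
        onTrace = true;
    }
    ~OnTraceGuard() { onTrace = false; }
};
```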
@ -4159,12 +4175,6 @@ TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op)
// opcode-case-guts record hook (record_FastNativeCallComplete). // opcode-case-guts record hook (record_FastNativeCallComplete).
tr->pendingTraceableNative = NULL; tr->pendingTraceableNative = NULL;
// In the future, handle dslots realloc by computing an offset from dslots instead.
if (tr->global_dslots != tr->globalObj->dslots) {
js_AbortRecording(cx, "globalObj->dslots reallocated");
return JSMRS_STOP;
}
jsbytecode* pc = cx->fp->regs->pc; jsbytecode* pc = cx->fp->regs->pc;
/* If we hit a break, end the loop and generate an always taken loop exit guard. For other /* If we hit a break, end the loop and generate an always taken loop exit guard. For other
@@ -4460,7 +4470,7 @@ js_FlushJITCache(JSContext* cx)
 JS_FORCES_STACK JSStackFrame *
 js_GetTopStackFrame(JSContext *cx)
 {
-    if (JS_EXECUTING_TRACE(cx)) {
+    if (JS_ON_TRACE(cx)) {
         /*
          * TODO: If executing a tree, synthesize stack frames and bail off
          * trace. See bug 462027.
@@ -5989,9 +5999,9 @@ TraceRecorder::record_JSOP_PICK()
     jsval* sp = cx->fp->regs->sp;
     jsint n = cx->fp->regs->pc[1];
     JS_ASSERT(sp - (n+1) >= StackBase(cx->fp));
-    LIns* top = tracker.get(sp - (n+1));
+    LIns* top = get(sp - (n+1));
     for (jsint i = 0; i < n; ++i)
-        set(sp - (n+1) + i, tracker.get(sp - n + i));
+        set(sp - (n+1) + i, get(sp - n + i));
     set(&sp[-1], top);
     return true;
 }
@@ -7674,114 +7684,40 @@ TraceRecorder::record_JSOP_IMACOP()
     return true;
 }
 
-static struct {
-    jsbytecode for_in[10];
-    jsbytecode for_each[10];
-} iter_imacros = {
-    {
-        JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator),
-        JSOP_INT8, JSITER_ENUMERATE,
-        JSOP_CALL, 0, 1,
-        JSOP_PUSH,
-        JSOP_STOP
-    },
-
-    {
-        JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(iterator),
-        JSOP_INT8, JSITER_ENUMERATE | JSITER_FOREACH,
-        JSOP_CALL, 0, 1,
-        JSOP_PUSH,
-        JSOP_STOP
-    }
-};
-
-JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT);
-
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ITER()
 {
     jsval& v = stackval(-1);
-    if (!JSVAL_IS_PRIMITIVE(v)) {
+    if (JSVAL_IS_PRIMITIVE(v))
+        ABORT_TRACE("for-in on a primitive value");
+
     jsuint flags = cx->fp->regs->pc[1];
 
-    if (!hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
-        LIns* args[] = { get(&v), INS_CONST(flags), cx_ins };
-        LIns* v_ins = lir->insCall(&js_FastValueToIterator_ci, args);
-        guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT);
-        set(&v, v_ins);
-
-        LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
-        stack(0, void_ins);
-        return true;
-    }
-
+    if (hasIteratorMethod(JSVAL_TO_OBJECT(v))) {
         if (flags == JSITER_ENUMERATE)
             return call_imacro(iter_imacros.for_in);
         if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
             return call_imacro(iter_imacros.for_each);
-        ABORT_TRACE("unimplemented JSITER_* flags");
+    } else {
+        if (flags == JSITER_ENUMERATE)
+            return call_imacro(iter_imacros.for_in_native);
+        if (flags == (JSITER_ENUMERATE | JSITER_FOREACH))
+            return call_imacro(iter_imacros.for_each_native);
     }
-    ABORT_TRACE("for-in on a primitive value");
+
+    ABORT_TRACE("unimplemented JSITER_* flags");
 }
 
-static JSTraceableNative js_FastCallIteratorNext_tn = {
-    NULL,                               // JSFastNative            native;
-    &js_FastCallIteratorNext_ci,        // const nanojit::CallInfo *builtin;
-    "C",                                // const char              *prefix;
-    "o",                                // const char              *argtypes;
-    FAIL_JSVAL                          // uintN                   flags;
-};
-
-static jsbytecode nextiter_imacro[] = {
-    JSOP_POP,
-    JSOP_DUP,
-    JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next),
-    JSOP_CALL, 0, 0,
-    JSOP_TRUE,
-    JSOP_STOP
-};
-
-JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT);
-
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEXTITER()
 {
     jsval& iterobj_val = stackval(-2);
-    if (!JSVAL_IS_PRIMITIVE(iterobj_val)) {
-        LIns* iterobj_ins = get(&iterobj_val);
-
-        if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) {
-            LIns* args[] = { iterobj_ins, cx_ins };
-            LIns* v_ins = lir->insCall(&js_FastCallIteratorNext_ci, args);
-            guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
-
-            LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE)));
-            stack(-1, v_ins);
-            stack(0, flag_ins);
-
-            pendingTraceableNative = &js_FastCallIteratorNext_tn;
-            return true;
-        }
-
-        // Custom iterator, possibly a generator.
-        return call_imacro(nextiter_imacro);
-    }
-    ABORT_TRACE("for-in on a primitive value");
-}
+    if (JSVAL_IS_PRIMITIVE(iterobj_val))
+        ABORT_TRACE("for-in on a primitive value");
 
-JS_REQUIRES_STACK bool
-TraceRecorder::record_IteratorNextComplete()
-{
-    JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER);
-    JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn);
-
-    jsval& v = stackval(-2);
-    LIns* v_ins = get(&v);
-    unbox_jsval(v, v_ins);
-    set(&v, v_ins);
-    return true;
+    LIns* iterobj_ins = get(&iterobj_val);
+    if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT))
+        return call_imacro(nextiter_imacros.native_iter_next);
+    return call_imacro(nextiter_imacros.custom_iter_next);
 }
 
 JS_REQUIRES_STACK bool
@@ -8810,6 +8746,97 @@ TraceRecorder::record_JSOP_CALLARG()
     return true;
 }
 
+/* Functions for use with JSOP_CALLBUILTIN. */
+
+static JSBool
+ObjectToIterator(JSContext *cx, uintN argc, jsval *vp)
+{
+    jsval *argv = JS_ARGV(cx, vp);
+    JS_ASSERT(JSVAL_IS_INT(argv[0]));
+    JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
+    return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
+}
+
+static JSObject* FASTCALL
+ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags)
+{
+    jsval v = OBJECT_TO_JSVAL(obj);
+    if (!js_ValueToIterator(cx, flags, &v))
+        return NULL;
+    return JSVAL_TO_OBJECT(v);
+}
+
+static JSBool
+CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
+{
+    return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
+}
+
+static jsval FASTCALL
+CallIteratorNext_tn(JSContext* cx, JSObject* iterobj)
+{
+    jsval v;
+    if (!js_CallIteratorNext(cx, iterobj, &v))
+        return JSVAL_ERROR_COOKIE;
+    return v;
+}
+
+JS_DEFINE_TRCINFO_1(ObjectToIterator,
+    (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0)))
+JS_DEFINE_TRCINFO_1(CallIteratorNext,
+    (2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0)))
+
+static const struct BuiltinFunctionInfo {
+    JSTraceableNative *tn;
+    int nargs;
+} builtinFunctionInfo[JSBUILTIN_LIMIT] = {
+    {ObjectToIterator_trcinfo, 1},
+    {CallIteratorNext_trcinfo, 0}
+};
+
+JSObject *
+js_GetBuiltinFunction(JSContext *cx, uintN index)
+{
+    JSRuntime *rt = cx->runtime;
+    JSObject *funobj = rt->builtinFunctions[index];
+
+    if (!funobj) {
+        /* Use NULL parent and atom. Builtin functions never escape to scripts. */
+        JSFunction *fun = js_NewFunction(cx,
+                                         NULL,
+                                         (JSNative) builtinFunctionInfo[index].tn,
+                                         builtinFunctionInfo[index].nargs,
+                                         JSFUN_FAST_NATIVE | JSFUN_TRACEABLE,
+                                         NULL,
+                                         NULL);
+        if (fun) {
+            funobj = FUN_OBJECT(fun);
+            STOBJ_CLEAR_PROTO(funobj);
+            STOBJ_CLEAR_PARENT(funobj);
+
+            JS_LOCK_GC(rt);
+            if (!rt->builtinFunctions[index]) /* retest now that the lock is held */
+                rt->builtinFunctions[index] = funobj;
+            else
+                funobj = rt->builtinFunctions[index];
+            JS_UNLOCK_GC(rt);
+        }
+    }
+    return funobj;
+}
+
+JS_REQUIRES_STACK bool
+TraceRecorder::record_JSOP_CALLBUILTIN()
+{
+    JSObject *obj = js_GetBuiltinFunction(cx, GET_INDEX(cx->fp->regs->pc));
+    if (!obj)
+        return false;
+
+    stack(0, get(&stackval(-1)));
+    stack(-1, INS_CONSTPTR(obj));
+    return true;
+}
+
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NULLTHIS()
 {
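js_GetBuiltinFunction uses a create-then-retest pattern: the function object is built outside the GC lock, and the runtime-wide slot is re-checked under the lock so that a racing thread's duplicate is simply dropped for the GC to reclaim. The same pattern with standard primitives (illustrative names only, not the JSAPI):

```cpp
#include <mutex>

// Lazily create a shared object held in *one* runtime-wide slot. As in the
// patch, the unlocked fast-path read assumes pointer loads are atomic on
// the target platforms.
template <typename T>
T* GetOrCreateSlot(T*& slot, std::mutex& lock, T* (*create)()) {
    T* obj = slot;           // fast path: already created
    if (!obj) {
        obj = create();      // may be slow; may return nullptr on failure
        if (obj) {
            std::lock_guard<std::mutex> guard(lock);
            if (!slot)       // retest now that the lock is held
                slot = obj;
            else
                obj = slot;  // another thread won; discard our copy
        }
    }
    return obj;
}
```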
@@ -8968,7 +8995,7 @@ static void
 InitIMacroCode()
 {
     if (imacro_code[JSOP_NEXTITER]) {
-        JS_ASSERT(imacro_code[JSOP_NEXTITER] == nextiter_imacro - 1);
+        JS_ASSERT(imacro_code[JSOP_NEXTITER] == (jsbytecode*)&nextiter_imacros - 1);
         return;
     }
@@ -8979,7 +9006,7 @@ InitIMacroCode()
 
     imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1;
     imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1;
-    imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1;
+    imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1;
     imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1;
 
     imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1;
@@ -9005,4 +9032,3 @@ UNUSED(207)
 UNUSED(208)
 UNUSED(209)
 UNUSED(219)
-UNUSED(226)

View file

@@ -373,9 +373,11 @@ class TraceRecorder : public avmplus::GCObject {
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
 
-    JS_REQUIRES_STACK nanojit::LIns* get(jsval* p) const;
     nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
     JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false);
+    JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
+    JS_REQUIRES_STACK bool known(jsval* p);
+    JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
 
     JS_REQUIRES_STACK bool checkType(jsval& v, uint8 t, jsval*& stage_val,
                                      nanojit::LIns*& stage_ins, unsigned& stage_count);
@ -492,7 +494,7 @@ public:
static JS_REQUIRES_STACK JSMonitorRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op); static JS_REQUIRES_STACK JSMonitorRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op);
JS_REQUIRES_STACK uint8 determineSlotType(jsval* vp) const; JS_REQUIRES_STACK uint8 determineSlotType(jsval* vp);
JS_REQUIRES_STACK nanojit::LIns* snapshot(ExitType exitType); JS_REQUIRES_STACK nanojit::LIns* snapshot(ExitType exitType);
nanojit::Fragment* getFragment() const { return fragment; } nanojit::Fragment* getFragment() const { return fragment; }
JS_REQUIRES_STACK bool isLoopHeader(JSContext* cx) const; JS_REQUIRES_STACK bool isLoopHeader(JSContext* cx) const;
@@ -518,7 +520,6 @@ public:
     JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry);
     JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
     JS_REQUIRES_STACK bool record_FastNativeCallComplete();
-    JS_REQUIRES_STACK bool record_IteratorNextComplete();
 
     nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; }
     void deepAbort() { deepAborted = true; }
@ -575,6 +576,9 @@ js_FlushJITCache(JSContext* cx);
extern void extern void
js_FlushJITOracle(JSContext* cx); js_FlushJITOracle(JSContext* cx);
extern JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index);
#else /* !JS_TRACER */ #else /* !JS_TRACER */
#define TRACE_0(x) ((void)0) #define TRACE_0(x) ((void)0)

View file

@@ -2545,6 +2545,42 @@ function testApply() {
 testApply.expected = "5,5,5,5,5,5,5,5,5,5";
 test(testApply);
 
+function testNestedForIn() {
+    var a = {x: 1, y: 2, z: 3};
+    var s = '';
+    for (var p1 in a)
+        for (var p2 in a)
+            s += p1 + p2 + ' ';
+    return s;
+}
+testNestedForIn.expected = 'xx xy xz yx yy yz zx zy zz ';
+test(testNestedForIn);
+
+function testForEach() {
+    var r;
+    var a = ["zero", "one", "two", "three"];
+    for (var i = 0; i < RUNLOOP; i++) {
+        r = "";
+        for each (var s in a)
+            r += s + " ";
+    }
+    return r;
+}
+testForEach.expected = "zero one two three ";
+test(testForEach);
+
+function testThinForEach() {
+    var a = ["red"];
+    var n = 0;
+    for (var i = 0; i < 10; i++)
+        for each (var v in a)
+            if (v)
+                n++;
+    return n;
+}
+testThinForEach.expected = 10;
+test(testThinForEach);
+
 function testComparisons()
 {
     // All the special values from each of the types in
@@ -4061,6 +4097,36 @@ testBug458838.jitstats = {
 };
 test(testBug458838);
 
+function testInterpreterReentry() {
+    this.__defineSetter__('x', function(){})
+    for (var j = 0; j < 5; ++j) { x = 3; }
+    return 1;
+}
+testInterpreterReentry.expected = 1;
+test(testInterpreterReentry);
+
+function testInterpreterReentry2() {
+    var a = false;
+    var b = {};
+    var c = false;
+    var d = {};
+    this.__defineGetter__('e', function(){});
+    for (let f in this) print(f);
+    [1 for each (g in this) for each (h in [])]
+    return 1;
+}
+testInterpreterReentry2.expected = 1;
+test(testInterpreterReentry2);
+
+function testInterpreterReentry3() {
+    for (let i=0;i<5;++i) this["y" + i] = function(){};
+    this.__defineGetter__('e', function (x2) { yield; });
+    [1 for each (a in this) for (b in {})];
+    return 1;
+}
+testInterpreterReentry3.expected = 1;
+test(testInterpreterReentry3);
+
 /*****************************************************************************
  *                                                                           *
  *  _____ _ _ _____ ______ _____ _______                                     *