#if defined(V8_TARGET_ARCH_X64)

#define __ ACCESS_MASM(masm_)

  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

    ASSERT(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    EmitJump(carry, target, near_jump);

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      __ testl(rax, Immediate(delta_to_patch_site));

  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);

  MacroAssembler* masm_;
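
// -- Full code generation entry point ---------------------------------------
// Generate() emits the function prologue, allocates stack locals and (when
// needed) a heap context and arguments object, visits declarations and the
// function body, and falls through to an implicit "return undefined".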
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Object>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  if (!info->is_classic_mode() || info->is_native()) {
    __ j(zero, &ok, Label::kNear);
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;

  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    FastNewContextStub stub(heap_slots);
    __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ movq(rax, Operand(rbp, parameter_offset));
        __ movq(Operand(rsi, context_offset), rax);
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (!is_classic_mode()) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);

    __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
             function->proxy()->var()->mode() == CONST_HARMONY);
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
void FullCodeGenerator::ClearAccumulator() {
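
// The profiling counter (a Smi held in a global property cell) drives
// count-based interrupts: back edges and returns decrement it, and when it
// underflows the runtime gets a chance to interrupt or optimize the
// function. The helpers below load the cell into rbx and adjust it.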
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
  if (isolate()->IsDebuggerActive()) {
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);

static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 162;
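
// Back-edge weight is proportional to the code distance of the jump, clamped
// to kMaxBackEdgeWeight, so large loop bodies consume the interrupt budget
// faster than small ones:
//   weight = Min(kMaxBackEdgeWeight, Max(1, distance / kBackEdgeDistanceDivisor))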
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  if (FLAG_count_based_interrupts) {
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    EmitProfilingCounterDecrement(weight);
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);

  RecordStackCheck(stmt->OsrEntryId());

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();

  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
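
// EmitReturnSequence pops the frame and returns, dropping the receiver and
// parameters with a single Ret(arguments_bytes, rcx). When count-based
// interrupts or self-optimization are enabled, it first decrements the
// profiling counter, mirroring the back-edge check above.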
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
    __ bind(&return_label_);
      __ CallRuntime(Runtime::kTraceExit, 1);
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      EmitProfilingCounterDecrement(weight);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      EmitProfilingCounterReset();

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    for (int i = 0; i < kPadding; ++i) {
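
// -- Expression contexts -----------------------------------------------------
// Every expression is compiled in one of four contexts, and Plug() moves a
// finished value into the form that context expects:
//   EffectContext           - value is discarded,
//   AccumulatorValueContext - value is left in rax (the result register),
//   StackValueContext       - value is pushed onto the stack,
//   TestContext             - value is converted to a boolean and control
//                             branches to true_label_/false_label_.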
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);

void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {

  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Move(result_register(), lit);

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
    if (true_label_ != fall_through_) __ jmp(true_label_);
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  __ Move(result_register(), reg);

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
void FullCodeGenerator::EffectContext::Plug(bool flag) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (true_label_ != fall_through_) __ jmp(true_label_);
  if (false_label_ != fall_through_) __ jmp(false_label_);
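
// DoTest converts the value in the result register to a boolean and
// branches. The ToBooleanStub returns a non-zero value for a true result,
// hence the testq/Split(not_zero, ...) pairing below.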
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ testq(result_register(), result_register());
  Split(not_zero, if_true, if_false, fall_through);

void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  return Operand(rbp, offset);

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
  return StackOperand(var);

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  __ movq(dest, location);

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movq(location, src);
  if (var->IsContextSlot()) {
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  if (!context()->IsTest() || !info_->IsOptimizable()) return;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, "Declaration in with context.");
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, "Declaration in catch context.");
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());

      Comment cmnt(masm_, "[ VariableDeclaration");

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(variable->name());
      __ PushRoot(Heap::kTheHoleValueRootIndex);
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movq(StackOperand(variable), result_register());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(rsi,

      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ Push(variable->name());
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());

      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
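
// Switch statements compile into a cascade of compare-and-branch tests
// against the tag value. Each comparison first tries an inlined smi equality
// check guarded by a JumpPatchSite, then falls back to a patchable compare
// IC; clause bodies are emitted afterwards as plain jump targets.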
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    VisitForAccumulatorValue(clause->label());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
      __ jmp(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ jmp(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
    __ jmp(default_clause->body_target());

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
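
// for-in checks for null/undefined enumerables, converts the operand to an
// object, and tries to reuse the map's enum cache via CheckEnumCache. Loop
// state lives in five stack slots (read back as Operand(rsp, 0..4) below),
// which the epilogue drops with addq(rsp, Immediate(5 * kPointerSize)).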
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpq(rax, null_value);
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ bind(&done_convert);

  __ CheckEnumCache(null_value, &call_runtime);
  __ jmp(&use_cache, Label::kNear);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);

  __ bind(&use_cache);
  __ LoadInstanceDescriptors(rax, rcx);

  __ bind(&fixed_array);
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(rbx, cell);
  __ movq(rcx, Operand(rsp, 0 * kPointerSize));
  __ bind(&non_proxy);

  __ movq(rax, Operand(rsp, 0 * kPointerSize));
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  __ j(equal, &update_each, Label::kNear);
  __ j(equal, &update_each, Label::kNear);
  __ j(equal, loop_statement.continue_label());

  __ bind(&update_each);
  __ movq(result_register(), rbx);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  __ bind(loop_statement.break_label());
  __ addq(rsp, Immediate(5 * kPointerSize));

  decrement_loop_depth();
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  context()->Plug(rax);

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  if (s->num_heap_slots() > 0) {
    if (s->calls_non_strict_eval()) {
  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
  s = s->outer_scope();

  if (s != NULL && s->is_eval_scope()) {
    if (!context.is(temp)) {
      __ movq(temp, context);
    __ j(equal, &fast, Label::kNear);

  __ Move(rcx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
  Variable* local = var->local_if_not_shadowed();
  __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
  if (local->mode() == CONST ||
      local->mode() == LET) {
    __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
    if (local->mode() == CONST) {
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  switch (var->location()) {
      Comment cmnt(masm_, "Global variable");
      __ Move(rcx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(rax);

      Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
            __ Push(var->name());
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          context()->Plug(rax);

      context()->Plug(var);

      Comment cmnt(masm_, "Lookup slot");
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(rax);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ bind(&materialized);

  Label allocated, runtime_allocate;
  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ bind(&allocated);
  if ((size % (2 * kPointerSize)) != 0) {
  context()->Plug(rax);

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
    VisitForStackValue(expression);
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Push(constant_properties);
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Move(rcx, key->handle());
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
            VisitForEffect(value);

        __ push(Operand(rsp, 0));
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);

        accessor_table.lookup(key)->second->getter = value;
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    __ push(Operand(rsp, 0));
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);

    context()->PlugTOS();
    context()->Plug(rax);
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  bool has_constant_fast_elements =
      constant_elements_kind == FAST_ELEMENTS;
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  __ Push(constant_elements);
  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (subexpr->AsLiteral() != NULL ||

    if (!result_saved) {
      result_saved = true;
    VisitForAccumulatorValue(subexpr);
      __ RecordWriteField(rbx, offset, result_register(), rcx,
      StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

    context()->PlugTOS();
    context()->Plug(rax);
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);

    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(rax);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->handle());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
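
// Binary operations on likely-smi operands get an inlined fast path: a
// JumpPatchSite records where the smi check lives so that the IC, once it
// learns the operand types, can patch the jump and route smi cases through
// the inlined code instead of the BinaryOpStub.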
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  Label done, stub_call, smi_case;

  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
    case Token::BIT_AND:
    case Token::BIT_XOR:
  context()->Plug(rax);

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  context()->Plug(rax);
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
    case NAMED_PROPERTY: {
      VisitForAccumulatorValue(prop->obj());
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
    case KEYED_PROPERTY: {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  context()->Plug(rax);
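
// Variable assignment depends on both the slot kind and the operation:
// unallocated (global) variables go through a store IC, const
// initialization tolerates only the hole value, 'let' assignments throw a
// ReferenceError when the binding is still the hole (temporal dead zone),
// and lookup slots defer to Runtime::kStoreContextSlot.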
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    __ Move(rcx, var->name());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
  } else if (op == Token::INIT_CONST) {
    ASSERT(!var->IsParameter());
    if (var->IsStackLocal()) {
      __ movq(rdx, StackOperand(var));
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ movq(StackOperand(var), rax);
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      __ Push(var->name());
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      __ Push(var->name());
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ movq(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ movq(rdx, location);
        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
        __ Check(equal, "Let binding re-initialization.");
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(
      ASSERT(var->IsLookupSlot());
      __ Push(var->name());
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ push(Operand(rsp, kPointerSize));
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());

  SetSourcePosition(expr->position());
  __ Move(rcx, prop->key()->AsLiteral()->handle());
  if (expr->ends_initialization_block()) {
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  if (expr->ends_initialization_block()) {
    __ push(Operand(rsp, kPointerSize));
    __ CallRuntime(Runtime::kToFastProperties, 1);

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ push(Operand(rsp, 2 * kPointerSize));
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());

  if (expr->ends_initialization_block()) {

  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  if (expr->ends_initialization_block()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    context()->Plug(rax);
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(rax);

void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               unsigned ast_id) {
  __ call(code, rmode, ast_id);
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->id());
  RecordJSReturnSite(expr);
  context()->Plug(rax);

void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  VisitForAccumulatorValue(key);

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, rax);
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->id(), cell);

  CallFunctionStub stub(arg_count, flags);
  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->id());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, rax);

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ push(Operand(rsp, arg_count * kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);

  __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));

  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
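
// Calls are dispatched on the callee shape: possible direct eval (resolve
// the function at runtime, then call), unallocated global (call IC), lookup
// slot (load function and receiver from the runtime), property access
// (named or keyed call IC), and everything else as a plain function call.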
void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));

      __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
      __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);

    SetSourcePosition(expr->position());
    __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, rax);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    { PreservePositionScope scope(masm()->positions_recorder());
      __ push(context_register());
      __ Push(proxy->name());
      __ CallRuntime(Runtime::kLoadContextSlot, 2);

      if (done.is_linked()) {
        __ jmp(&call, Label::kNear);
        __ PushRoot(Heap::kTheHoleValueRootIndex);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
      EmitKeyedCallWithIC(expr, property->key());
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);

  ASSERT(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  __ Set(rax, arg_count);
  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));

  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->id(), cell);

  CallConstructStub stub(flags);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(rax);
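
// -- Inline runtime predicates -----------------------------------------------
// The Emit* functions below implement the %_-prefixed inline runtime calls.
// The test-style ones all follow the same pattern: evaluate the argument
// into rax, PrepareTest() to fetch the context's branch targets, record a
// bailout point, then Split() on a machine condition.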
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ JumpIfSmi(rax, if_true);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(non_negative_smi, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (FLAG_debug_code) __ AbortIfSmi(rax);

  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
  __ LoadInstanceDescriptors(rbx, rbx);
  __ addq(rbx, Immediate(kPointerSize));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
  context()->Plug(rax);
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(rax, &null);
  __ Move(rax, isolate()->factory()->function_class_symbol());

  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_symbol());

  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  context()->Plug(rax);
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallRuntime(Runtime::kLog, 2);

  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  context()->Plug(rax);
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(1);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  __ movl(rcx, Immediate(0x49800000));

  context()->Plug(rax);
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(rax);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(rax);
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(rax, &done);
  context()->Plug(rax);
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));

  Label runtime, done;
  Register object = rax;
  Register result = rax;
  Register scratch = rcx;

  __ AbortIfSmi(object);
  __ Assert(equal, "Trying to get date field from non-date.");

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ movq(scratch, stamp);
    __ movq(result, FieldOperand(object, JSDate::kValueOffset +
                                         kPointerSize * index->value()));
    __ PrepareCallCFunction(2);
    __ movq(rcx, object);
    __ movq(rdi, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  context()->Plug(rax);
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  context()->Plug(rax);

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ JumpIfSmi(rbx, &done);
  context()->Plug(rax);
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  context()->Plug(rax);

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(rbx);
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ bind(&need_conversion);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  Label need_conversion;
  Label index_out_of_range;

  StringCharAtGenerator generator(object,
                                  &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  context()->Plug(rax);

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  context()->Plug(rax);

void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(rax);
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  VisitForAccumulatorValue(args->last());

  __ movq(rdi, result_register());
  ParameterCount count(arg_count);

  __ CallRuntime(Runtime::kCall, args->length());

  context()->Plug(rax);

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(rax);
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    context()->Plug(rax);

  VisitForAccumulatorValue(args->at(1));

  Register cache = rbx;

  Label done, not_found;
  __ jmp(&done, Label::kNear);

  __ bind(&not_found);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  context()->Plug(rax);
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = rax;
  Register left = rbx;
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmpq(left, right);
  __ j(equal, &ok, Label::kNear);
  // Fail if either operand is a smi.
  Condition either_smi = masm()->CheckEitherSmi(left, right);
  __ j(either_smi, &fail, Label::kNear);
  // ... (elided: type and map checks on both operands) ...
  __ j(zero, &fail, Label::kNear);
  // ... (elided: compare the two regexps' data arrays) ...
  __ j(equal, &ok, Label::kNear);
  __ bind(&fail);
  __ Move(rax, isolate()->factory()->false_value());
  __ jmp(&done, Label::kNear);
  __ bind(&ok);
  __ Move(rax, isolate()->factory()->true_value());
  __ bind(&done);
  context()->Plug(rax);
}
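
// Strings cache a numeric array index in their hash field when possible;
// the two helpers below test for and extract that cached index without
// leaving generated code.
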
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(rax);
  }

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}
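
// Fast path for Array.prototype.join on arrays of flat ASCII strings:
// validate the array and separator, sum the string lengths (bailing out
// on anything non-flat-ASCII or on overflow), allocate the result, then
// copy with one of three loops depending on whether the separator is
// empty, a single character, or longer.
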
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, return_result, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // The separator stays on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // The array ends up in rax.
  VisitForAccumulatorValue(args->at(0));

  Register array = rax;
  Register elements = no_reg;  // Will alias array.
  Register index = rdx;
  Register string_length = rcx;
  Register string = rsi;
  Register scratch = rbx;
  Register array_length = rdi;
  Register result_pos = no_reg;  // Will alias array_length.

  Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
  Operand result_operand =       Operand(rsp, 1 * kPointerSize);
  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
  __ subq(rsp, Immediate(2 * kPointerSize));

  // Bail out unless the receiver is a JSArray with fast elements.
  __ JumpIfSmi(array, &bailout);
  // ... (elided: JSArray map check) ...
  __ CheckFastElements(scratch, &bailout);

  // An empty array joins to the empty string.
  // ... (elided: load the smi array length and test it against zero) ...
  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
  __ jmp(&return_result);

  // Save the array length on the stack.
  __ bind(&non_trivial_array);
  __ SmiToInteger32(array_length, array_length);
  __ movl(array_length_operand, array_length);

  // ... (elided: move the elements FixedArray into |elements|, aliasing
  // |array|, and zero |index|) ...
  __ Set(string_length, 0);

  // Loop over all elements, checking that each is a sequential ASCII
  // string and summing their lengths (as smis) into |string_length|.
  if (FLAG_debug_code) {
    __ cmpq(index, array_length);
    __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  // ... (elided: bind(&loop); load elements[index] into |string|) ...
  __ JumpIfSmi(string, &bailout);
  // ... (elided: load |string|'s instance type into |scratch|) ...
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  // ... (elided: bail out unless the tag is sequential ASCII string) ...
  __ AddSmiField(string_length,
                 FieldOperand(string, SeqAsciiString::kLengthOffset));
  // ... (elided: bail out on overflow, then increment |index|) ...
  __ cmpl(index, array_length);
  // ... (elided: loop while index < array_length) ...

  // If the array has length 1, return its single element.
  __ cmpl(array_length, Immediate(1));
  // ... (elided: on equality, load elements[0] into rax) ...
  __ jmp(&return_result);

  __ bind(&not_size_one_array);

  // |array_length| is dead from here on; reuse its register.
  result_pos = array_length;

  // The separator must be a flat ASCII string as well.
  __ movq(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  // ... (elided: load the separator's instance type into |scratch|) ...
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  // ... (elided: bail out unless the tag is sequential ASCII string) ...

  // Add separator_length * (array_length - 1) to the total length.
  __ SmiToInteger32(scratch,
                    FieldOperand(string, SeqAsciiString::kLengthOffset));
  // ... (elided: decrement |index| to array_length - 1) ...
  __ imull(scratch, index);
  // ... (elided: bail out on overflow) ...
  __ addl(string_length, scratch);
  // ... (elided: bail out on overflow or an over-large allocation) ...

  // Allocate the result string and remember it on the stack.
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ movq(result_operand, result_pos);

  // Dispatch on the separator length.
  __ movq(string, separator_operand);
  // ... (elided: compare the separator length against one character;
  // longer separators jump to &long_separator) ...
  __ j(equal, &one_char_separator);

  // Loop 1: copy all elements with an empty separator.
  __ movl(scratch, array_length_operand);
  __ jmp(&loop_1_condition);
  // ... (elided: bind(&loop_1); fetch the next element into |string|) ...
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  // ... (elided: point |string| at its first character) ...
  __ CopyBytes(result_pos, string, string_length);
  __ bind(&loop_1_condition);
  __ cmpl(index, scratch);
  // ... (elided: loop back to &loop_1 while below, else jump to &done) ...

  // Bailout: the join could not be done inline; produce undefined so the
  // caller falls back to the generic implementation.
  __ bind(&bailout);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&return_result);

  // Loop 2: copy elements separated by a one-character separator.
  __ bind(&one_char_separator);
  // ... (elided: cache the separator's single character in |scratch|;
  // skip the first separator by entering at the loop entry) ...
  __ jmp(&loop_2_entry);
  // ... (elided: bind(&loop_2)) ...
  // Copy the separator character to the result.
  __ movb(Operand(result_pos, 0), scratch);
  __ incq(result_pos);
  __ bind(&loop_2_entry);
  // ... (elided: fetch the next element into |string|) ...
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  // ... (elided: point |string| at its first character) ...
  __ CopyBytes(result_pos, string, string_length);
  __ cmpl(index, array_length_operand);
  // ... (elided: loop back to &loop_2 while below, else jump to &done) ...

  // Loop 3: copy elements separated by a longer separator.
  __ bind(&long_separator);
  // ... (elided: let |index| count up toward zero so no extra loop
  // counter is needed) ...
  __ movl(index, array_length_operand);
  // Replace the separator with a pointer to its first character and keep
  // its length in |scratch|.
  __ movq(string, separator_operand);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  // ... (elided: point |string| at its first character) ...
  __ movq(separator_operand, string);

  // Enter at the loop entry so the first element gets no separator.
  __ jmp(&loop_3_entry);
  // ... (elided: bind(&loop_3)) ...
  // Copy the separator to the result.
  __ movq(string, separator_operand);
  __ movl(string_length, scratch);
  __ CopyBytes(result_pos, string, string_length, 2);

  __ bind(&loop_3_entry);
  // ... (elided: fetch the next element into |string|) ...
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  // ... (elided: point |string| at its first character) ...
  __ CopyBytes(result_pos, string, string_length);
  // ... (elided: loop back to &loop_3 until |index| reaches zero) ...

  // ... (elided: bind(&done)) ...
  __ movq(rax, result_operand);
  __ bind(&return_result);
  // Drop the temporaries and the separator, and restore the context
  // register, which doubled as |string| above.
  __ addq(rsp, Immediate(3 * kPointerSize));
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}
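
// CallRuntime nodes come in two flavors: '%_foo' is inlined through the
// Emit* helpers above, while '%foo' either calls a JS builtin through a
// call IC (when expr->is_jsruntime()) or lands in the C++ runtime.
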
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling a JS runtime function.
    __ movq(rax, GlobalObjectOperand());
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function using a call IC.
    __ Move(rcx, expr->name());
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore the context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(rax);
}
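
// 'delete' has three outcomes here: properties go through the DELETE
// builtin, unqualified globals are deletable only in classic mode, and
// stack- or context-allocated variables always yield false ('this' being
// the truthy exception).
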
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        // ... (elided: push the strict-mode flag as a smi and invoke the
        // DELETE builtin) ...
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict
        // mode, but "delete this" is allowed.
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ Push(var->name());
          // ... (elided: push kNonStrictMode and invoke the DELETE
          // builtin) ...
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Deleting a non-global variable yields false; 'this' is not
          // really a variable, though we implement it as one.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable: ask the runtime to try deleting from
          // the context in which the variable was introduced.
          __ push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(rax);
        }
      } else {
        // Deleting a non-property, non-variable reference yields true,
        // but the subexpression may still have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects, so only the subexpression needs
        // to be visited.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // Handle value contexts explicitly: extra bailout ids must be
        // prepared for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(rax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // The UnaryOpStub expects its argument in the accumulator register rax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(rax);
}
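
// Count operations (x++, --x, ...) inline the smi case: the increment is
// attempted on the tagged value directly, and the JumpPatchSite records
// the final smi check (see EmitPatchInfo below) so the IC machinery can
// patch it later once type feedback shows non-smi operands.
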
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // The expression can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate the expression and get the value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for the result of a postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(Smi::FromInt(0));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForAccumulatorValue(prop->obj());
      __ push(rax);  // Copy of receiver, needed for later store.
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack.
      __ push(rax);  // Copy of key, needed for later store.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // A second deoptimization point is needed after loading the value, in
  // case evaluating the property load had a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if the operand is not a smi.
  Label no_conversion;
  __ JumpIfSmi(rax, &no_conversion, Label::kNear);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save the result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack.  For a named or keyed property the
      // result is stored under the receiver that is on top of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(rax);
          break;
        case NAMED_PROPERTY:
          __ movq(Operand(rsp, kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movq(Operand(rsp, 2 * kPointerSize), rax);
          break;
      }
    }
  }

  // Inline the smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // This smi check could be eliminated by splitting the code at the
    // first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call the stub.  Undo the operation first.
    if (expr->op() == Token::INC) {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    }
  }

  // Record the position before the stub call.
  SetSourcePosition(expr->position());

  // Call the stub for +1/-1.
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  if (expr->op() == Token::INC) {
    __ Move(rdx, Smi::FromInt(1));
  } else {
    __ movq(rdx, rax);
    __ Move(rax, Smi::FromInt(1));
  }
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(rax);
        }
        // For all contexts except kEffect the result is on top of the
        // stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(rcx);
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Move(rcx, proxy->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(rax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(rsi);
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(rax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
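
// Compiles literal comparisons such as
//
//   typeof x == 'number'
//
// without materializing the typeof string: each recognized string gets a
// specialized map or instance-type check against the value in rax.
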
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(rax, if_true);
    __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(rax, if_false);
    // Undetectable objects are never strings.
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above_equal, if_false);
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // Undetectable objects answer 'undefined'.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
    __ j(equal, if_true);
    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(rax, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(rax, Heap::kNullValueRootIndex);
      __ j(equal, if_true);
    }
    // ... (elided: range check against the non-callable spec object
    // instance types) ...
    // Undetectable objects answer 'undefined', not 'object'.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
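
// Generic comparisons: TryLiteralCompare first, then 'in' and
// 'instanceof' via a builtin and a stub, and everything else through the
// CompareIC, again with a patchable inline smi fast case.
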
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First try a fast inlined version of the compare when one of the
  // operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow; pack the result
  // into the expression's context afterwards.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = equal;
          __ pop(rdx);
          break;
        // ... (elided: the LT/GT/LTE/GTE cases set |cc| and shuffle the
        // operands so rdx holds the left and rax the right operand) ...
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movq(rcx, rdx);
        __ or_(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpq(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record the source position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
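
// 'x === null' matches only null itself, while 'x == null' must also
// accept undefined and undetectable objects (such as document.all in
// embedders that use them), hence the extra map-bit test below.
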
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Heap::RootListIndex nil_value = nil == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ CompareRoot(rax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Heap::RootListIndex other_nil_value = nil == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    __ j(equal, if_true);
    __ CompareRoot(rax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // It can be an undetectable object.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}
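
// Plumbing: on x64 the full code generator keeps the accumulator in rax
// and the context in rsi, as the two accessors below spell out.
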
Register FullCodeGenerator::result_register() {
  return rax;
}
Register FullCodeGenerator::context_register() {
  return rsi;
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the global context have a canonical empty
    // function as their closure: pass a smi sentinel and let the runtime
    // look up the empty function.
    __ Push(Smi::FromInt(0));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval; fetch it from the context.
    __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
void FullCodeGenerator::EnterFinallyBlock() {
  // ... (elided: cook the return address on top of the stack into a
  // smi-encoded delta from the code object) ...
  // Store the result register while executing the finally block.
  __ push(result_register());

  // Store the pending message while executing the finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ push(rdx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Load(rdx, has_pending_message);
  __ push(rdx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Load(rdx, pending_message_script);
  __ push(rdx);
}
void FullCodeGenerator::ExitFinallyBlock() {
  // Restore the pending message from the stack, in reverse order.
  __ pop(rdx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Store(pending_message_script, rdx);

  __ pop(rdx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Store(has_pending_message, rdx);

  __ pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);

  // Restore the result register from the stack.
  __ pop(result_register());
  // ... (elided: uncook the return address and jump to it) ...
}
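
// Note the cook/uncook pairing in Enter/ExitFinallyBlock above: while a
// finally block runs, the return address is held as a smi-encoded offset
// from the code object, so a moving GC cannot invalidate a raw return
// address stored on the stack.
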
#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and to the stack.
    // ... (elided: reload rsi from the handler and spill it to the
    // frame's context slot) ...
  }
  // ... (elided: pop the try handler) ...
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
#undef __

#endif  // V8_TARGET_ARCH_X64