#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm_)
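// A patch site is a location in the generated code that the inline cache
// machinery can later patch. The cmp(reg, reg) sequences emitted below act as
// placeholders: reg always equals itself, so the branch that follows is
// unconditionally taken and the inlined smi code is skipped until type
// feedback rewrites the cmp into a real smi test.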
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));  // Placeholder, patched into a smi check.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));  // Placeholder, patched into a smi check.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // Encode the distance back to the patch site in the cmp immediate.
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};
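// Generate code for a JS function. On entry to the function the receiver and
// arguments have been pushed on the stack left to right. The actual argument
// count matches the formal parameter count expected by the function. The live
// registers are: r1 holds the JS function being called, cp the context, fp
// the caller's frame pointer, sp the stack pointer and lr the return address.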
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Object>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  if (!info->is_classic_mode() || info->is_native()) {
    __ cmp(r5, Operand(0));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();
  if (locals_count > 0) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);

  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    FastNewContextStub stub(heap_slots);
    __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (!is_classic_mode()) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);
    SetVar(arguments, r0, r1, r2);

  __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  EmitReturnSequence();
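// The profiling counter is decremented at back edges and at returns; when it
// reaches zero an interrupt is taken, which the runtime uses as a signal to
// consider (self-)optimizing the function. The helpers below emit the
// decrement and the reset of that counter.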
void FullCodeGenerator::ClearAccumulator() {

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
  if (isolate()->IsDebuggerActive()) {
  __ mov(r2, Operand(profiling_counter_));

static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 142;
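// EmitStackCheck is emitted at the back edge of every loop. Besides the
// actual stack-limit check it decrements the profiling counter with a weight
// proportional to the back-edge distance, so longer loop bodies trip the
// interrupt sooner, and it records the OSR entry point for this back edge.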
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  if (FLAG_count_based_interrupts) {
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    EmitProfilingCounterDecrement(weight);
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  RecordStackCheck(stmt->OsrEntryId());
  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    __ CallRuntime(Runtime::kTraceExit, 1);
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      EmitProfilingCounterDecrement(weight);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      EmitProfilingCounterReset();

    // Verify that the exit sequence has the expected, patchable length.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
    masm_->add(sp, sp, Operand(sp_delta));
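// The *Context::Plug methods deliver a value to the current expression
// context: EffectContext drops it, AccumulatorValueContext leaves it in the
// result register (r0), StackValueContext pushes it, and TestContext converts
// it to a boolean and branches to the appropriate label.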
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);

void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ b(false_label_);
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ b(false_label_);
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  __ Move(result_register(), reg);

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);

void FullCodeGenerator::EffectContext::Plug(bool flag) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
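// DoTest converts the value in the result register to a boolean and splits
// control flow. With VFP3 the ToBooleanStub is used and its result tested;
// otherwise the value is passed to Runtime::kToBool and the answer is
// compared against the false root before branching.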
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ tst(result_register(), result_register());
  __ push(result_register());
  __ CallRuntime(Runtime::kToBool, 1);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  Split(ne, if_true, if_false, fall_through);

void FullCodeGenerator::Split(Condition cond,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
  return StackOperand(var);

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  __ ldr(dest, location);

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  if (!context()->IsTest() || !info_->IsOptimizable()) return;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, "Declaration in with context.");
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, "Declaration in catch context.");
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ str(ip, StackOperand(variable));
      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(cp,
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());
      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ mov(r1, Operand(instance));
      Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ mov(r1, Operand(pairs));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);

    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);
      __ b(ne, &next_test);
      __ b(clause->body_target());
      __ bind(&slow_case);

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ cmp(r0, Operand(0));
    __ b(ne, &next_test);
    __ b(clause->body_target());

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
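// For-in iterates the enumerable properties of an object. The fast path
// reuses the enum cache hanging off the map's descriptor array; otherwise a
// fixed array of property names is obtained from the runtime and each name is
// filtered before being assigned to the loop variable.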
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over; both null and undefined are skipped.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ b(ge, &done_convert);
  __ bind(&done_convert);

  __ b(le, &call_runtime);

  // Check cache validity. If the cache is valid, we can use the enum cache.
  __ CheckEnumCache(null_value, &call_runtime);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast modification
  // check. Otherwise, we got a fixed array, and we have to do a slow check.
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(r0, r1);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(r1, cell);

  __ b(gt, &non_proxy);
  __ bind(&non_proxy);

  __ b(hs, loop_statement.break_label());

  __ b(eq, &update_each);
  __ b(eq, &update_each);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  __ bind(loop_statement.break_label());

  decrement_loop_depth();
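// Closures for nested functions that have no literals can be allocated on the
// fast path with FastNewClosureStub; otherwise the shared function info and
// the pretenure flag are passed to Runtime::kNewClosure.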
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ mov(r0, Operand(info));
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ CallRuntime(Runtime::kNewClosure, 3);
  context()->Plug(r0);
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
  Register current = cp;
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s->is_eval_scope()) {
    if (!current.is(next)) {
      __ Move(next, current);
    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);

  __ mov(r2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = cp;
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
  Variable* local = var->local_if_not_shadowed();
  __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
  if (local->mode() == CONST ||
      local->mode() == LET) {
    __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
    if (local->mode() == CONST) {
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
    } else {
      __ mov(r0, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
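// Variable loads dispatch on where the variable lives: unallocated (global)
// variables go through a contextual load IC, stack- and context-allocated
// variables are read from their slot (with a hole check for uninitialized
// let/const bindings), and dynamically scoped variables fall back to
// Runtime::kLoadContextSlot.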
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  switch (var->location()) {
      Comment cmnt(masm_, "Global variable");
      __ mov(r2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(r0);

      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          __ mov(r0, Operand(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          context()->Plug(r0);
      context()->Plug(var);

      Comment cmnt(masm_, "Lookup variable");
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(r0);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ b(ne, &materialized);
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  Label allocated, runtime_allocate;

  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);
  context()->Plug(r0);
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->handle()));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
          } else {
            VisitForEffect(value);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
        accessor_table.lookup(key)->second->getter = value;
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (subexpr->AsLiteral() != NULL ||
    if (!result_saved) {
      result_saved = true;
    VisitForAccumulatorValue(subexpr);
    __ RecordWriteField(r1, offset, result_register(), r2,
    StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(r0);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;
  Register right = r0;

  // Perform combined smi check on both operands.
  __ orr(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();

  // Smi case: operate on untagged values; any overflow or lost bits sends
  // control to the stub call above.
  // Token::SAR:
  __ GetLeastBitsFromSmi(scratch1, right, 5);
  __ mov(right, Operand(left, ASR, scratch1));
  // Token::SHL:
  __ SmiUntag(scratch1, left);
  __ GetLeastBitsFromSmi(scratch2, right, 5);
  __ mov(scratch1, Operand(scratch1, LSL, scratch2));
  __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
  __ b(mi, &stub_call);
  __ SmiTag(right, scratch1);
  // Token::SHR:
  __ SmiUntag(scratch1, left);
  __ GetLeastBitsFromSmi(scratch2, right, 5);
  __ mov(scratch1, Operand(scratch1, LSR, scratch2));
  __ tst(scratch1, Operand(0xc0000000));
  __ b(ne, &stub_call);
  __ SmiTag(right, scratch1);
  // Token::ADD:
  __ add(scratch1, left, Operand(right), SetCC);
  __ b(vs, &stub_call);
  __ mov(right, scratch1);
  // Token::SUB:
  __ sub(scratch1, left, Operand(right), SetCC);
  __ b(vs, &stub_call);
  __ mov(right, scratch1);
  // Token::MUL:
  __ SmiUntag(ip, right);
  __ smull(scratch1, scratch2, left, ip);
  __ mov(ip, Operand(scratch1, ASR, 31));
  __ cmp(ip, Operand(scratch2));
  __ b(ne, &stub_call);
  __ cmp(scratch1, Operand(0));
  __ add(scratch2, right, Operand(left), SetCC);
  __ b(mi, &stub_call);
  // Token::BIT_OR:
  __ orr(right, left, Operand(right));
  case Token::BIT_AND:
    __ and_(right, left, Operand(right));
  case Token::BIT_XOR:
    __ eor(right, left, Operand(right));

  context()->Plug(r0);
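// Unlike EmitInlineSmiBinaryOp above, EmitBinaryOp emits no inlined smi fast
// path: it always calls the BinaryOpStub IC, and the patch site stays
// unbound, signalling that no inlined smi code precedes the call.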
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
    case NAMED_PROPERTY: {
      VisitForAccumulatorValue(prop->obj());
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
    case KEYED_PROPERTY: {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  context()->Plug(r0);
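// EmitVariableAssignment dispatches on the variable's location and on the
// kind of assignment: global stores go through a store IC, const
// initialization checks the slot for the hole, assignments to let variables
// throw on uninitialized bindings, and lookup slots are handed off to
// Runtime::kStoreContextSlot.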
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(r2, Operand(var->name()));
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());
    if (var->IsStackLocal()) {
      __ ldr(r1, StackOperand(var));
      __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
      __ str(result_register(), StackOperand(var));
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      __ mov(r0, Operand(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ ldr(r3, location);
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ mov(r3, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ str(result_register(), location);
      if (var->IsContextSlot()) {
        __ mov(r3, result_register());
        __ RecordWriteContextSlot(

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ ldr(r2, location);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.");
      __ str(r0, location);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(
    } else {
      ASSERT(var->IsLookupSlot());
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
  if (expr->ends_initialization_block()) {

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  if (expr->ends_initialization_block()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  if (expr->ends_initialization_block()) {

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  if (expr->ends_initialization_block()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    context()->Plug(r0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(r0);

void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
  __ Call(code, rmode, ast_id);
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    __ mov(r2, Operand(name));
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->id());
  RecordJSReturnSite(expr);
  context()->Plug(r0);

void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
  VisitForAccumulatorValue(key);
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, r0);

void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->id(), cell);
  __ mov(r2, Operand(cell));

  CallFunctionStub stub(arg_count, flags);
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, r0);
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
  } else {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      EmitResolvePossiblyDirectEval(arg_count);

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, r0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    { PreservePositionScope scope(masm()->positions_recorder());
    __ push(context_register());
    __ mov(r2, Operand(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    if (done.is_linked()) {
      __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
  } else {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);

  ASSERT(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());
  __ mov(r0, Operand(arg_count));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->id(), cell);
  __ mov(r2, Operand(cell));

  CallConstructStub stub(flags);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(r0);
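// The Emit* functions below implement the %_... inline runtime calls used by
// the library code. Most of them evaluate their argument into the
// accumulator, perform a test and hand the two branch targets to Split().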
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (FLAG_debug_code) __ AbortIfSmi(r0);
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ LoadInstanceDescriptors(r1, r4);
  __ add(r4, r4, Operand(kPointerSize));
  __ JumpIfSmi(r2, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ b(ne, &check_frame_marker);
  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  context()->Plug(r0);

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(r0, &null);
  __ b(eq, &function);
  __ b(eq, &function);
  __ b(ne, &non_function_constructor);
  __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);

  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_symbolRootIndex);

  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  context()->Plug(r0);
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallRuntime(Runtime::kLog, 2);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  context()->Plug(r0);

void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);

  __ bind(&heapnumber_allocated);
  __ PrepareCallCFunction(1, r0);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  CpuFeatures::Scope scope(VFP3);
  // Create this constant using mov/orr to avoid a PC relative load.
  __ mov(r1, Operand(0x41000000));
  __ orr(r1, r1, Operand(0x300000));
  __ PrepareCallCFunction(2, r0);
  __ CallCFunction(
      ExternalReference::fill_heap_number_with_random_function(isolate()), 2);

  context()->Plug(r0);
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(r0);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(r0);

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  context()->Plug(r0);

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));

  Label runtime, done;
  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  __ AbortIfSmi(object);
  __ Assert(eq, "Trying to get date field from non-date.");

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ mov(scratch1, Operand(stamp));
    __ cmp(scratch1, scratch0);
                          kPointerSize * index->value()));
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
  context()->Plug(r0);
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kMath_pow, 2);
  context()->Plug(r0);

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ JumpIfSmi(r1, &done);
  __ RecordWriteField(
  context()->Plug(r0);

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  NumberToStringStub stub;
  context()->Plug(r0);

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(r1);
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  Label need_conversion;
  Label index_out_of_range;

  StringCharAtGenerator generator(object,
                                  &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  context()->Plug(r0);

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  StringCompareStub stub;
  context()->Plug(r0);

void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // Check for proxy.
  Label proxy, done;
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
  __ b(eq, &proxy);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&proxy);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}

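// %_GetFromCache(cache_id, key): probes the per-context JSFunction result
// cache at its finger position and falls back to Runtime::kGetFromCache on
// a miss.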
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
  __ ldr(cache,
         ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // Probe the cache at its current finger position for the key.
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize,
                        PreIndex));
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Cache miss: call the runtime to do the lookup and update the cache.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}

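// %_IsRegExpEquivalent(left, right): true if both operands are the same
// JSRegExp object, or are JSRegExps with the same map and the same data
// array.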
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = r0;
  Register left = r1;
  Register tmp = r2;
  Register tmp2 = r3;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, Operand(right));
  __ b(eq, &ok);
  // Fail if either is a non-HeapObject.
  __ and_(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
  __ b(ne, &fail);
  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ cmp(tmp, Operand(tmp2));
  __ b(ne, &fail);
  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ cmp(tmp, tmp2);
  __ b(eq, &ok);
  __ bind(&fail);
  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&ok);
  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(r0);
  }

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}

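// %_FastAsciiArrayJoin(array, separator): joins an array of flat ASCII
// strings without the generic Array.prototype.join runtime path, bailing
// out (result undefined) on non-fast elements, non-ASCII strings or
// length overflow.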
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch1 = r7;
  Register scratch2 = r9;

  // Separator operand is on the stack.
  __ ldr(separator, MemOperand(sp));

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing the array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths as a smi-encoded value.
  __ mov(string_length, Operand(0));
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  if (FLAG_debug_code) {
    __ cmp(array_length, Operand(0));
    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  // Loop condition: while (element < elements_end).
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch1), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to
  // string_length to get the length of the result string; check the
  // multiplication for smi overflow along the way.
  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch1));
  __ smull(scratch2, ip, array_length, scratch1);
  __ cmp(ip, Operand(0));
  __ b(ne, &bailout);
  __ tst(scratch2, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch2), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Allocate the result string and set up element, elements_end and
  // result_pos for the copy loops.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of elements' live range.
  elements = no_reg;
  __ AllocateAsciiString(result,
                         string_length,
                         scratch1,
                         scratch2,
                         elements_end,
                         &bailout);
  __ add(elements_end,
         element,
         Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of array_length's live range.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case: copy the array elements back to back.
  __ bind(&empty_separator_loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace the separator with its ASCII character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy the next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  __ b(&done);

  // Long separator case: copy the separator string between elements.
  __ bind(&long_separator_loop);
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}

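// Runtime calls prefixed with '%_' are inlined intrinsics; other calls
// are dispatched to a JS builtin (is_jsruntime) or a C++ runtime function.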
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ ldr(r0, GlobalObjectOperand());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ mov(r2, Operand(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(r0);
}

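// Unary expressions. DELETE, VOID, NOT and TYPEOF are handled case by
// case; ADD only needs a ToNumber conversion, while SUB and BIT_NOT share
// the generic UnaryOpStub path via EmitUnaryOperation.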
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ mov(r2, Operand(var->name()));
          __ push(r2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects, so visit the subexpression only
        // for its effect.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // Value contexts are handled explicitly rather than by visiting
        // for control, because a pair of extra administrative AST ids has
        // to be prepared for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}

void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the accumulator register r0.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(r0);
}

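// Count operations (prefix and postfix ++/--). The target may be a
// variable, a named property or a keyed property; postfix forms also keep
// the old value on the stack for the surrounding expression.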
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ ldr(r1, MemOperand(sp, 0));
      __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  __ JumpIfSmi(r0, &no_conversion);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vs, &stub_call);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(r0, &done);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
  }
  __ mov(r1, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on top
        // of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
      __ pop(r1);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(r2, r1);  // r1 = key. r2 = receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}

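// Loads the operand of a typeof expression without throwing a reference
// error for unresolved global or lookup-slot variables.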
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

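// Specializes comparisons of the form typeof expr == 'literal': each
// recognized type name gets a direct map or instance-type check instead of
// materializing the type string.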
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(r0, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(r0, Heap::kNullValueRootIndex);
      __ b(eq, if_true);
    }
    // Check for JS objects => true, but undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

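// Generic comparisons. IN and INSTANCEOF call a builtin or stub; the
// remaining operators try an inlined smi comparison before falling back to
// the compare IC.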
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First try a fast inlined version of the compare when one of the
  // operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = eq;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cond = eq;
          __ pop(r1);
          break;
        case Token::LT:
          cond = lt;
          __ pop(r1);
          break;
        case Token::GT:
          // Reverse left and right sides to obtain ECMA-262 conversion
          // order.
          cond = lt;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::LTE:
          // Reverse left and right sides to obtain ECMA-262 conversion
          // order.
          cond = ge;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::GTE:
          cond = ge;
          __ pop(r1);
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand(0));
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

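// Specializes comparisons against the null and undefined literals. For
// non-strict equality the other nil value and undetectable objects also
// compare equal.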
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Heap::RootListIndex nil_value = nil == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ LoadRoot(r1, nil_value);
  __ cmp(r0, r1);
  if (expr->op() == Token::EQ_STRICT) {
    Split(eq, if_true, if_false, fall_through);
  } else {
    Heap::RootListIndex other_nil_value = nil == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    __ b(eq, if_true);
    __ LoadRoot(r1, other_nil_value);
    __ cmp(r0, r1);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // It can be an undetectable object.
    __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
    __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
    __ cmp(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}

Register FullCodeGenerator::result_register() {
  return r0;
}

Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}

void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}

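// The body of a try..finally must be able to resume at the instruction
// after the call that entered it and must not clobber the pending message.
// Enter/ExitFinallyBlock save and restore the result register, the cooked
// return address and the pending-message state on the stack.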
void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook the return address in the link register to a smi-encoded offset
  // from the code start, so it survives GC.
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(kSmiTag == 0);
  __ add(r1, r1, Operand(r1));  // Convert to smi.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}

void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore the cooked return address and the result register, then
  // return to the instruction after the call into the finally block.
  __ pop(r1);
  __ pop(result_register());
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ mov(r1, Operand(r1, ASR, 1));  // Un-smi-tag the offset.
  __ add(pc, r1, Operand(masm_->CodeObject()));
}

#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM