30 #if V8_TARGET_ARCH_ARM64
48 #define __ ACCESS_MASM(masm_)
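// JumpPatchSite remembers where an inline smi check was emitted so the IC
// system can later patch it. Before patching, the emitted tbz/tbnz tests
// bit 0 of xzr (an always/never taken branch); the patcher presumably
// rewrites it to test bit 0 (the smi tag) of the recorded value register.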
52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
54 info_emitted_ = false;
59 if (patch_site_.is_bound()) {
66 void EmitJumpIfNotSmi(Register reg, Label* target) {
68 InstructionAccurateScope scope(masm_, 1);
73 __ bind(&patch_site_);
74 __ tbz(xzr, 0, target);
77 void EmitJumpIfSmi(Register reg, Label* target) {
79 InstructionAccurateScope scope(masm_, 1);
84 __ bind(&patch_site_);
85 __ tbnz(xzr, 0, target);
88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
89 UseScratchRegisterScope temps(masm_);
90 Register temp = temps.AcquireX();
91 __ Orr(temp, reg1, reg2);
92 EmitJumpIfNotSmi(temp, target);
95 void EmitPatchInfo() {
96 Assembler::BlockPoolsScope scope(masm_);
104 MacroAssembler* masm_;
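// Helper used by Generate() below: compares the stack pointer (optionally
// lowered by 'pointers' slots, via 'scratch') against the stack limit root
// and calls the StackCheck builtin when the limit is reached.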
113 static void EmitStackCheck(MacroAssembler* masm_,
115 Register scratch = jssp) {
116 Isolate* isolate = masm_->isolate();
119 ASSERT(scratch.Is(jssp) == (pointers == 0));
123 __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
125 PredictableCodeSizeScope predictable(masm_,
127 __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
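// Generate() emits the full-codegen prologue and body for the current
// function: sloppy-mode receiver patching, frame setup, stack-allocated
// locals, the function context and arguments object when needed,
// declarations, an initial stack check, the body, and the implicit
// "return undefined".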
146 void FullCodeGenerator::Generate() {
147 CompilationInfo* info = info_;
149 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
151 InitializeFeedbackVector();
153 profiling_counter_ = isolate()->factory()->NewCell(
154 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
155 SetFunctionPosition(function());
156 Comment cmnt(masm_, "[ Function compiled by full code generator");
161 if (strlen(FLAG_stop_at) > 0 &&
162 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
163 __ Debug("stop-at", __LINE__, BREAK);
170 if (info->strict_mode() == SLOPPY && !info->is_native()) {
172 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
173 __ Peek(x10, receiver_offset);
174 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
178 __ Poke(x10, receiver_offset);
187 FrameScope frame_scope(masm_, StackFrame::MANUAL);
193 info->set_prologue_offset(masm_->pc_offset());
195 info->AddNoFrameRange(0, masm_->pc_offset());
198 { Comment cmnt(masm_, "[ Allocate locals");
199 int locals_count = info->scope()->num_stack_slots();
201 ASSERT(!info->function()->is_generator() || locals_count == 0);
203 if (locals_count > 0) {
204 if (locals_count >= 128) {
205 EmitStackCheck(masm_, locals_count, x10);
207 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
208 if (FLAG_optimize_for_size) {
209 __ PushMultipleTimes(x10, locals_count);
211 const int kMaxPushes = 32;
212 if (locals_count >= kMaxPushes) {
213 int loop_iterations = locals_count / kMaxPushes;
214 __ Mov(x3, loop_iterations);
216 __ Bind(&loop_header);
218 __ PushMultipleTimes(x10, kMaxPushes);
220 __ B(ne, &loop_header);
222 int remaining = locals_count % kMaxPushes;
224 __ PushMultipleTimes(x10, remaining);
229 bool function_in_register_x1 = true;
232 if (heap_slots > 0) {
234 Comment cmnt(masm_, "[ Allocate context");
235 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
236 __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
238 __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
239 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
240 FastNewContextStub stub(heap_slots);
244 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
246 function_in_register_x1 = false;
252 int num_parameters = info->scope()->num_parameters();
253 for (int i = 0; i < num_parameters; i++) {
255 if (var->IsContextSlot()) {
265 __ RecordWriteContextSlot(
271 Variable* arguments = scope()->arguments();
272 if (arguments != NULL) {
274 Comment cmnt(masm_, "[ Allocate arguments object");
275 if (!function_in_register_x1) {
282 int num_parameters = info->scope()->num_parameters();
293 if (strict_mode() == STRICT) {
295 } else if (function()->has_duplicate_parameters()) {
300 ArgumentsAccessStub stub(type);
303 SetVar(arguments, x0, x1, x2);
307 __ CallRuntime(Runtime::kTraceEnter, 0);
313 if (scope()->HasIllegalRedeclaration()) {
314 Comment cmnt(masm_, "[ Declarations");
319 { Comment cmnt(masm_, "[ Declarations");
320 if (scope()->is_function_scope() && scope()->function() != NULL) {
321 VariableDeclaration* function = scope()->function();
322 ASSERT(function->proxy()->var()->mode() == CONST ||
325 VisitVariableDeclaration(function);
327 VisitDeclarations(scope()->declarations());
331 { Comment cmnt(masm_, "[ Stack check");
333 EmitStackCheck(masm_);
336 { Comment cmnt(masm_, "[ Body");
337 ASSERT(loop_depth() == 0);
338 VisitStatements(function()->body());
339 ASSERT(loop_depth() == 0);
344 { Comment cmnt(masm_, "[ return <undefined>;");
345 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
347 EmitReturnSequence();
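// The helpers below maintain the interrupt budget kept in the
// profiling_counter_ cell: it is decremented on back edges and returns,
// and reset after the InterruptCheck builtin has run.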
356 void FullCodeGenerator::ClearAccumulator() {
361 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
362 __ Mov(x2, Operand(profiling_counter_));
369 void FullCodeGenerator::EmitProfilingCounterReset() {
370 int reset_value = FLAG_interrupt_budget;
371 if (isolate()->IsDebuggerActive()) {
373 reset_value = FLAG_interrupt_budget >> 4;
375 __ Mov(x2, Operand(profiling_counter_));
381 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
382 Label* back_edge_target) {
384 Comment cmnt(masm_, "[ Back edge bookkeeping");
386 Assembler::BlockPoolsScope block_const_pool(masm_);
389 ASSERT(back_edge_target->is_bound());
390 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
392 Max(1, distance / kCodeSizeMultiplier));
393 EmitProfilingCounterDecrement(weight);
395 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
400 RecordBackEdge(stmt->OsrEntryId());
402 EmitProfilingCounterReset();
409 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
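// Emits the epilogue: an optional TraceExit call, a final interrupt-budget
// check, then frame teardown and ret. The teardown appears to be wrapped
// in an InstructionAccurateScope so its exact size is known to the
// patching machinery.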
413 void FullCodeGenerator::EmitReturnSequence() {
414 Comment cmnt(masm_, "[ Return sequence");
416 if (return_label_.is_bound()) {
417 __ B(&return_label_);
420 __ Bind(&return_label_);
424 __ Push(result_register());
425 __ CallRuntime(Runtime::kTraceExit, 1);
426 ASSERT(x0.Is(result_register()));
431 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
433 int distance = masm_->pc_offset();
435 Max(1, distance / kCodeSizeMultiplier));
437 EmitProfilingCounterDecrement(weight);
441 __ Call(isolate()->builtins()->InterruptCheck(),
442 RelocInfo::CODE_TARGET);
444 EmitProfilingCounterReset();
451 InstructionAccurateScope scope(masm_,
458 const Register& current_sp = __ StackPointer();
460 ASSERT(!current_sp.Is(csp));
461 __ mov(current_sp, fp);
462 int no_frame_start = masm_->pc_offset();
469 __ add(current_sp, current_sp, ip0);
478 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
479 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
483 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
484 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
485 codegen()->GetVar(result_register(), var);
489 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
490 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
491 codegen()->GetVar(result_register(), var);
492 __ Push(result_register());
496 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
497 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
499 codegen()->GetVar(result_register(), var);
500 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
501 codegen()->DoTest(this);
510 void FullCodeGenerator::AccumulatorValueContext::Plug(
512 __ LoadRoot(result_register(), index);
516 void FullCodeGenerator::StackValueContext::Plug(
518 __ LoadRoot(result_register(), index);
519 __ Push(result_register());
524 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
526 if (index == Heap::kUndefinedValueRootIndex ||
527 index == Heap::kNullValueRootIndex ||
528 index == Heap::kFalseValueRootIndex) {
529 if (false_label_ != fall_through_) __ B(false_label_);
530 } else if (index == Heap::kTrueValueRootIndex) {
531 if (true_label_ != fall_through_) __ B(true_label_);
533 __ LoadRoot(result_register(), index);
534 codegen()->DoTest(this);
539 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
543 void FullCodeGenerator::AccumulatorValueContext::Plug(
544 Handle<Object> lit) const {
545 __ Mov(result_register(), Operand(lit));
549 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
551 __ Mov(result_register(), Operand(lit));
552 __ Push(result_register());
556 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
557 codegen()->PrepareForBailoutBeforeSplit(condition(),
561 ASSERT(!lit->IsUndetectableObject());
562 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
563 if (false_label_ != fall_through_) __ B(false_label_);
564 } else if (lit->IsTrue() || lit->IsJSObject()) {
565 if (true_label_ != fall_through_) __ B(true_label_);
566 } else if (lit->IsString()) {
568 if (false_label_ != fall_through_) __ B(false_label_);
570 if (true_label_ != fall_through_) __ B(true_label_);
572 } else if (lit->IsSmi()) {
574 if (false_label_ != fall_through_) __ B(false_label_);
576 if (true_label_ != fall_through_) __ B(true_label_);
580 __ Mov(result_register(), Operand(lit));
581 codegen()->DoTest(this);
586 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
587 Register reg) const {
593 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
595 Register reg) const {
598 __ Move(result_register(), reg);
602 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
603 Register reg) const {
605 if (count > 1) __ Drop(count - 1);
610 void FullCodeGenerator::TestContext::DropAndPlug(int count,
611 Register reg) const {
615 __ Mov(result_register(), reg);
616 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
617 codegen()->DoTest(this);
621 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
622 Label* materialize_false) const {
623 ASSERT(materialize_true == materialize_false);
624 __ Bind(materialize_true);
628 void FullCodeGenerator::AccumulatorValueContext::Plug(
629 Label* materialize_true,
630 Label* materialize_false) const {
632 __ Bind(materialize_true);
633 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
635 __ Bind(materialize_false);
636 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
641 void FullCodeGenerator::StackValueContext::Plug(
642 Label* materialize_true,
643 Label* materialize_false) const {
645 __ Bind(materialize_true);
646 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
648 __ Bind(materialize_false);
649 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
655 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
656 Label* materialize_false) const {
657 ASSERT(materialize_true == true_label_);
658 ASSERT(materialize_false == false_label_);
662 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
666 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
668 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
669 __ LoadRoot(result_register(), value_root_index);
673 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
675 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
676 __ LoadRoot(x10, value_root_index);
681 void FullCodeGenerator::TestContext::Plug(bool flag) const {
682 codegen()->PrepareForBailoutBeforeSplit(condition(),
687 if (true_label_ != fall_through_) {
691 if (false_label_ != fall_through_) {
698 void FullCodeGenerator::DoTest(Expression* condition,
701 Label* fall_through) {
703 CallIC(ic, condition->test_id());
704 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
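// Split() branches to if_true/if_false on the given condition, omitting a
// branch whenever the corresponding label is the fall-through target.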
712 void FullCodeGenerator::Split(Condition cond,
715 Label* fall_through) {
716 if (if_false == fall_through) {
718 } else if (if_true == fall_through) {
719 ASSERT(if_false != fall_through);
728 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
732 if (var->IsParameter()) {
741 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
742 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
743 if (var->IsContextSlot()) {
745 __ LoadContext(scratch, context_chain_length);
748 return StackOperand(var);
753 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
756 __ Ldr(dest, location);
760 void FullCodeGenerator::SetVar(Variable* var,
764 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
766 MemOperand location = VarOperand(var, scratch0);
767 __ Str(src, location);
770 if (var->IsContextSlot()) {
772 __ RecordWriteContextSlot(scratch0,
782 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
783 bool should_normalize,
793 if (should_normalize) {
796 PrepareForBailout(expr, TOS_REG);
797 if (should_normalize) {
798 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
799 Split(eq, if_true, if_false, NULL);
805 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
808 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
809 if (generate_debug_code_) {
812 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
813 __ Check(ne, kDeclarationInWithContext);
814 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
815 __ Check(ne, kDeclarationInCatchContext);
820 void FullCodeGenerator::VisitVariableDeclaration(
821 VariableDeclaration* declaration) {
825 VariableProxy* proxy = declaration->proxy();
827 Variable* variable = proxy->var();
830 switch (variable->location()) {
832 globals_->Add(variable->name(), zone());
833 globals_->Add(variable->binding_needs_init()
834 ? isolate()->factory()->the_hole_value()
835 : isolate()->factory()->undefined_value(),
842 Comment cmnt(masm_, "[ VariableDeclaration");
843 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
844 __ Str(x10, StackOperand(variable));
850 Comment cmnt(masm_, "[ VariableDeclaration");
851 EmitDebugCheckDeclarationContext(variable);
852 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
860 Comment cmnt(masm_, "[ VariableDeclaration");
861 __ Mov(x2, Operand(variable->name()));
872 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
873 __ Push(cp, x2, x1, x0);
876 __ Push(cp, x2, x1, xzr);
878 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
885 void FullCodeGenerator::VisitFunctionDeclaration(
886 FunctionDeclaration* declaration) {
887 VariableProxy* proxy = declaration->proxy();
888 Variable* variable = proxy->var();
889 switch (variable->location()) {
891 globals_->Add(variable->name(), zone());
892 Handle<SharedFunctionInfo> function =
895 if (function.is_null()) return SetStackOverflow();
896 globals_->Add(function, zone());
902 Comment cmnt(masm_, "[ Function Declaration");
903 VisitForAccumulatorValue(declaration->fun());
904 __ Str(result_register(), StackOperand(variable));
909 Comment cmnt(masm_, "[ Function Declaration");
910 EmitDebugCheckDeclarationContext(variable);
911 VisitForAccumulatorValue(declaration->fun());
915 __ RecordWriteContextSlot(cp,
928 Comment cmnt(masm_, "[ Function Declaration");
929 __ Mov(x2, Operand(variable->name()));
933 VisitForStackValue(declaration->fun());
934 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
941 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
942 Variable* variable = declaration->proxy()->var();
944 ASSERT(variable->interface()->IsFrozen());
946 Comment cmnt(masm_, "[ ModuleDeclaration");
947 EmitDebugCheckDeclarationContext(variable);
957 __ RecordWriteContextSlot(cp,
965 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
968 Visit(declaration->module());
972 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
973 VariableProxy* proxy = declaration->proxy();
974 Variable* variable = proxy->var();
975 switch (variable->location()) {
981 Comment cmnt(masm_, "[ ImportDeclaration");
982 EmitDebugCheckDeclarationContext(variable);
995 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
1000 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1002 __ Mov(x11, Operand(pairs));
1003 Register flags = xzr;
1008 __ Push(cp, x11, flags);
1009 __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
1014 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1016 __ Push(descriptions);
1017 __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
1022 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1023 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
1024 Comment cmnt(masm_, "[ SwitchStatement");
1025 Breakable nested_statement(this, stmt);
1026 SetStatementPosition(stmt);
1029 VisitForStackValue(stmt->tag());
1032 ZoneList<CaseClause*>* clauses = stmt->cases();
1033 CaseClause* default_clause = NULL;
1037 for (int i = 0; i < clauses->length(); i++) {
1038 CaseClause* clause = clauses->at(i);
1039 clause->body_target()->Unuse();
1042 if (clause->is_default()) {
1043 default_clause = clause;
1047 Comment cmnt(masm_, "[ Case comparison");
1048 __ Bind(&next_test);
1052 VisitForAccumulatorValue(clause->label());
1057 JumpPatchSite patch_site(masm_);
1058 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1060 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1062 __ B(ne, &next_test);
1064 __ B(clause->body_target());
1065 __ Bind(&slow_case);
1069 SetSourcePosition(clause->position());
1071 CallIC(ic, clause->CompareId());
1072 patch_site.EmitPatchInfo();
1076 PrepareForBailout(clause, TOS_REG);
1077 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1079 __ B(clause->body_target());
1082 __ Cbnz(x0, &next_test);
1084 __ B(clause->body_target());
1089 __ Bind(&next_test);
1091 if (default_clause == NULL) {
1092 __ B(nested_statement.break_label());
1094 __ B(default_clause->body_target());
1098 for (int i = 0; i < clauses->length(); i++) {
1099 Comment cmnt(masm_, "[ Case body");
1100 CaseClause* clause = clauses->at(i);
1101 __ Bind(clause->body_target());
1102 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1103 VisitStatements(clause->statements());
1106 __ Bind(nested_statement.break_label());
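// for-in: the enumerable is checked for undefined/null, converted to an
// object if necessary, and then either its enum cache or a fixed array of
// names from the runtime is iterated, with the current index, length,
// enumerable and cache kept on the stack.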
1111 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1112 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1113 Comment cmnt(masm_, "[ ForInStatement");
1114 int slot = stmt->ForInFeedbackSlot();
1116 SetStatementPosition(stmt);
1119 ForIn loop_statement(this, stmt);
1120 increment_loop_depth();
1124 VisitForAccumulatorValue(stmt->enumerable());
1125 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1126 Register null_value = x15;
1127 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1128 __ Cmp(x0, null_value);
1131 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1134 Label convert, done_convert;
1135 __ JumpIfSmi(x0, &convert);
1140 __ Bind(&done_convert);
1152 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1161 __ Bind(&call_runtime);
1163 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1168 Label fixed_array, no_descriptors;
1170 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1173 __ Bind(&use_cache);
1175 __ EnumLengthUntagged(x1, x0);
1176 __ Cbz(x1, &no_descriptors);
1178 __ LoadInstanceDescriptors(x0, x2);
1188 __ Push(x2, x1, x0);
1191 __ Bind(&no_descriptors);
1196 __ Bind(&fixed_array);
1198 Handle<Object> feedback = Handle<Object>(
1201 StoreFeedbackVectorSlot(slot, feedback);
1202 __ LoadObject(x1, FeedbackVector());
1221 __ PeekPair(x0, x1, 0);
1223 __ B(hs, loop_statement.break_label());
1240 __ B(eq, &update_each);
1245 __ Cbz(x2, &update_each);
1253 __ Cbz(x0, loop_statement.continue_label());
1257 __ Bind(&update_each);
1258 __ Mov(result_register(), x3);
1260 { EffectContext context(this);
1261 EmitAssignment(stmt->each());
1265 Visit(stmt->body());
1269 __ Bind(loop_statement.continue_label());
1275 EmitBackEdgeBookkeeping(stmt, &loop);
1279 __ Bind(loop_statement.break_label());
1285 decrement_loop_depth();
1289 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1290 Comment cmnt(masm_, "[ ForOfStatement");
1291 SetStatementPosition(stmt);
1293 Iteration loop_statement(this, stmt);
1294 increment_loop_depth();
1297 VisitForAccumulatorValue(stmt->assign_iterator());
1300 Register iterator = x0;
1301 __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
1302 loop_statement.break_label());
1303 __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
1304 loop_statement.break_label());
1307 Label convert, done_convert;
1308 __ JumpIfSmi(iterator, &convert);
1310 __ B(ge, &done_convert);
1314 __ Bind(&done_convert);
1318 __ Bind(loop_statement.continue_label());
1321 VisitForEffect(stmt->next_result());
1324 Label result_not_done;
1325 VisitForControl(stmt->result_done(),
1326 loop_statement.break_label(),
1329 __ Bind(&result_not_done);
1332 VisitForEffect(stmt->assign_each());
1335 Visit(stmt->body());
1338 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1339 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1340 __ B(loop_statement.continue_label());
1344 __ Bind(loop_statement.break_label());
1345 decrement_loop_depth();
1349 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1357 if (!FLAG_always_opt &&
1358 !FLAG_prepare_always_opt &&
1360 scope()->is_function_scope() &&
1361 info->num_literals() == 0) {
1362 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1363 __ Mov(x2, Operand(info));
1366 __ Mov(x11, Operand(info));
1367 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1368 : Heap::kFalseValueRootIndex);
1369 __ Push(cp, x11, x10);
1370 __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1372 context()->Plug(x0);
1376 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1377 Comment cmnt(masm_, "[ VariableProxy");
1378 EmitVariableLoad(expr);
1382 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1385 Register current = cp;
1386 Register next = x10;
1387 Register temp = x11;
1391 if (s->num_heap_slots() > 0) {
1392 if (s->calls_sloppy_eval()) {
1395 __ Cbnz(temp, slow);
1404 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1405 s = s->outer_scope();
1408 if (s->is_eval_scope()) {
1410 __ Mov(next, current);
1415 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1418 __ Cbnz(temp, slow);
1426 __ Mov(x2, Operand(var->name()));
1433 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1435 ASSERT(var->IsContextSlot());
1436 Register context = cp;
1437 Register next = x10;
1438 Register temp = x11;
1440 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1441 if (s->num_heap_slots() > 0) {
1442 if (s->calls_sloppy_eval()) {
1445 __ Cbnz(temp, slow);
1454 __ Cbnz(temp, slow);
1463 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1473 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1476 Variable* local = var->local_if_not_shadowed();
1477 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1478 if (local->mode() == LET || local->mode() == CONST ||
1480 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1482 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1484 __ Mov(x0, Operand(var->name()));
1486 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1494 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1496 SetSourcePosition(proxy->position());
1497 Variable* var = proxy->var();
1501 switch (var->location()) {
1503 Comment cmnt(masm_, "Global variable");
1507 __ Mov(x2, Operand(var->name()));
1509 context()->Plug(x0);
1516 Comment cmnt(masm_, var->IsContextSlot()
1517 ? "Context variable"
1518 : "Stack variable");
1519 if (var->binding_needs_init()) {
1543 bool skip_init_check;
1545 skip_init_check = false;
1548 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1549 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1551 var->initializer_position() < proxy->position();
1554 if (!skip_init_check) {
1558 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1559 if (var->mode() == LET || var->mode() == CONST) {
1562 __ Mov(x0, Operand(var->name()));
1564 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1569 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1572 context()->Plug(x0);
1576 context()->Plug(var);
1586 Comment cmnt(masm_, "Lookup variable");
1587 __ Mov(x1, Operand(var->name()));
1589 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1591 context()->Plug(x0);
1598 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1599 Comment cmnt(masm_, "[ RegExpLiteral");
1610 int literal_offset =
1613 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1618 __ Mov(x2, Operand(expr->pattern()));
1619 __ Mov(x1, Operand(expr->flags()));
1620 __ Push(x4, x3, x2, x1);
1621 __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1624 __ Bind(&materialized);
1626 Label allocated, runtime_allocate;
1627 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1630 __ Bind(&runtime_allocate);
1633 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1636 __ Bind(&allocated);
1641 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1642 context()->Plug(x0);
1646 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1647 if (expression == NULL) {
1648 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1651 VisitForStackValue(expression);
1656 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1657 Comment cmnt(masm_, "[ ObjectLiteral");
1659 expr->BuildConstantProperties(isolate());
1660 Handle<FixedArray> constant_properties = expr->constant_properties();
1664 __ Mov(x1, Operand(constant_properties));
1665 int flags = expr->fast_elements()
1666 ? ObjectLiteral::kFastElements
1667 : ObjectLiteral::kNoFlags;
1668 flags |= expr->has_function()
1669 ? ObjectLiteral::kHasFunction
1670 : ObjectLiteral::kNoFlags;
1672 int properties_count = constant_properties->length() / 2;
1673 const int max_cloned_properties =
1676 flags != ObjectLiteral::kFastElements ||
1677 properties_count > max_cloned_properties) {
1678 __ Push(x3, x2, x1, x0);
1679 __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1681 FastCloneShallowObjectStub stub(properties_count);
1687 bool result_saved = false;
1692 expr->CalculateEmitStore(zone());
1694 AccessorTable accessor_table(zone());
1695 for (int i = 0; i < expr->properties()->length(); i++) {
1696 ObjectLiteral::Property* property = expr->properties()->at(i);
1697 if (property->IsCompileTimeValue()) continue;
1699 Literal* key = property->key();
1700 Expression* value = property->value();
1701 if (!result_saved) {
1703 result_saved = true;
1705 switch (property->kind()) {
1708 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1711 case ObjectLiteral::Property::COMPUTED:
1712 if (key->value()->IsInternalizedString()) {
1713 if (property->emit_store()) {
1714 VisitForAccumulatorValue(value);
1715 __ Mov(x2, Operand(key->value()));
1717 CallStoreIC(key->LiteralFeedbackId());
1720 VisitForEffect(value);
1724 if (property->emit_store()) {
1728 VisitForStackValue(key);
1729 VisitForStackValue(value);
1732 __ CallRuntime(Runtime::kSetProperty, 4);
1734 VisitForEffect(key);
1735 VisitForEffect(value);
1738 case ObjectLiteral::Property::PROTOTYPE:
1739 if (property->emit_store()) {
1743 VisitForStackValue(value);
1744 __ CallRuntime(Runtime::kSetPrototype, 2);
1746 VisitForEffect(value);
1749 case ObjectLiteral::Property::GETTER:
1750 accessor_table.lookup(key)->second->getter = value;
1752 case ObjectLiteral::Property::SETTER:
1753 accessor_table.lookup(key)->second->setter = value;
1760 for (AccessorTable::Iterator it = accessor_table.begin();
1761 it != accessor_table.end();
1765 VisitForStackValue(it->first);
1766 EmitAccessor(it->second->getter);
1767 EmitAccessor(it->second->setter);
1770 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1773 if (expr->has_function()) {
1777 __ CallRuntime(Runtime::kToFastProperties, 1);
1781 context()->PlugTOS();
1783 context()->Plug(x0);
1788 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1789 Comment cmnt(masm_, "[ ArrayLiteral");
1791 expr->BuildConstantElements(isolate());
1792 int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
1793 : ArrayLiteral::kNoFlags;
1795 ZoneList<Expression*>* subexprs = expr->values();
1796 int length = subexprs->length();
1797 Handle<FixedArray> constant_elements = expr->constant_elements();
1798 ASSERT_EQ(2, constant_elements->length());
1802 Handle<FixedArrayBase> constant_elements_values(
1806 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1815 __ Mov(x1, Operand(constant_elements));
1816 if (has_fast_elements && constant_elements_values->map() ==
1817 isolate()->heap()->fixed_cow_array_map()) {
1818 FastCloneShallowArrayStub stub(
1820 allocation_site_mode,
1823 __ IncrementCounter(
1824 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
1828 __ Push(x3, x2, x1, x0);
1829 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1832 FLAG_smi_only_arrays);
1836 if (has_fast_elements) {
1840 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1844 bool result_saved = false;
1848 for (int i = 0; i < length; i++) {
1849 Expression* subexpr = subexprs->at(i);
1854 if (!result_saved) {
1857 result_saved = true;
1859 VisitForAccumulatorValue(subexpr);
1863 __ Peek(x6, kPointerSize);
1867 __ RecordWriteField(x1, offset, result_register(), x10,
1872 StoreArrayLiteralElementStub stub;
1876 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1881 context()->PlugTOS();
1883 context()->Plug(x0);
1888 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1889 ASSERT(expr->target()->IsValidLeftHandSide());
1891 Comment cmnt(masm_, "[ Assignment");
1895 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1896 LhsKind assign_type = VARIABLE;
1897 Property* property = expr->target()->AsProperty();
1898 if (property != NULL) {
1899 assign_type = (property->key()->IsPropertyName())
1905 switch (assign_type) {
1909 case NAMED_PROPERTY:
1910 if (expr->is_compound()) {
1912 VisitForAccumulatorValue(property->obj());
1913 __ Push(result_register());
1915 VisitForStackValue(property->obj());
1918 case KEYED_PROPERTY:
1919 if (expr->is_compound()) {
1920 VisitForStackValue(property->obj());
1921 VisitForAccumulatorValue(property->key());
1925 VisitForStackValue(property->obj());
1926 VisitForStackValue(property->key());
1933 if (expr->is_compound()) {
1934 { AccumulatorValueContext context(this);
1935 switch (assign_type) {
1937 EmitVariableLoad(expr->target()->AsVariableProxy());
1938 PrepareForBailout(expr->target(), TOS_REG);
1940 case NAMED_PROPERTY:
1941 EmitNamedPropertyLoad(property);
1942 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1944 case KEYED_PROPERTY:
1945 EmitKeyedPropertyLoad(property);
1946 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1953 VisitForAccumulatorValue(expr->value());
1955 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1958 SetSourcePosition(expr->position() + 1);
1959 AccumulatorValueContext context(this);
1960 if (ShouldInlineSmiCase(op)) {
1961 EmitInlineSmiBinaryOp(expr->binary_operation(),
1967 EmitBinaryOp(expr->binary_operation(), op, mode);
1971 PrepareForBailout(expr->binary_operation(), TOS_REG);
1973 VisitForAccumulatorValue(expr->value());
1977 SetSourcePosition(expr->position());
1980 switch (assign_type) {
1982 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1984 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1985 context()->Plug(x0);
1987 case NAMED_PROPERTY:
1988 EmitNamedPropertyAssignment(expr);
1990 case KEYED_PROPERTY:
1991 EmitKeyedPropertyAssignment(expr);
1997 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1998 SetSourcePosition(prop->position());
1999 Literal* key = prop->key()->AsLiteral();
2000 __ Mov(x2, Operand(key->value()));
2006 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2007 SetSourcePosition(prop->position());
2009 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2010 CallIC(ic, prop->PropertyFeedbackId());
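// Inline smi fast path for binary operations: a patchable smi check on the
// or-ed operands dispatches either to the inline arithmetic below or to
// the generic BinaryOpIC stub.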
2014 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2017 Expression* left_expr,
2018 Expression* right_expr) {
2019 Label done, both_smis, stub_call;
2023 Register right = x0;
2024 Register result = x0;
2028 __ Orr(x10, left, right);
2029 JumpPatchSite patch_site(masm_);
2030 patch_site.EmitJumpIfSmi(x10, &both_smis);
2032 __ Bind(&stub_call);
2033 BinaryOpICStub stub(op, mode);
2035 Assembler::BlockPoolsScope scope(masm_);
2036 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2037 patch_site.EmitPatchInfo();
2041 __ Bind(&both_smis);
2052 __ Asr(result, left, right);
2057 __ Lsl(result, left, right);
2060 Label right_not_zero;
2061 __ Cbnz(right, &right_not_zero);
2063 __ Bind(&right_not_zero);
2065 __ Lsr(result, left, right);
2070 __ Adds(x10, left, right);
2071 __ B(vs, &stub_call);
2072 __ Mov(result, x10);
2075 __ Subs(x10, left, right);
2076 __ B(vs, &stub_call);
2077 __ Mov(result, x10);
2080 Label not_minus_zero, done;
2081 __ Smulh(x10, left, right);
2082 __ Cbnz(x10, &not_minus_zero);
2083 __ Eor(x11, left, right);
2086 __ Mov(result, x10);
2088 __ Bind(&not_minus_zero);
2091 __ B(lt, &stub_call);
2092 __ SmiTag(result, x10);
2097 __ Orr(result, left, right);
2099 case Token::BIT_AND:
2100 __ And(result, left, right);
2102 case Token::BIT_XOR:
2103 __ Eor(result, left, right);
2110 context()->Plug(x0);
2114 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2118 BinaryOpICStub stub(op, mode);
2119 JumpPatchSite patch_site(masm_);
2121 Assembler::BlockPoolsScope scope(masm_);
2122 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2123 patch_site.EmitPatchInfo();
2125 context()->Plug(x0);
2129 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2130 ASSERT(expr->IsValidLeftHandSide());
2134 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2135 LhsKind assign_type = VARIABLE;
2136 Property* prop = expr->AsProperty();
2138 assign_type = (prop->key()->IsPropertyName())
2143 switch (assign_type) {
2145 Variable* var = expr->AsVariableProxy()->var();
2146 EffectContext context(this);
2147 EmitVariableAssignment(var, Token::ASSIGN);
2150 case NAMED_PROPERTY: {
2152 VisitForAccumulatorValue(prop->obj());
2157 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2161 case KEYED_PROPERTY: {
2163 VisitForStackValue(prop->obj());
2164 VisitForAccumulatorValue(prop->key());
2167 Handle<Code> ic = strict_mode() == SLOPPY
2168 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2169 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2174 context()->Plug(x0);
2178 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2180 __ Str(result_register(), location);
2181 if (var->IsContextSlot()) {
2183 __ Mov(x10, result_register());
2185 __ RecordWriteContextSlot(
2191 void FullCodeGenerator::EmitCallStoreContextSlot(
2193 __ Mov(x11, Operand(name));
2199 __ Push(x0, cp, x11, x10);
2200 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
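// Variable stores: unallocated (global) variables go through a StoreIC,
// legacy const and let initializations check for the hole before writing,
// and everything else stores straight to the stack or context slot, with
// a write barrier for context slots.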
2204 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2206 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2207 if (var->IsUnallocated()) {
2209 __ Mov(x2, Operand(var->name()));
2213 } else if (op == Token::INIT_CONST_LEGACY) {
2215 ASSERT(!var->IsParameter());
2216 if (var->IsLookupSlot()) {
2218 __ Mov(x0, Operand(var->name()));
2220 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2222 ASSERT(var->IsStackLocal() || var->IsContextSlot());
2225 __ Ldr(x10, location);
2226 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2227 EmitStoreToStackLocalOrContextSlot(var, location);
2231 } else if (var->mode() == LET && op != Token::INIT_LET) {
2233 if (var->IsLookupSlot()) {
2234 EmitCallStoreContextSlot(var->name(), strict_mode());
2236 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2239 __ Ldr(x10, location);
2240 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2241 __ Mov(x10, Operand(var->name()));
2243 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2246 EmitStoreToStackLocalOrContextSlot(var, location);
2249 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2252 if (var->IsLookupSlot()) {
2253 EmitCallStoreContextSlot(var->name(), strict_mode());
2255 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2257 if (FLAG_debug_code && op == Token::INIT_LET) {
2258 __ Ldr(x10, location);
2259 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2260 __ Check(eq, kLetBindingReInitialization);
2262 EmitStoreToStackLocalOrContextSlot(var, location);
2269 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2270 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2272 Property* prop = expr->target()->AsProperty();
2277 SetSourcePosition(expr->position());
2278 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2281 CallStoreIC(expr->AssignmentFeedbackId());
2283 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2284 context()->Plug(x0);
2288 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2289 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2293 SetSourcePosition(expr->position());
2297 Handle<Code> ic = strict_mode() == SLOPPY
2298 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2299 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2300 CallIC(ic, expr->AssignmentFeedbackId());
2302 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2303 context()->Plug(x0);
2307 void FullCodeGenerator::VisitProperty(Property* expr) {
2308 Comment cmnt(masm_, "[ Property");
2309 Expression* key = expr->key();
2311 if (key->IsPropertyName()) {
2312 VisitForAccumulatorValue(expr->obj());
2313 EmitNamedPropertyLoad(expr);
2314 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2315 context()->Plug(x0);
2317 VisitForStackValue(expr->obj());
2318 VisitForAccumulatorValue(expr->key());
2320 EmitKeyedPropertyLoad(expr);
2321 context()->Plug(x0);
2326 void FullCodeGenerator::CallIC(Handle<Code> code,
2327 TypeFeedbackId ast_id) {
2331 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
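// Call emission: the callee and receiver are pushed, the arguments are
// evaluated onto the stack, and the call itself goes through
// CallFunctionStub (with the callee reloaded into x1 from the stack),
// after which DropAndPlug replaces the callee slot with the result.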
2336 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2339 Expression* callee = expr->expression();
2340 ZoneList<Expression*>* args = expr->arguments();
2341 int arg_count = args->length();
2345 if (callee->IsVariableProxy()) {
2346 { StackValueContext context(this);
2347 EmitVariableLoad(callee->AsVariableProxy());
2352 __ Push(isolate()->factory()->undefined_value());
2356 ASSERT(callee->IsProperty());
2358 EmitNamedPropertyLoad(callee->AsProperty());
2359 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2367 { PreservePositionScope scope(masm()->positions_recorder());
2368 for (int i = 0; i < arg_count; i++) {
2369 VisitForStackValue(args->at(i));
2374 SetSourcePosition(expr->position());
2375 CallFunctionStub stub(arg_count, flags);
2376 __ Peek(x1, (arg_count + 1) * kPointerSize);
2379 RecordJSReturnSite(expr);
2384 context()->DropAndPlug(1, x0);
2389 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2392 VisitForAccumulatorValue(key);
2394 Expression* callee = expr->expression();
2395 ZoneList<Expression*>* args = expr->arguments();
2396 int arg_count = args->length();
2399 ASSERT(callee->IsProperty());
2401 EmitKeyedPropertyLoad(callee->AsProperty());
2402 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2408 { PreservePositionScope scope(masm()->positions_recorder());
2409 for (int i = 0; i < arg_count; i++) {
2410 VisitForStackValue(args->at(i));
2415 SetSourcePosition(expr->position());
2417 __ Peek(x1, (arg_count + 1) * kPointerSize);
2420 RecordJSReturnSite(expr);
2424 context()->DropAndPlug(1, x0);
2428 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2430 ZoneList<Expression*>* args = expr->arguments();
2431 int arg_count = args->length();
2432 { PreservePositionScope scope(masm()->positions_recorder());
2433 for (int i = 0; i < arg_count; i++) {
2434 VisitForStackValue(args->at(i));
2438 SetSourcePosition(expr->position());
2440 Handle<Object> uninitialized =
2442 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2443 __ LoadObject(x2, FeedbackVector());
2450 RecordJSReturnSite(expr);
2453 context()->DropAndPlug(1, x0);
2457 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2458 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2461 if (arg_count > 0) {
2464 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2483 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
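// VisitCall dispatches on the statically known call type: possibly-eval
// calls resolve the callee through the runtime first, global and property
// calls go through ICs, lookup-slot calls load the function and receiver
// via the runtime, and other calls use an undefined receiver.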
2487 void FullCodeGenerator::VisitCall(Call* expr) {
2491 expr->return_is_recorded_ = false;
2494 Comment cmnt(masm_, "[ Call");
2495 Expression* callee = expr->expression();
2496 Call::CallType call_type = expr->GetCallType(isolate());
2498 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2503 ZoneList<Expression*>* args = expr->arguments();
2504 int arg_count = args->length();
2507 PreservePositionScope pos_scope(masm()->positions_recorder());
2508 VisitForStackValue(callee);
2509 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2513 for (int i = 0; i < arg_count; i++) {
2514 VisitForStackValue(args->at(i));
2519 __ Peek(x10, (arg_count + 1) * kPointerSize);
2521 EmitResolvePossiblyDirectEval(arg_count);
2525 __ PokePair(x1, x0, arg_count * kPointerSize);
2529 SetSourcePosition(expr->position());
2535 RecordJSReturnSite(expr);
2538 context()->DropAndPlug(1, x0);
2540 } else if (call_type == Call::GLOBAL_CALL) {
2541 EmitCallWithIC(expr);
2543 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2545 VariableProxy* proxy = callee->AsVariableProxy();
2548 { PreservePositionScope scope(masm()->positions_recorder());
2557 __ Push(context_register());
2558 __ Mov(x10, Operand(proxy->name()));
2560 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2566 if (done.is_linked()) {
2574 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2581 EmitCallWithStub(expr);
2582 } else if (call_type == Call::PROPERTY_CALL) {
2583 Property* property = callee->AsProperty();
2584 { PreservePositionScope scope(masm()->positions_recorder());
2585 VisitForStackValue(property->obj());
2587 if (property->key()->IsPropertyName()) {
2588 EmitCallWithIC(expr);
2590 EmitKeyedCallWithIC(expr, property->key());
2594 ASSERT(call_type == Call::OTHER_CALL);
2596 { PreservePositionScope scope(masm()->positions_recorder());
2597 VisitForStackValue(callee);
2599 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2602 EmitCallWithStub(expr);
2607 ASSERT(expr->return_is_recorded_);
2612 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2613 Comment cmnt(masm_, "[ CallNew");
2621 VisitForStackValue(expr->expression());
2624 ZoneList<Expression*>* args = expr->arguments();
2625 int arg_count = args->length();
2626 for (int i = 0; i < arg_count; i++) {
2627 VisitForStackValue(args->at(i));
2632 SetSourcePosition(expr->position());
2635 __ Mov(x0, arg_count);
2639 Handle<Object> uninitialized =
2641 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2642 if (FLAG_pretenuring_call_new) {
2643 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2644 isolate()->factory()->NewAllocationSite());
2645 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2646 expr->CallNewFeedbackSlot() + 1);
2649 __ LoadObject(x2, FeedbackVector());
2653 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2654 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2655 context()->Plug(x0);
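// The Emit* helpers below implement the %_IsSmi-style inline runtime
// predicates: each evaluates its argument into x0, asks the current test
// context for true/false/fall-through labels, and splits on a condition
// instead of materializing a boolean.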
2659 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2660 ZoneList<Expression*>* args = expr->arguments();
2661 ASSERT(args->length() == 1);
2663 VisitForAccumulatorValue(args->at(0));
2665 Label materialize_true, materialize_false;
2666 Label* if_true = NULL;
2667 Label* if_false = NULL;
2668 Label* fall_through = NULL;
2669 context()->PrepareTest(&materialize_true, &materialize_false,
2670 &if_true, &if_false, &fall_through);
2672 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2673 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2675 context()->Plug(if_true, if_false);
2679 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2680 ZoneList<Expression*>* args = expr->arguments();
2681 ASSERT(args->length() == 1);
2683 VisitForAccumulatorValue(args->at(0));
2685 Label materialize_true, materialize_false;
2686 Label* if_true = NULL;
2687 Label* if_false = NULL;
2688 Label* fall_through = NULL;
2689 context()->PrepareTest(&materialize_true, &materialize_false,
2690 &if_true, &if_false, &fall_through);
2692 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2694 if_false, fall_through);
2696 context()->Plug(if_true, if_false);
2700 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2701 ZoneList<Expression*>* args = expr->arguments();
2702 ASSERT(args->length() == 1);
2704 VisitForAccumulatorValue(args->at(0));
2706 Label materialize_true, materialize_false;
2707 Label* if_true = NULL;
2708 Label* if_false = NULL;
2709 Label* fall_through = NULL;
2710 context()->PrepareTest(&materialize_true, &materialize_false,
2711 &if_true, &if_false, &fall_through);
2713 __ JumpIfSmi(x0, if_false);
2714 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2723 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2724 Split(le, if_true, if_false, fall_through);
2726 context()->Plug(if_true, if_false);
2730 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2731 ZoneList<Expression*>* args = expr->arguments();
2732 ASSERT(args->length() == 1);
2734 VisitForAccumulatorValue(args->at(0));
2736 Label materialize_true, materialize_false;
2737 Label* if_true = NULL;
2738 Label* if_false = NULL;
2739 Label* fall_through = NULL;
2740 context()->PrepareTest(&materialize_true, &materialize_false,
2741 &if_true, &if_false, &fall_through);
2743 __ JumpIfSmi(x0, if_false);
2745 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2746 Split(ge, if_true, if_false, fall_through);
2748 context()->Plug(if_true, if_false);
2752 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2753 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2754 ZoneList<Expression*>* args = expr->arguments();
2755 ASSERT(args->length() == 1);
2757 VisitForAccumulatorValue(args->at(0));
2759 Label materialize_true, materialize_false;
2760 Label* if_true = NULL;
2761 Label* if_false = NULL;
2762 Label* fall_through = NULL;
2763 context()->PrepareTest(&materialize_true, &materialize_false,
2764 &if_true, &if_false, &fall_through);
2766 __ JumpIfSmi(x0, if_false);
2770 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2771 Split(ne, if_true, if_false, fall_through);
2773 context()->Plug(if_true, if_false);
2777 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2778 CallRuntime* expr) {
2779 ZoneList<Expression*>* args = expr->arguments();
2780 ASSERT(args->length() == 1);
2781 VisitForAccumulatorValue(args->at(0));
2783 Label materialize_true, materialize_false, skip_lookup;
2784 Label* if_true = NULL;
2785 Label* if_false = NULL;
2786 Label* fall_through = NULL;
2787 context()->PrepareTest(&materialize_true, &materialize_false,
2788 &if_true, &if_false, &fall_through);
2790 Register object = x0;
2791 __ AssertNotSmi(object);
2794 Register bitfield2 = x11;
2800 Register props = x12;
2801 Register props_map = x12;
2802 Register hash_table_map = x13;
2805 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2806 __ Cmp(props_map, hash_table_map);
2815 Register descriptors = x12;
2816 Register descriptors_length = x13;
2817 __ NumberOfOwnDescriptors(descriptors_length, map);
2818 __ Cbz(descriptors_length, &done);
2820 __ LoadInstanceDescriptors(map, descriptors);
2823 Register descriptors_end = x14;
2825 __ Mul(descriptors_length, descriptors_length, x15);
2827 __ Add(descriptors, descriptors,
2830 __ Add(descriptors_end, descriptors,
2835 Register valueof_string = x1;
2837 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
2840 __ Cmp(x15, valueof_string);
2842 __ Cmp(descriptors, descriptors_end);
2852 __ Bind(&skip_lookup);
2856 Register prototype = x1;
2857 Register global_idx = x2;
2858 Register native_context = x2;
2859 Register string_proto = x3;
2860 Register proto_map = x4;
2862 __ JumpIfSmi(prototype, if_false);
2865 __ Ldr(native_context,
2867 __ Ldr(string_proto,
2870 __ Cmp(proto_map, string_proto);
2872 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2873 Split(eq, if_true, if_false, fall_through);
2875 context()->Plug(if_true, if_false);
2879 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2880 ZoneList<Expression*>* args = expr->arguments();
2881 ASSERT(args->length() == 1);
2883 VisitForAccumulatorValue(args->at(0));
2885 Label materialize_true, materialize_false;
2886 Label* if_true = NULL;
2887 Label* if_false = NULL;
2888 Label* fall_through = NULL;
2889 context()->PrepareTest(&materialize_true, &materialize_false,
2890 &if_true, &if_false, &fall_through);
2892 __ JumpIfSmi(x0, if_false);
2894 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2895 Split(eq, if_true, if_false, fall_through);
2897 context()->Plug(if_true, if_false);
2901 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2902 ZoneList<Expression*>* args = expr->arguments();
2903 ASSERT(args->length() == 1);
2905 VisitForAccumulatorValue(args->at(0));
2907 Label materialize_true, materialize_false;
2908 Label* if_true = NULL;
2909 Label* if_false = NULL;
2910 Label* fall_through = NULL;
2911 context()->PrepareTest(&materialize_true, &materialize_false,
2912 &if_true, &if_false, &fall_through);
2915 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
2921 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2922 Split(vs, if_true, if_false, fall_through);
2924 context()->Plug(if_true, if_false);
2928 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2929 ZoneList<Expression*>* args = expr->arguments();
2930 ASSERT(args->length() == 1);
2932 VisitForAccumulatorValue(args->at(0));
2934 Label materialize_true, materialize_false;
2935 Label* if_true = NULL;
2936 Label* if_false = NULL;
2937 Label* fall_through = NULL;
2938 context()->PrepareTest(&materialize_true, &materialize_false,
2939 &if_true, &if_false, &fall_through);
2941 __ JumpIfSmi(x0, if_false);
2943 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2944 Split(eq, if_true, if_false, fall_through);
2946 context()->Plug(if_true, if_false);
2950 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2951 ZoneList<Expression*>* args = expr->arguments();
2952 ASSERT(args->length() == 1);
2954 VisitForAccumulatorValue(args->at(0));
2956 Label materialize_true, materialize_false;
2957 Label* if_true = NULL;
2958 Label* if_false = NULL;
2959 Label* fall_through = NULL;
2960 context()->PrepareTest(&materialize_true, &materialize_false,
2961 &if_true, &if_false, &fall_through);
2963 __ JumpIfSmi(x0, if_false);
2965 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2966 Split(eq, if_true, if_false, fall_through);
2968 context()->Plug(if_true, if_false);
2973 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2974 ASSERT(expr->arguments()->length() == 0);
2976 Label materialize_true, materialize_false;
2977 Label* if_true = NULL;
2978 Label* if_false = NULL;
2979 Label* fall_through = NULL;
2980 context()->PrepareTest(&materialize_true, &materialize_false,
2981 &if_true, &if_false, &fall_through);
2987 Label check_frame_marker;
2990 __ B(ne, &check_frame_marker);
2994 __ Bind(&check_frame_marker);
2997 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998 Split(eq, if_true, if_false, fall_through);
3000 context()->Plug(if_true, if_false);
3004 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3005 ZoneList<Expression*>* args = expr->arguments();
3006 ASSERT(args->length() == 2);
3009 VisitForStackValue(args->at(0));
3010 VisitForAccumulatorValue(args->at(1));
3012 Label materialize_true, materialize_false;
3013 Label* if_true = NULL;
3014 Label* if_false = NULL;
3015 Label* fall_through = NULL;
3016 context()->PrepareTest(&materialize_true, &materialize_false,
3017 &if_true, &if_false, &fall_through);
3021 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3022 Split(eq, if_true, if_false, fall_through);
3024 context()->Plug(if_true, if_false);
3028 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3029 ZoneList<Expression*>* args = expr->arguments();
3030 ASSERT(args->length() == 1);
3033 VisitForAccumulatorValue(args->at(0));
3038 context()->Plug(x0);
3042 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3043 ASSERT(expr->arguments()->length() == 0);
3059 context()->Plug(x0);
3063 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3065 ZoneList<Expression*>* args = expr->arguments();
3066 ASSERT(args->length() == 1);
3067 Label done, null, function, non_function_constructor;
3069 VisitForAccumulatorValue(args->at(0));
3072 __ JumpIfSmi(x0, &null);
3085 __ B(eq, &function);
3090 __ B(eq, &function);
3097 &non_function_constructor);
3108 __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
3112 __ Bind(&non_function_constructor);
3113 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3118 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3123 context()->Plug(x0);
3127 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3135 ZoneList<Expression*>* args = expr->arguments();
3138 VisitForStackValue(args->at(1));
3139 VisitForStackValue(args->at(2));
3140 __ CallRuntime(Runtime::kHiddenLog, 2);
3144 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3145 context()->Plug(x0);
3149 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3152 ZoneList<Expression*>* args = expr->arguments();
3153 ASSERT(args->length() == 3);
3154 VisitForStackValue(args->at(0));
3155 VisitForStackValue(args->at(1));
3156 VisitForStackValue(args->at(2));
3158 context()->Plug(x0);
3162 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3164 RegExpExecStub stub;
3165 ZoneList<Expression*>* args = expr->arguments();
3166 ASSERT(args->length() == 4);
3167 VisitForStackValue(args->at(0));
3168 VisitForStackValue(args->at(1));
3169 VisitForStackValue(args->at(2));
3170 VisitForStackValue(args->at(3));
3172 context()->Plug(x0);
3176 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3178 ZoneList<Expression*>* args = expr->arguments();
3179 ASSERT(args->length() == 1);
3180 VisitForAccumulatorValue(args->at(0));
3184 __ JumpIfSmi(x0, &done);
3190 context()->Plug(x0);
3194 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3195 ZoneList<Expression*>* args = expr->arguments();
3196 ASSERT(args->length() == 2);
3198 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3200 VisitForAccumulatorValue(args->at(0));
3202 Label runtime, done, not_date_object;
3203 Register object = x0;
3204 Register result = x0;
3205 Register stamp_addr = x10;
3206 Register stamp_cache = x11;
3208 __ JumpIfSmi(object, &not_date_object);
3209 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3211 if (index->value() == 0) {
3216 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3220 __ Cmp(stamp_addr, stamp_cache);
3223 kPointerSize * index->value()));
3229 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3233 __ Bind(&not_date_object);
3234 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3236 context()->Plug(x0);
3240 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3241 ZoneList<Expression*>* args = expr->arguments();
3244 Register string = x0;
3245 Register index = x1;
3246 Register value = x2;
3247 Register scratch = x10;
3249 VisitForStackValue(args->at(1));
3250 VisitForStackValue(args->at(2));
3251 VisitForAccumulatorValue(args->at(0));
3252 __ Pop(value, index);
3254 if (FLAG_debug_code) {
3255 __ AssertSmi(value, kNonSmiValue);
3256 __ AssertSmi(index, kNonSmiIndex);
3258 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3266 context()->Plug(string);
3270 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3271 ZoneList<Expression*>* args = expr->arguments();
3274 Register string = x0;
3275 Register index = x1;
3276 Register value = x2;
3277 Register scratch = x10;
3279 VisitForStackValue(args->at(1));
3280 VisitForStackValue(args->at(2));
3281 VisitForAccumulatorValue(args->at(0));
3282 __ Pop(value, index);
3284 if (FLAG_debug_code) {
3285 __ AssertSmi(value, kNonSmiValue);
3286 __ AssertSmi(index, kNonSmiIndex);
3288 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3296 context()->Plug(string);
3300 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3302 ZoneList<Expression*>* args = expr->arguments();
3303 ASSERT(args->length() == 2);
3304 VisitForStackValue(args->at(0));
3305 VisitForStackValue(args->at(1));
3308 context()->Plug(x0);
3312 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3313 ZoneList<Expression*>* args = expr->arguments();
3314 ASSERT(args->length() == 2);
3315 VisitForStackValue(args->at(0));
3316 VisitForAccumulatorValue(args->at(1));
3323 __ JumpIfSmi(x1, &done);
3333 __ RecordWriteField(
3337 context()->Plug(x0);
3341 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3342 ZoneList<Expression*>* args = expr->arguments();
3346 VisitForAccumulatorValue(args->at(0));
3348 NumberToStringStub stub;
3350 context()->Plug(x0);
3354 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3355 ZoneList<Expression*>* args = expr->arguments();
3356 ASSERT(args->length() == 1);
3358 VisitForAccumulatorValue(args->at(0));
3362 Register result = x1;
3364 StringCharFromCodeGenerator generator(code, result);
3365 generator.GenerateFast(masm_);
3368 NopRuntimeCallHelper call_helper;
3369 generator.GenerateSlow(masm_, call_helper);
3372 context()->Plug(result);
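// EmitStringCharCodeAt: the StringCharCodeAtGenerator fast path loads the
// character code at the given index. An out-of-range index produces NaN; a
// non-smi index loads undefined so the caller can convert it, and remaining
// cases go through the generator's slow path.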
3376 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3377 ZoneList<Expression*>* args = expr->arguments();
3378 ASSERT(args->length() == 2);
3380 VisitForStackValue(args->at(0));
3381 VisitForAccumulatorValue(args->at(1));
3383 Register object = x1;
3384 Register index = x0;
3385 Register result = x3;
3389 Label need_conversion;
3390 Label index_out_of_range;
3392 StringCharCodeAtGenerator generator(object,
3397 &index_out_of_range,
3399 generator.GenerateFast(masm_);
3402 __ Bind(&index_out_of_range);
3404 __ LoadRoot(result, Heap::kNanValueRootIndex);
3407 __ Bind(&need_conversion);
3410 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3413 NopRuntimeCallHelper call_helper;
3414 generator.GenerateSlow(masm_, call_helper);
3417 context()->Plug(result);
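// EmitStringCharAt: same structure as charCodeAt above, but the generator
// returns a one-character string; an out-of-range index yields the empty
// string.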
3421 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3422 ZoneList<Expression*>* args = expr->arguments();
3423 ASSERT(args->length() == 2);
3425 VisitForStackValue(args->at(0));
3426 VisitForAccumulatorValue(args->at(1));
3428 Register object = x1;
3429 Register index = x0;
3430 Register result = x0;
3434 Label need_conversion;
3435 Label index_out_of_range;
3437 StringCharAtGenerator generator(object,
3443 &index_out_of_range,
3445 generator.GenerateFast(masm_);
3448 __ Bind(&index_out_of_range);
3451 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3454 __ Bind(&need_conversion);
3459 NopRuntimeCallHelper call_helper;
3460 generator.GenerateSlow(masm_, call_helper);
3463 context()->Plug(result);
3467 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3469 ZoneList<Expression*>* args = expr->arguments();
3472 VisitForStackValue(args->at(0));
3473 VisitForAccumulatorValue(args->at(1));
3479 context()->Plug(x0);
3483 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3486 VisitForStackValue(args->at(0));
3487 VisitForStackValue(args->at(1));
3489 StringCompareStub stub;
3491 context()->Plug(x0);
3495 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3497 ZoneList<Expression*>* args = expr->arguments();
3498 ASSERT(args->length() == 1);
3499 VisitForStackValue(args->at(0));
3500 __ CallRuntime(Runtime::kMath_log, 1);
3501 context()->Plug(x0);
3505 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3507 ZoneList<Expression*>* args = expr->arguments();
3508 ASSERT(args->length() == 1);
3509 VisitForStackValue(args->at(0));
3510 __ CallRuntime(Runtime::kMath_sqrt, 1);
3511 context()->Plug(x0);
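// EmitCallFunction: %_CallFunction(receiver, ...args, function) pushes the
// receiver and arguments, evaluates the callee into x0, and invokes it
// directly when it is a JSFunction; any other callee falls back to
// Runtime::kCall.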
3515 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3517 ZoneList<Expression*>* args = expr->arguments();
3518 ASSERT(args->length() >= 2);
3520 int arg_count = args->length() - 2;
3521 for (int i = 0; i < arg_count + 1; i++) {
3522 VisitForStackValue(args->at(i));
3524 VisitForAccumulatorValue(args->last());
3526 Label runtime, done;
3528 __ JumpIfSmi(x0, &runtime);
3533 ParameterCount count(arg_count);
3540 __ CallRuntime(Runtime::kCall, args->length());
3543 context()->Plug(x0);
3547 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3548 RegExpConstructResultStub stub;
3549 ZoneList<Expression*>* args = expr->arguments();
3550 ASSERT(args->length() == 3);
3551 VisitForStackValue(args->at(0));
3552 VisitForStackValue(args->at(1));
3553 VisitForAccumulatorValue(args->at(2));
3556 context()->Plug(x0);
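// EmitGetFromCache: looks up a key in the JSFunction result cache selected by
// the literal cache id. A hit at the cache finger is returned via CmovX;
// otherwise the hidden GetFromCache runtime computes and caches the value.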
3560 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3561 ZoneList<Expression*>* args = expr->arguments();
3564 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3566 Handle<FixedArray> jsfunction_result_caches(
3567 isolate()->native_context()->jsfunction_result_caches());
3568 if (jsfunction_result_caches->length() <= cache_id) {
3569 __ Abort(kAttemptToUseUndefinedCache);
3570 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3571 context()->Plug(x0);
3575 VisitForAccumulatorValue(args->at(1));
3578 Register cache = x1;
3596 __ CmovX(x0, x3, eq);
3600 __ Push(cache, key);
3601 __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3604 context()->Plug(x0);
3608 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3609 ZoneList<Expression*>* args = expr->arguments();
3610 VisitForAccumulatorValue(args->at(0));
3612 Label materialize_true, materialize_false;
3613 Label* if_true = NULL;
3614 Label* if_false = NULL;
3615 Label* fall_through = NULL;
3616 context()->PrepareTest(&materialize_true, &materialize_false,
3617 &if_true, &if_false, &fall_through);
3621 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3622 Split(eq, if_true, if_false, fall_through);
3624 context()->Plug(if_true, if_false);
3628 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3629 ZoneList<Expression*>* args = expr->arguments();
3630 ASSERT(args->length() == 1);
3631 VisitForAccumulatorValue(args->at(0));
3633 __ AssertString(x0);
3636 __ IndexFromHash(x10, x0);
3638 context()->Plug(x0);
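// EmitFastAsciiArrayJoin: fast path for Array.prototype.join over flat arrays
// of one-byte strings. The array and every element are validated, the total
// string length (including separators) is accumulated, the result string is
// allocated, and the elements are copied with dedicated loops for empty,
// single-character and longer separators. Anything unexpected jumps to the
// bailout, which returns undefined so the generic join path takes over.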
3642 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3643 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3645 ZoneList<Expression*>* args = expr->arguments();
3646 ASSERT(args->length() == 2);
3647 VisitForStackValue(args->at(1));
3648 VisitForAccumulatorValue(args->at(0));
3650 Register array = x0;
3651 Register result = x0;
3652 Register elements = x1;
3653 Register element = x2;
3654 Register separator = x3;
3655 Register array_length = x4;
3656 Register result_pos = x5;
3658 Register string_length = x10;
3659 Register elements_end = x11;
3660 Register string = x12;
3661 Register scratch1 = x13;
3662 Register scratch2 = x14;
3663 Register scratch3 = x7;
3664 Register separator_length = x15;
3666 Label bailout, done, one_char_separator, long_separator,
3667 non_trivial_array, not_size_one_array, loop,
3668 empty_separator_loop, one_char_separator_loop,
3669 one_char_separator_loop_entry, long_separator_loop;
3675 __ JumpIfSmi(array, &bailout);
3676 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3679 __ CheckFastElements(map, scratch1, &bailout);
3685 __ Ldrsw(array_length,
3687 __ Cbnz(array_length, &non_trivial_array);
3688 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3691 __ Bind(&non_trivial_array);
3697 __ Mov(string_length, 0);
3708 if (FLAG_debug_code) {
3709 __ Cmp(array_length, 0);
3710 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3714 __ JumpIfSmi(string, &bailout);
3717 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3720 __ Adds(string_length, string_length, scratch1);
3722 __ Cmp(element, elements_end);
3726 __ Cmp(array_length, 1);
3727 __ B(ne, &not_size_one_array);
3731 __ Bind(&not_size_one_array);
3740 __ JumpIfSmi(separator, &bailout);
3743 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3749 __ Ldrsw(separator_length,
3752 __ Sub(string_length, string_length, separator_length);
3753 __ Umaddl(string_length, array_length.W(), separator_length.W(),
3763 __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
3774 __ Cmp(separator_length, 1);
3775 __ B(eq, &one_char_separator);
3776 __ B(gt, &long_separator);
3779 __ Bind(&empty_separator_loop);
3787 __ Ldrsw(string_length,
3790 __ CopyBytes(result_pos, string, string_length, scratch1);
3791 __ Cmp(element, elements_end);
3792 __ B(lt, &empty_separator_loop);
3796 __ Bind(&one_char_separator);
3801 __ B(&one_char_separator_loop_entry);
3803 __ Bind(&one_char_separator_loop);
3814 __ Bind(&one_char_separator_loop_entry);
3816 __ Ldrsw(string_length,
3819 __ CopyBytes(result_pos, string, string_length, scratch1);
3820 __ Cmp(element, elements_end);
3821 __ B(lt, &one_char_separator_loop);
3826 __ Bind(&long_separator_loop);
3835 __ Ldrsw(string_length,
3838 __ CopyBytes(result_pos, string, string_length, scratch1);
3840 __ Bind(&long_separator);
3842 __ Ldrsw(string_length,
3845 __ CopyBytes(result_pos, string, string_length, scratch1);
3846 __ Cmp(element, elements_end);
3847 __ B(lt, &long_separator_loop);
3852 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3854 context()->Plug(result);
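// VisitCallRuntime: calls recognized as inline intrinsics are dispatched to
// the Emit* helpers above. JS runtime calls load the named function from the
// builtins object and invoke it like a normal call; all other calls push
// their arguments and invoke the C++ runtime function directly.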
3858 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3859 if (expr->function() != NULL &&
3861 Comment cmnt(masm_, "[ InlineRuntimeCall");
3862 EmitInlineRuntimeCall(expr);
3866 Comment cmnt(masm_, "[ CallRunTime");
3867 ZoneList<Expression*>* args = expr->arguments();
3868 int arg_count = args->length();
3870 if (expr->is_jsruntime()) {
3877 Handle<String> name = expr->name();
3878 __ Mov(x2, Operand(name));
3885 int arg_count = args->length();
3886 for (int i = 0; i < arg_count; i++) {
3887 VisitForStackValue(args->at(i));
3891 SetSourcePosition(expr->position());
3893 __ Peek(x1, (arg_count + 1) * kPointerSize);
3899 context()->DropAndPlug(1, x0);
3902 for (int i = 0; i < arg_count; i++) {
3903 VisitForStackValue(args->at(i));
3907 __ CallRuntime(expr->function(), arg_count);
3908 context()->Plug(x0);
3913 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3914 switch (expr->op()) {
3915 case Token::DELETE: {
3916 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3917 Property* property = expr->expression()->AsProperty();
3918 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3920 if (property != NULL) {
3921 VisitForStackValue(property->obj());
3922 VisitForStackValue(property->key());
3926 context()->Plug(x0);
3927 } else if (proxy != NULL) {
3928 Variable* var = proxy->var();
3932 if (var->IsUnallocated()) {
3934 __ Mov(x11, Operand(var->name()));
3936 __ Push(x12, x11, x10);
3938 context()->Plug(x0);
3939 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3942 context()->Plug(var->is_this());
3946 __ Mov(x2, Operand(var->name()));
3947 __ Push(context_register(), x2);
3948 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
3949 context()->Plug(x0);
3954 VisitForEffect(expr->expression());
3955 context()->Plug(true);
3961 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3962 VisitForEffect(expr->expression());
3963 context()->Plug(Heap::kUndefinedValueRootIndex);
3967 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3968 if (context()->IsEffect()) {
3971 VisitForEffect(expr->expression());
3972 } else if (context()->IsTest()) {
3973 const TestContext* test = TestContext::cast(context());
3975 VisitForControl(expr->expression(),
3976 test->false_label(),
3978 test->fall_through());
3979 context()->Plug(test->true_label(), test->false_label());
3981 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3984 Label materialize_true, materialize_false, done;
3985 VisitForControl(expr->expression(),
3990 __ Bind(&materialize_true);
3991 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3992 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
3995 __ Bind(&materialize_false);
3996 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3997 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4001 if (context()->IsStackValue()) {
4002 __ Push(result_register());
4007 case Token::TYPEOF: {
4008 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4010 StackValueContext context(this);
4011 VisitForTypeofValue(expr->expression());
4013 __ CallRuntime(Runtime::kTypeof, 1);
4014 context()->Plug(x0);
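// VisitCountOperation (++/--): load the current value of the variable or
// property, keep a copy on the stack for postfix results, convert it to a
// number, apply +1/-1 inline for smis with a patchable stub call as fallback,
// and finally store the result back through the variable, named-property or
// keyed-property path.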
4023 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4024 ASSERT(expr->expression()->IsValidLeftHandSide());
4026 Comment cmnt(masm_, "[ CountOperation");
4027 SetSourcePosition(expr->position());
4031 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4032 LhsKind assign_type = VARIABLE;
4033 Property* prop = expr->expression()->AsProperty();
4038 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4042 if (assign_type == VARIABLE) {
4043 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4044 AccumulatorValueContext context(this);
4045 EmitVariableLoad(expr->expression()->AsVariableProxy());
4048 if (expr->is_postfix() && !context()->IsEffect()) {
4051 if (assign_type == NAMED_PROPERTY) {
4053 VisitForAccumulatorValue(prop->obj());
4055 EmitNamedPropertyLoad(prop);
4058 VisitForStackValue(prop->obj());
4059 VisitForAccumulatorValue(prop->key());
4062 EmitKeyedPropertyLoad(prop);
4068 if (assign_type == VARIABLE) {
4069 PrepareForBailout(expr->expression(), TOS_REG);
4071 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4075 Label stub_call, done;
4076 JumpPatchSite patch_site(masm_);
4078 int count_value = expr->op() == Token::INC ? 1 : -1;
4079 if (ShouldInlineSmiCase(expr->op())) {
4081 patch_site.EmitJumpIfNotSmi(x0, &slow);
4084 if (expr->is_postfix()) {
4085 if (!context()->IsEffect()) {
4089 switch (assign_type) {
4093 case NAMED_PROPERTY:
4094 __ Poke(x0, kPointerSize);
4096 case KEYED_PROPERTY:
4097 __ Poke(x0, kPointerSize * 2);
4110 ToNumberStub convert_stub;
4111 __ CallStub(&convert_stub);
4114 if (expr->is_postfix()) {
4115 if (!context()->IsEffect()) {
4119 switch (assign_type) {
4123 case NAMED_PROPERTY:
4126 case KEYED_PROPERTY:
4133 __ Bind(&stub_call);
4138 SetSourcePosition(expr->position());
4141 Assembler::BlockPoolsScope scope(masm_);
4143 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4144 patch_site.EmitPatchInfo();
4149 switch (assign_type) {
4151 if (expr->is_postfix()) {
4152 { EffectContext context(this);
4153 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4155 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4160 if (!context()->IsEffect()) {
4161 context()->PlugTOS();
4164 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4166 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4167 context()->Plug(x0);
4170 case NAMED_PROPERTY: {
4171 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
4173 CallStoreIC(expr->CountStoreFeedbackId());
4174 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4175 if (expr->is_postfix()) {
4176 if (!context()->IsEffect()) {
4177 context()->PlugTOS();
4180 context()->Plug(x0);
4184 case KEYED_PROPERTY: {
4187 Handle<Code> ic = strict_mode() == SLOPPY
4188 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4189 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4190 CallIC(ic, expr->CountStoreFeedbackId());
4191 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4192 if (expr->is_postfix()) {
4193 if (!context()->IsEffect()) {
4194 context()->PlugTOS();
4197 context()->Plug(x0);
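// VisitForTypeofValue: typeof must not throw for undeclared variables, so
// unallocated (global) variables are loaded by name (presumably through a
// load IC that tolerates missing properties), and lookup slots try the
// dynamic fast case before calling LoadContextSlotNoReferenceError.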
4205 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4206 ASSERT(!context()->IsEffect());
4207 ASSERT(!context()->IsTest());
4208 VariableProxy* proxy = expr->AsVariableProxy();
4209 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4210 Comment cmnt(masm_, "Global variable");
4212 __ Mov(x2, Operand(proxy->name()));
4216 PrepareForBailout(expr, TOS_REG);
4217 context()->Plug(x0);
4218 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4223 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4226 __ Mov(x0, Operand(proxy->name()));
4228 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4229 PrepareForBailout(expr, TOS_REG);
4232 context()->Plug(x0);
4235 VisitInDuplicateContext(expr);
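// EmitLiteralCompareTypeof: specializes comparisons of the form
// typeof x == "literal". The operand's typeof value is computed once and one
// branch per recognized type string (number, string, symbol, boolean, null
// under --harmony-typeof, undefined, function, object) splits directly to the
// true/false labels; unknown literals branch straight to if_false.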
4240 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4241 Expression* sub_expr,
4242 Handle<String> check) {
4243 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4244 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4245 Label materialize_true, materialize_false;
4246 Label* if_true = NULL;
4247 Label* if_false = NULL;
4248 Label* fall_through = NULL;
4249 context()->PrepareTest(&materialize_true, &materialize_false,
4250 &if_true, &if_false, &fall_through);
4252 { AccumulatorValueContext context(this);
4253 VisitForTypeofValue(sub_expr);
4255 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4257 if (check->Equals(isolate()->heap()->number_string())) {
4258 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4259 __ JumpIfSmi(x0, if_true);
4261 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4262 Split(eq, if_true, if_false, fall_through);
4263 } else if (check->Equals(isolate()->heap()->string_string())) {
4264 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4265 __ JumpIfSmi(x0, if_false);
4271 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4272 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4273 __ JumpIfSmi(x0, if_false);
4275 Split(eq, if_true, if_false, fall_through);
4276 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4277 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4278 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4279 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4280 Split(eq, if_true, if_false, fall_through);
4281 } else if (FLAG_harmony_typeof &&
4282 check->Equals(isolate()->heap()->null_string())) {
4283 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
4284 __ CompareRoot(x0, Heap::kNullValueRootIndex);
4285 Split(eq, if_true, if_false, fall_through);
4286 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4288 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4289 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4290 __ JumpIfSmi(x0, if_false);
4296 } else if (check->Equals(isolate()->heap()->function_string())) {
4297 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4298 __ JumpIfSmi(x0, if_false);
4304 } else if (check->Equals(isolate()->heap()->object_string())) {
4305 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4306 __ JumpIfSmi(x0, if_false);
4307 if (!FLAG_harmony_typeof) {
4308 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4323 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4324 if (if_false != fall_through) __ B(if_false);
4326 context()->Plug(if_true, if_false);
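// VisitCompareOperation: literal typeof/nil comparisons are handled by the
// specialized emitters first. Token::IN tests the result in x0 against the
// true value, instanceof compares the stub result against 0, and the
// remaining operators use a compare IC with an inlined smi fast path guarded
// by a JumpPatchSite.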
4330 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4331 Comment cmnt(masm_, "[ CompareOperation");
4332 SetSourcePosition(expr->position());
4337 if (TryLiteralCompare(expr)) {
4342 Label materialize_true;
4343 Label materialize_false;
4344 Label* if_true = NULL;
4345 Label* if_false = NULL;
4346 Label* fall_through = NULL;
4347 context()->PrepareTest(&materialize_true, &materialize_false,
4348 &if_true, &if_false, &fall_through);
4351 VisitForStackValue(expr->left());
4354 VisitForStackValue(expr->right());
4356 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4357 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4358 Split(eq, if_true, if_false, fall_through);
4361 case Token::INSTANCEOF: {
4362 VisitForStackValue(expr->right());
4365 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4367 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4372 VisitForAccumulatorValue(expr->right());
4378 JumpPatchSite patch_site(masm_);
4379 if (ShouldInlineSmiCase(op)) {
4381 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4383 Split(cond, if_true, if_false, NULL);
4384 __ Bind(&slow_case);
4388 SetSourcePosition(expr->position());
4390 CallIC(ic, expr->CompareOperationFeedbackId());
4391 patch_site.EmitPatchInfo();
4392 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4393 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4399 context()->Plug(if_true, if_false);
4403 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4404 Expression* sub_expr,
4406 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4407 Label materialize_true, materialize_false;
4408 Label* if_true = NULL;
4409 Label* if_false = NULL;
4410 Label* fall_through = NULL;
4411 context()->PrepareTest(&materialize_true, &materialize_false,
4412 &if_true, &if_false, &fall_through);
4414 VisitForAccumulatorValue(sub_expr);
4415 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4417 if (expr->op() == Token::EQ_STRICT) {
4419 Heap::kNullValueRootIndex :
4420 Heap::kUndefinedValueRootIndex;
4421 __ CompareRoot(x0, nil_value);
4422 Split(eq, if_true, if_false, fall_through);
4425 CallIC(ic, expr->CompareOperationFeedbackId());
4426 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4429 context()->Plug(if_true, if_false);
4433 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4435 context()->Plug(x0);
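// VisitYield: SUSPEND/INITIAL yields record the continuation in the generator
// object and either return directly (when the operand stack is empty) or
// suspend through the hidden SuspendJSGeneratorObject runtime; FINAL yields
// return an iterator result with done == true; DELEGATING yields (yield*)
// drive the delegate iterator in a loop, with a try handler that forwards
// exceptions to the delegate's "throw" method.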
4439 void FullCodeGenerator::VisitYield(Yield* expr) {
4440 Comment cmnt(masm_, "[ Yield");
4443 VisitForStackValue(expr->expression());
4449 switch (expr->yield_kind()) {
4450 case Yield::SUSPEND:
4452 EmitCreateIteratorResult(false);
4453 __ Push(result_register());
4455 case Yield::INITIAL: {
4456 Label suspend, continuation, post_runtime, resume;
4463 __ Bind(&continuation);
4467 VisitForAccumulatorValue(expr->generator_object());
4476 __ Cmp(__ StackPointer(), x1);
4477 __ B(eq, &post_runtime);
4479 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
4481 __ Bind(&post_runtime);
4482 __ Pop(result_register());
4483 EmitReturnSequence();
4486 context()->Plug(result_register());
4490 case Yield::FINAL: {
4491 VisitForAccumulatorValue(expr->generator_object());
4496 EmitCreateIteratorResult(true);
4497 EmitUnwindBeforeReturn();
4498 EmitReturnSequence();
4502 case Yield::DELEGATING: {
4503 VisitForStackValue(expr->generator_object());
4509 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4510 Label l_next, l_call, l_loop;
4512 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4517 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4518 __ LoadRoot(x2, Heap::kthrow_stringRootIndex);
4519 __ Peek(x3, 1 * kPointerSize);
4520 __ Push(x2, x3, x0);
4528 __ PushTryHandler(StackHandler::CATCH, expr->index());
4536 __ Bind(&l_continuation);
4539 __ Bind(&l_suspend);
4540 const int generator_object_depth = kPointerSize + handler_size;
4541 __ Peek(x0, generator_object_depth);
4550 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
4553 EmitReturnSequence();
4559 __ LoadRoot(x2, Heap::knext_stringRootIndex);
4560 __ Peek(x3, 1 * kPointerSize);
4561 __ Push(x2, x3, x0);
4565 __ Peek(x1, 1 * kPointerSize);
4566 __ Peek(x0, 2 * kPointerSize);
4567 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
4570 __ Poke(x1, 2 * kPointerSize);
4580 __ LoadRoot(x2, Heap::kdone_stringRootIndex);
4589 __ LoadRoot(x2, Heap::kvalue_stringRootIndex);
4591 context()->DropAndPlug(2, x0);
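// EmitGeneratorResume: checks that the generator is neither closed nor
// already running, rebuilds the callee frame (pushing the-hole values and
// restoring fp), and when the saved operand stack is empty jumps straight
// back to the stored continuation; otherwise the hidden
// ResumeJSGeneratorObject runtime finishes the resume. A closed generator
// produces a done iterator result with undefined value for next() and throws
// the supplied value for throw(); a running generator raises a
// GeneratorStateError.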
4598 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4601 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4602 Register value_reg = x0;
4603 Register generator_object = x1;
4604 Register the_hole = x2;
4605 Register operand_stack_size = w3;
4606 Register function = x4;
4612 VisitForStackValue(generator);
4613 VisitForAccumulatorValue(value);
4614 __ Pop(generator_object);
4617 Label wrong_state, closed_state, done;
4645 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4646 __ PushMultipleTimes(the_hole, w10);
4651 __ Bl(&resume_frame);
4654 __ Bind(&resume_frame);
4659 __ Add(fp, __ StackPointer(), kPointerSize * 2);
4664 __ Ldr(operand_stack_size,
4671 __ Cbnz(operand_stack_size, &slow_resume);
4676 __ Add(x10, x10, x11);
4682 __ Bind(&slow_resume);
4687 __ PushMultipleTimes(the_hole, operand_stack_size);
4690 __ Push(generator_object, result_register(), x10);
4691 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
4696 __ Bind(&closed_state);
4699 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4702 EmitCreateIteratorResult(true);
4706 __ CallRuntime(Runtime::kHiddenThrow, 1);
4711 __ Bind(&wrong_state);
4712 __ Push(generator_object);
4713 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
4716 context()->Plug(result_register());
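// EmitCreateIteratorResult: allocates a result object from the native
// context's generator result map (falling back to AllocateInNewSpace if the
// inline allocation fails) and stores the popped value, the boolean done
// flag, and the empty fixed array for the properties and elements fields.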
4720 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4724 Handle<Map> map(isolate()->native_context()->generator_result_map());
4728 Register result = x0;
4729 __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4732 __ Bind(&gc_required);
4734 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
4735 __ Ldr(context_register(),
4738 __ Bind(&allocated);
4739 Register map_reg = x1;
4740 Register result_value = x2;
4741 Register boolean_done = x3;
4742 Register empty_fixed_array = x4;
4743 __ Mov(map_reg, Operand(map));
4744 __ Pop(result_value);
4745 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4746 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4750 __ Str(empty_fixed_array,
4753 __ Str(result_value,
4756 __ Str(boolean_done,
4776 Register FullCodeGenerator::result_register() {
4781 Register FullCodeGenerator::context_register() {
4786 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4792 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4797 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4799 if (declaration_scope->is_global_scope() ||
4800 declaration_scope->is_module_scope()) {
4807 } else if (declaration_scope->is_eval_scope()) {
4814 ASSERT(declaration_scope->is_function_scope());
4821 void FullCodeGenerator::EnterFinallyBlock() {
4823 ASSERT(!result_register().is(x10));
4826 __ Sub(x10, lr, Operand(masm_->CodeObject()));
4828 __ Push(result_register(), x10);
4831 ExternalReference pending_message_obj =
4832 ExternalReference::address_of_pending_message_obj(isolate());
4833 __ Mov(x10, pending_message_obj);
4836 ExternalReference has_pending_message =
4837 ExternalReference::address_of_has_pending_message(isolate());
4838 __ Mov(x11, has_pending_message);
4844 ExternalReference pending_message_script =
4845 ExternalReference::address_of_pending_message_script(isolate());
4846 __ Mov(x10, pending_message_script);
4852 void FullCodeGenerator::ExitFinallyBlock() {
4854 ASSERT(!result_register().is(x10));
4857 __ Pop(x10, x11, x12);
4858 ExternalReference pending_message_script =
4859 ExternalReference::address_of_pending_message_script(isolate());
4860 __ Mov(x13, pending_message_script);
4864 ExternalReference has_pending_message =
4865 ExternalReference::address_of_has_pending_message(isolate());
4866 __ Mov(x13, has_pending_message);
4869 ExternalReference pending_message_obj =
4870 ExternalReference::address_of_pending_message_obj(isolate());
4871 __ Mov(x13, pending_message_obj);
4875 __ Pop(x10, result_register());
4879 __ Add(x11, x10, Operand(masm_->CodeObject()));
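// Back-edge patching for on-stack replacement: the code below rewrites the
// branch at each back edge and the pc-relative load of the target builtin so
// the same call site can invoke the stack-check interrupt, OnStackReplacement
// or OsrAfterStackCheck, and later reads the load target back to determine
// which of these states the unoptimized code is currently in.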
4889 BackEdgeState target_state,
4890 Code* replacement_code) {
4893 PatchingAssembler patcher(branch_address, 1);
4899 6 * kInstructionSize));
4901 switch (target_state) {
4924 Address interrupt_address_pointer =
4925 reinterpret_cast<Address>(load) + load->ImmPCOffset();
4927 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4929 ->OnStackReplacement()
4932 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4937 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4939 ->OsrAfterStackCheck()
4942 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4944 ->OnStackReplacement()
4947 reinterpret_cast<uint64_t>(replacement_code->entry());
4949 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4950 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4956 Code* unoptimized_code,
4966 load->ImmPCOffset());
4967 if (entry == reinterpret_cast<uint64_t>(
4968 isolate->builtins()->OnStackReplacement()->entry())) {
4970 } else if (entry == reinterpret_cast<uint64_t>(
4971 isolate->builtins()->OsrAfterStackCheck()->entry())) {
4982 #define __ ACCESS_MASM(masm())
4987 int* context_length) {
4995 __ Drop(*stack_depth);
4996 if (*context_length > 0) {
5002 __ Bl(finally_entry_);
5005 *context_length = 0;
5015 #endif // V8_TARGET_ARCH_ARM64