30 #if V8_TARGET_ARCH_ARM
48 #define __ ACCESS_MASM(masm_)
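// A JumpPatchSite marks a patchable location in the generated code: the
// cmp(reg, Operand(reg)) emitted below is a placeholder smi check that can be
// rewritten later, and EmitPatchInfo() records the distance back to it as a
// cmp with a raw 12-bit immediate (delta = reg code * kOff12Mask + immediate,
// as the division/modulo below implies).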
59 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
61 info_emitted_ = false;
66 ASSERT(patch_site_.is_bound() == info_emitted_);
71 void EmitJumpIfNotSmi(Register reg, Label* target) {
72 ASSERT(!patch_site_.is_bound() && !info_emitted_);
74 __ bind(&patch_site_);
75 __ cmp(reg, Operand(reg));
81 void EmitJumpIfSmi(Register reg, Label* target) {
82 ASSERT(!patch_site_.is_bound() && !info_emitted_);
84 __ bind(&patch_site_);
85 __ cmp(reg, Operand(reg));
89 void EmitPatchInfo() {
92 if (patch_site_.is_bound()) {
93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
95 reg.set_code(delta_to_patch_site / kOff12Mask);
96 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
106 MacroAssembler* masm_;
114 static void EmitStackCheck(MacroAssembler* masm_,
115 Register stack_limit_scratch,
117 Register scratch = sp) {
118 Isolate* isolate = masm_->isolate();
120 ASSERT(scratch.is(sp) == (pointers == 0));
124 __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
125 __ cmp(scratch, Operand(stack_limit_scratch));
128 __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
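// Generate() emits the prologue for the full (non-optimizing) code generator:
// it checks the stack limit, allocates stack locals and the function context,
// materializes the arguments object if needed, visits the declarations, and
// finally visits the function body and the return sequence.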
148 void FullCodeGenerator::Generate() {
149 CompilationInfo* info = info_;
151 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
153 InitializeFeedbackVector();
155 profiling_counter_ = isolate()->factory()->NewCell(
156 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
157 SetFunctionPosition(function());
158 Comment cmnt(masm_, "[ function compiled by full code generator");
163 if (strlen(FLAG_stop_at) > 0 &&
164 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
172 if (info->strict_mode() == SLOPPY && !info->is_native()) {
174 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
176 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
190 FrameScope frame_scope(masm_, StackFrame::MANUAL);
192 info->set_prologue_offset(masm_->pc_offset());
194 info->AddNoFrameRange(0, masm_->pc_offset());
196 { Comment cmnt(masm_, "[ Allocate locals");
197 int locals_count = info->scope()->num_stack_slots();
199 ASSERT(!info->function()->is_generator() || locals_count == 0);
200 if (locals_count > 0) {
201 if (locals_count >= 128) {
202 EmitStackCheck(masm_, r2, locals_count, r9);
204 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
205 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
206 if (locals_count >= kMaxPushes) {
207 int loop_iterations = locals_count / kMaxPushes;
208 __ mov(r2, Operand(loop_iterations));
210 __ bind(&loop_header);
212 for (int i = 0; i < kMaxPushes; i++) {
217 __ b(&loop_header, ne);
219 int remaining = locals_count % kMaxPushes;
221 for (int i = 0; i < remaining; i++) {
227 bool function_in_register = true;
231 if (heap_slots > 0) {
233 Comment cmnt(masm_, "[ Allocate context");
234 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
236 __ Push(info->scope()->GetScopeInfo());
237 __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
238 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
239 FastNewContextStub stub(heap_slots);
243 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
245 function_in_register = false;
251 int num_parameters = info->scope()->num_parameters();
252 for (int i = 0; i < num_parameters; i++) {
254 if (var->IsContextSlot()) {
264 __ RecordWriteContextSlot(
270 Variable* arguments = scope()->arguments();
271 if (arguments != NULL) {
273 Comment cmnt(masm_, "[ Allocate arguments object");
274 if (!function_in_register) {
281 int num_parameters = info->scope()->num_parameters();
293 if (strict_mode() == STRICT) {
295 } else if (function()->has_duplicate_parameters()) {
300 ArgumentsAccessStub stub(type);
303 SetVar(arguments, r0, r1, r2);
307 __ CallRuntime(Runtime::kTraceEnter, 0);
312 if (scope()->HasIllegalRedeclaration()) {
313 Comment cmnt(masm_, "[ Declarations");
318 { Comment cmnt(masm_, "[ Declarations");
321 if (scope()->is_function_scope() && scope()->function() != NULL) {
322 VariableDeclaration* function = scope()->function();
323 ASSERT(function->proxy()->var()->mode() == CONST ||
326 VisitVariableDeclaration(function);
328 VisitDeclarations(scope()->declarations());
331 { Comment cmnt(masm_, "[ Stack check");
333 EmitStackCheck(masm_, ip);
336 { Comment cmnt(masm_, "[ Body");
337 ASSERT(loop_depth() == 0);
338 VisitStatements(function()->body());
339 ASSERT(loop_depth() == 0);
345 { Comment cmnt(masm_, "[ return <undefined>;");
346 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
348 EmitReturnSequence();
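// The profiling-counter helpers below maintain the interrupt budget:
// profiling_counter_ starts at FLAG_interrupt_budget, is decremented on back
// edges and at returns, and the InterruptCheck builtin is called when it
// underflows before the counter is reset.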
356 void FullCodeGenerator::ClearAccumulator() {
361 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
362 __ mov(r2, Operand(profiling_counter_));
369 void FullCodeGenerator::EmitProfilingCounterReset() {
370 int reset_value = FLAG_interrupt_budget;
371 if (isolate()->IsDebuggerActive()) {
373 reset_value = FLAG_interrupt_budget >> 4;
375 __ mov(r2, Operand(profiling_counter_));
381 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
382 Label* back_edge_target) {
383 Comment cmnt(masm_, "[ Back edge bookkeeping");
388 ASSERT(back_edge_target->is_bound());
389 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
391 Max(1, distance / kCodeSizeMultiplier));
392 EmitProfilingCounterDecrement(weight);
394 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
399 RecordBackEdge(stmt->OsrEntryId());
401 EmitProfilingCounterReset();
408 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
412 void FullCodeGenerator::EmitReturnSequence() {
413 Comment cmnt(masm_, "[ Return sequence");
414 if (return_label_.is_bound()) {
415 __ b(&return_label_);
417 __ bind(&return_label_);
422 __ CallRuntime(Runtime::kTraceExit, 1);
427 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
429 int distance = masm_->pc_offset();
431 Max(1, distance / kCodeSizeMultiplier));
433 EmitProfilingCounterDecrement(weight);
437 __ Call(isolate()->builtins()->InterruptCheck(),
438 RelocInfo::CODE_TARGET);
440 EmitProfilingCounterReset();
445 Label check_exit_codesize;
446 __ bind(&check_exit_codesize);
454 PredictableCodeSizeScope predictable(masm_, -1);
456 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
457 __ add(sp, sp, Operand(sp_delta));
466 masm_->InstructionsGeneratedSince(&check_exit_codesize));
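// The Plug/DropAndPlug overloads below deliver a value to the current
// expression context: EffectContext discards it, AccumulatorValueContext
// leaves it in the result register (r0), StackValueContext pushes it, and
// TestContext branches on it via DoTest after recording a bailout point.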
472 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
473 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
477 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
478 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
479 codegen()->GetVar(result_register(), var);
483 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
484 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
485 codegen()->GetVar(result_register(), var);
486 __ push(result_register());
490 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
491 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
493 codegen()->GetVar(result_register(), var);
494 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
495 codegen()->DoTest(this);
503 void FullCodeGenerator::AccumulatorValueContext::Plug(
505 __ LoadRoot(result_register(), index);
509 void FullCodeGenerator::StackValueContext::Plug(
511 __ LoadRoot(result_register(), index);
512 __ push(result_register());
517 codegen()->PrepareForBailoutBeforeSplit(condition(),
521 if (index == Heap::kUndefinedValueRootIndex ||
522 index == Heap::kNullValueRootIndex ||
523 index == Heap::kFalseValueRootIndex) {
524 if (false_label_ != fall_through_) __ b(false_label_);
525 } else if (index == Heap::kTrueValueRootIndex) {
526 if (true_label_ != fall_through_) __ b(true_label_);
528 __ LoadRoot(result_register(), index);
529 codegen()->DoTest(this);
534 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
538 void FullCodeGenerator::AccumulatorValueContext::Plug(
539 Handle<Object> lit) const {
540 __ mov(result_register(), Operand(lit));
544 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
546 __ mov(result_register(), Operand(lit));
547 __ push(result_register());
551 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
552 codegen()->PrepareForBailoutBeforeSplit(condition(),
556 ASSERT(!lit->IsUndetectableObject());
557 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
558 if (false_label_ != fall_through_) __ b(false_label_);
559 } else if (lit->IsTrue() || lit->IsJSObject()) {
560 if (true_label_ != fall_through_) __ b(true_label_);
561 } else if (lit->IsString()) {
563 if (false_label_ != fall_through_) __ b(false_label_);
565 if (true_label_ != fall_through_) __ b(true_label_);
567 } else if (lit->IsSmi()) {
569 if (false_label_ != fall_through_) __ b(false_label_);
571 if (true_label_ != fall_through_) __ b(true_label_);
575 __ mov(result_register(), Operand(lit));
576 codegen()->DoTest(this);
581 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
582 Register reg) const {
588 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
590 Register reg) const {
593 __ Move(result_register(), reg);
597 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
598 Register reg) const {
600 if (count > 1) __ Drop(count - 1);
605 void FullCodeGenerator::TestContext::DropAndPlug(int count,
606 Register reg) const {
610 __ Move(result_register(), reg);
611 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
612 codegen()->DoTest(this);
616 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
617 Label* materialize_false) const {
618 ASSERT(materialize_true == materialize_false);
619 __ bind(materialize_true);
623 void FullCodeGenerator::AccumulatorValueContext::Plug(
624 Label* materialize_true,
625 Label* materialize_false) const {
627 __ bind(materialize_true);
628 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
630 __ bind(materialize_false);
631 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
636 void FullCodeGenerator::StackValueContext::Plug(
637 Label* materialize_true,
638 Label* materialize_false) const {
640 __ bind(materialize_true);
641 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
643 __ bind(materialize_false);
644 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
650 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
651 Label* materialize_false) const {
652 ASSERT(materialize_true == true_label_);
653 ASSERT(materialize_false == false_label_);
657 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
661 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
663 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
664 __ LoadRoot(result_register(), value_root_index);
668 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
670 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
671 __ LoadRoot(ip, value_root_index);
676 void FullCodeGenerator::TestContext::Plug(bool flag) const {
677 codegen()->PrepareForBailoutBeforeSplit(condition(),
682 if (true_label_ != fall_through_) __ b(true_label_);
684 if (false_label_ != fall_through_) __ b(false_label_);
689 void FullCodeGenerator::DoTest(Expression* condition,
692 Label* fall_through) {
694 CallIC(ic, condition->test_id());
695 __ tst(result_register(), result_register());
696 Split(ne, if_true, if_false, fall_through);
700 void FullCodeGenerator::Split(Condition cond,
703 Label* fall_through) {
704 if (if_false == fall_through) {
706 } else if (if_true == fall_through) {
715 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
716 ASSERT(var->IsStackAllocated());
720 if (var->IsParameter()) {
729 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
730 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
731 if (var->IsContextSlot()) {
733 __ LoadContext(scratch, context_chain_length);
736 return StackOperand(var);
741 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
744 __ ldr(dest, location);
748 void FullCodeGenerator::SetVar(Variable* var,
752 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
753 ASSERT(!scratch0.is(src));
754 ASSERT(!scratch0.is(scratch1));
755 ASSERT(!scratch1.is(src));
756 MemOperand location = VarOperand(var, scratch0);
757 __ str(src, location);
760 if (var->IsContextSlot()) {
761 __ RecordWriteContextSlot(scratch0,
771 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
772 bool should_normalize,
781 if (should_normalize) __ b(&skip);
782 PrepareForBailout(expr, TOS_REG);
783 if (should_normalize) {
784 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
786 Split(eq, if_true, if_false, NULL);
792 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
795 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
796 if (generate_debug_code_) {
799 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
800 __ Check(ne, kDeclarationInWithContext);
801 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
802 __ Check(ne, kDeclarationInCatchContext);
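// Declarations are handled per variable location: unallocated (global)
// bindings are collected in globals_ and flushed with a single
// kHiddenDeclareGlobals call, while stack, context and lookup slots are
// initialized inline below.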
807 void FullCodeGenerator::VisitVariableDeclaration(
808 VariableDeclaration* declaration) {
812 VariableProxy* proxy = declaration->proxy();
814 Variable* variable = proxy->var();
816 switch (variable->location()) {
818 globals_->Add(variable->name(), zone());
819 globals_->Add(variable->binding_needs_init()
820 ? isolate()->factory()->the_hole_value()
821 : isolate()->factory()->undefined_value(),
828 Comment cmnt(masm_, "[ VariableDeclaration");
829 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
830 __ str(ip, StackOperand(variable));
836 Comment cmnt(masm_, "[ VariableDeclaration");
837 EmitDebugCheckDeclarationContext(variable);
838 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
846 Comment cmnt(masm_, "[ VariableDeclaration");
847 __ mov(r2, Operand(variable->name()));
858 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
864 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
871 void FullCodeGenerator::VisitFunctionDeclaration(
872 FunctionDeclaration* declaration) {
873 VariableProxy* proxy = declaration->proxy();
874 Variable* variable = proxy->var();
875 switch (variable->location()) {
877 globals_->Add(variable->name(), zone());
878 Handle<SharedFunctionInfo> function =
881 if (function.is_null()) return SetStackOverflow();
882 globals_->Add(function, zone());
888 Comment cmnt(masm_, "[ FunctionDeclaration");
889 VisitForAccumulatorValue(declaration->fun());
890 __ str(result_register(), StackOperand(variable));
895 Comment cmnt(masm_, "[ FunctionDeclaration");
896 EmitDebugCheckDeclarationContext(variable);
897 VisitForAccumulatorValue(declaration->fun());
901 __ RecordWriteContextSlot(cp,
914 Comment cmnt(masm_, "[ FunctionDeclaration");
915 __ mov(r2, Operand(variable->name()));
919 VisitForStackValue(declaration->fun());
920 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
927 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
928 Variable* variable = declaration->proxy()->var();
930 ASSERT(variable->interface()->IsFrozen());
932 Comment cmnt(masm_, "[ ModuleDeclaration");
933 EmitDebugCheckDeclarationContext(variable);
943 __ RecordWriteContextSlot(cp,
951 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
954 Visit(declaration->module());
958 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
959 VariableProxy* proxy = declaration->proxy();
960 Variable* variable = proxy->var();
961 switch (variable->location()) {
967 Comment cmnt(masm_, "[ ImportDeclaration");
968 EmitDebugCheckDeclarationContext(variable);
981 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
986 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
989 __ mov(r1, Operand(pairs));
992 __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
997 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
999 __ Push(descriptions);
1000 __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
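// Each switch clause is compiled as a strict-equality comparison against the
// tag: a JumpPatchSite covers the inline smi fast case, and the generic
// compare IC serves as the slow path before jumping to the clause body.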
1005 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1006 Comment cmnt(masm_, "[ SwitchStatement");
1007 Breakable nested_statement(this, stmt);
1008 SetStatementPosition(stmt);
1011 VisitForStackValue(stmt->tag());
1014 ZoneList<CaseClause*>* clauses = stmt->cases();
1015 CaseClause* default_clause = NULL;
1019 for (int i = 0; i < clauses->length(); i++) {
1020 CaseClause* clause = clauses->at(i);
1021 clause->body_target()->Unuse();
1024 if (clause->is_default()) {
1025 default_clause = clause;
1029 Comment cmnt(masm_, "[ Case comparison");
1030 __ bind(&next_test);
1034 VisitForAccumulatorValue(clause->label());
1038 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1039 JumpPatchSite patch_site(masm_);
1040 if (inline_smi_code) {
1043 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1046 __ b(ne, &next_test);
1048 __ b(clause->body_target());
1049 __ bind(&slow_case);
1053 SetSourcePosition(clause->position());
1055 CallIC(ic, clause->CompareId());
1056 patch_site.EmitPatchInfo();
1060 PrepareForBailout(clause, TOS_REG);
1061 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1063 __ b(ne, &next_test);
1065 __ jmp(clause->body_target());
1068 __ cmp(r0, Operand::Zero());
1069 __ b(ne, &next_test);
1071 __ b(clause->body_target());
1076 __ bind(&next_test);
1078 if (default_clause == NULL) {
1079 __ b(nested_statement.break_label());
1081 __ b(default_clause->body_target());
1085 for (int i = 0; i < clauses->length(); i++) {
1086 Comment cmnt(masm_, "[ Case body");
1087 CaseClause* clause = clauses->at(i);
1088 __ bind(clause->body_target());
1089 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1090 VisitStatements(clause->statements());
1093 __ bind(nested_statement.break_label());
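// for-in first normalizes the receiver to an object, then tries to reuse the
// map's cached enumerable keys (CheckEnumCache); if that fails it falls back
// to Runtime::kGetPropertyNamesFast and iterates a plain FixedArray of keys.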
1098 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1099 Comment cmnt(masm_, "[ ForInStatement");
1100 int slot = stmt->ForInFeedbackSlot();
1101 SetStatementPosition(stmt);
1104 ForIn loop_statement(this, stmt);
1105 increment_loop_depth();
1109 VisitForAccumulatorValue(stmt->enumerable());
1110 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1113 Register null_value = r5;
1114 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1115 __ cmp(r0, null_value);
1118 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1121 Label convert, done_convert;
1122 __ JumpIfSmi(r0, &convert);
1124 __ b(ge, &done_convert);
1128 __ bind(&done_convert);
1135 __ b(le, &call_runtime);
1141 __ CheckEnumCache(null_value, &call_runtime);
1150 __ bind(&call_runtime);
1152 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1159 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1161 __ b(ne, &fixed_array);
1164 Label no_descriptors;
1165 __ bind(&use_cache);
1169 __ b(eq, &no_descriptors);
1171 __ LoadInstanceDescriptors(r0, r2);
1182 __ bind(&no_descriptors);
1188 __ bind(&fixed_array);
1190 Handle<Object> feedback = Handle<Object>(
1193 StoreFeedbackVectorSlot(slot, feedback);
1194 __ Move(r1, FeedbackVector());
1202 __ b(gt, &non_proxy);
1204 __ bind(&non_proxy);
1216 __ b(hs, loop_statement.break_label());
1221 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
1233 __ b(eq, &update_each);
1238 __ b(eq, &update_each);
1247 __ b(eq, loop_statement.continue_label());
1251 __ bind(&update_each);
1252 __ mov(result_register(), r3);
1254 { EffectContext context(this);
1255 EmitAssignment(stmt->each());
1259 Visit(stmt->body());
1263 __ bind(loop_statement.continue_label());
1268 EmitBackEdgeBookkeeping(stmt, &loop);
1272 __ bind(loop_statement.break_label());
1278 decrement_loop_depth();
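// for-of is compiled against the iterator protocol: the iterator is obtained
// via assign_iterator(), each pass evaluates next_result(), the loop exits
// when result_done() is true, and otherwise the value is bound via
// assign_each() before visiting the body.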
1282 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1283 Comment cmnt(masm_, "[ ForOfStatement");
1284 SetStatementPosition(stmt);
1286 Iteration loop_statement(this, stmt);
1287 increment_loop_depth();
1290 VisitForAccumulatorValue(stmt->assign_iterator());
1293 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
1294 __ b(eq, loop_statement.break_label());
1295 __ CompareRoot(r0, Heap::kNullValueRootIndex);
1296 __ b(eq, loop_statement.break_label());
1299 Label convert, done_convert;
1300 __ JumpIfSmi(r0, &convert);
1302 __ b(ge, &done_convert);
1306 __ bind(&done_convert);
1310 __ bind(loop_statement.continue_label());
1313 VisitForEffect(stmt->next_result());
1316 Label result_not_done;
1317 VisitForControl(stmt->result_done(),
1318 loop_statement.break_label(),
1321 __ bind(&result_not_done);
1324 VisitForEffect(stmt->assign_each());
1327 Visit(stmt->body());
1330 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1331 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1332 __ jmp(loop_statement.continue_label());
1336 __ bind(loop_statement.break_label());
1337 decrement_loop_depth();
1341 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1349 if (!FLAG_always_opt &&
1350 !FLAG_prepare_always_opt &&
1352 scope()->is_function_scope() &&
1353 info->num_literals() == 0) {
1354 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1355 __ mov(r2, Operand(info));
1358 __ mov(r0, Operand(info));
1359 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1360 : Heap::kFalseValueRootIndex);
1362 __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1364 context()->Plug(r0);
1368 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1369 Comment cmnt(masm_, "[ VariableProxy");
1370 EmitVariableLoad(expr);
1374 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1377 Register current = cp;
1383 if (s->num_heap_slots() > 0) {
1384 if (s->calls_sloppy_eval()) {
1397 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1398 s = s->outer_scope();
1398 s = s->outer_scope();
1401 if (s->is_eval_scope()) {
1403 if (!current.is(next)) {
1404 __ Move(next, current);
1409 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1423 __ mov(r2, Operand(var->name()));
1431 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1433 ASSERT(var->IsContextSlot());
1434 Register context = cp;
1438 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1439 if (s->num_heap_slots() > 0) {
1440 if (s->calls_sloppy_eval()) {
1463 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1473 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1476 Variable* local = var->local_if_not_shadowed();
1477 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1478 if (local->mode() == LET || local->mode() == CONST ||
1480 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1482 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1485 __ mov(r0, Operand(var->name()));
1487 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1495 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1497 SetSourcePosition(proxy->position());
1498 Variable* var = proxy->var();
1502 switch (var->location()) {
1504 Comment cmnt(masm_, "[ Global variable");
1508 __ mov(r2, Operand(var->name()));
1510 context()->Plug(r0);
1517 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1518 : "[ Stack variable");
1519 if (var->binding_needs_init()) {
1543 bool skip_init_check;
1545 skip_init_check = false;
1548 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1549 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1551 var->initializer_position() < proxy->position();
1554 if (!skip_init_check) {
1557 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1558 if (var->mode() == LET || var->mode() == CONST) {
1563 __ mov(r0, Operand(var->name()));
1565 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1570 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1572 context()->Plug(r0);
1576 context()->Plug(var);
1581 Comment cmnt(masm_, "[ Lookup variable");
1587 __ mov(r1, Operand(var->name()));
1589 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1591 context()->Plug(r0);
1597 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1598 Comment cmnt(masm_, "[ RegExpLiteral");
1609 int literal_offset =
1612 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1614 __ b(ne, &materialized);
1619 __ mov(r2, Operand(expr->pattern()));
1620 __ mov(r1, Operand(expr->flags()));
1622 __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1625 __ bind(&materialized);
1627 Label allocated, runtime_allocate;
1631 __ bind(&runtime_allocate);
1634 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1637 __ bind(&allocated);
1642 __ CopyFields(r0, r5, d0, size / kPointerSize);
1643 context()->Plug(r0);
1647 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1648 if (expression == NULL) {
1649 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1652 VisitForStackValue(expression);
1657 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1658 Comment cmnt(masm_, "[ ObjectLiteral");
1660 expr->BuildConstantProperties(isolate());
1661 Handle<FixedArray> constant_properties = expr->constant_properties();
1665 __ mov(r1, Operand(constant_properties));
1666 int flags = expr->fast_elements()
1667 ? ObjectLiteral::kFastElements
1668 : ObjectLiteral::kNoFlags;
1669 flags |= expr->has_function()
1670 ? ObjectLiteral::kHasFunction
1671 : ObjectLiteral::kNoFlags;
1673 int properties_count = constant_properties->length() / 2;
1675 flags != ObjectLiteral::kFastElements ||
1678 __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1680 FastCloneShallowObjectStub stub(properties_count);
1686 bool result_saved = false;
1691 expr->CalculateEmitStore(zone());
1693 AccessorTable accessor_table(zone());
1694 for (int i = 0; i < expr->properties()->length(); i++) {
1695 ObjectLiteral::Property* property = expr->properties()->at(i);
1696 if (property->IsCompileTimeValue()) continue;
1698 Literal* key = property->key();
1699 Expression* value = property->value();
1700 if (!result_saved) {
1702 result_saved = true;
1704 switch (property->kind()) {
1707 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1710 case ObjectLiteral::Property::COMPUTED:
1711 if (key->value()->IsInternalizedString()) {
1712 if (property->emit_store()) {
1713 VisitForAccumulatorValue(value);
1714 __ mov(r2, Operand(key->value()));
1716 CallStoreIC(key->LiteralFeedbackId());
1719 VisitForEffect(value);
1726 VisitForStackValue(key);
1727 VisitForStackValue(value);
1728 if (property->emit_store()) {
1731 __ CallRuntime(Runtime::kSetProperty, 4);
1736 case ObjectLiteral::Property::PROTOTYPE:
1740 VisitForStackValue(value);
1741 if (property->emit_store()) {
1742 __ CallRuntime(Runtime::kSetPrototype, 2);
1748 case ObjectLiteral::Property::GETTER:
1749 accessor_table.lookup(key)->second->getter = value;
1751 case ObjectLiteral::Property::SETTER:
1752 accessor_table.lookup(key)->second->setter = value;
1759 for (AccessorTable::Iterator it = accessor_table.begin();
1760 it != accessor_table.end();
1764 VisitForStackValue(it->first);
1765 EmitAccessor(it->second->getter);
1766 EmitAccessor(it->second->setter);
1769 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1772 if (expr->has_function()) {
1776 __ CallRuntime(Runtime::kToFastProperties, 1);
1780 context()->PlugTOS();
1782 context()->Plug(r0);
1787 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1788 Comment cmnt(masm_, "[ ArrayLiteral");
1790 expr->BuildConstantElements(isolate());
1791 int flags = expr->depth() == 1
1792 ? ArrayLiteral::kShallowElements
1793 : ArrayLiteral::kNoFlags;
1795 ZoneList<Expression*>* subexprs = expr->values();
1796 int length = subexprs->length();
1797 Handle<FixedArray> constant_elements = expr->constant_elements();
1798 ASSERT_EQ(2, constant_elements->length());
1802 Handle<FixedArrayBase> constant_elements_values(
1806 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1815 __ mov(r1, Operand(constant_elements));
1816 if (has_fast_elements && constant_elements_values->map() ==
1817 isolate()->heap()->fixed_cow_array_map()) {
1818 FastCloneShallowArrayStub stub(
1820 allocation_site_mode,
1823 __ IncrementCounter(
1824 isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1829 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1832 FLAG_smi_only_arrays);
1836 if (has_fast_elements) {
1840 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1844 bool result_saved = false;
1848 for (int i = 0; i < length; i++) {
1849 Expression* subexpr = subexprs->at(i);
1854 if (!result_saved) {
1857 result_saved = true;
1859 VisitForAccumulatorValue(subexpr);
1867 __ RecordWriteField(r1, offset, result_register(), r2,
1872 StoreArrayLiteralElementStub stub;
1876 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1881 context()->PlugTOS();
1883 context()->Plug(r0);
1888 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1889 ASSERT(expr->target()->IsValidLeftHandSide());
1891 Comment cmnt(masm_, "[ Assignment");
1895 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1896 LhsKind assign_type = VARIABLE;
1897 Property* property = expr->target()->AsProperty();
1898 if (property != NULL) {
1899 assign_type = (property->key()->IsPropertyName())
1905 switch (assign_type) {
1909 case NAMED_PROPERTY:
1910 if (expr->is_compound()) {
1912 VisitForAccumulatorValue(property->obj());
1913 __ push(result_register());
1915 VisitForStackValue(property->obj());
1918 case KEYED_PROPERTY:
1919 if (expr->is_compound()) {
1920 VisitForStackValue(property->obj());
1921 VisitForAccumulatorValue(property->key());
1925 VisitForStackValue(property->obj());
1926 VisitForStackValue(property->key());
1933 if (expr->is_compound()) {
1934 { AccumulatorValueContext context(this);
1935 switch (assign_type) {
1937 EmitVariableLoad(expr->target()->AsVariableProxy());
1938 PrepareForBailout(expr->target(), TOS_REG);
1940 case NAMED_PROPERTY:
1941 EmitNamedPropertyLoad(property);
1942 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1944 case KEYED_PROPERTY:
1945 EmitKeyedPropertyLoad(property);
1946 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1953 VisitForAccumulatorValue(expr->value());
1955 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1958 SetSourcePosition(expr->position() + 1);
1959 AccumulatorValueContext context(this);
1960 if (ShouldInlineSmiCase(op)) {
1961 EmitInlineSmiBinaryOp(expr->binary_operation(),
1967 EmitBinaryOp(expr->binary_operation(), op, mode);
1971 PrepareForBailout(expr->binary_operation(), TOS_REG);
1973 VisitForAccumulatorValue(expr->value());
1977 SetSourcePosition(expr->position());
1980 switch (assign_type) {
1982 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1984 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1985 context()->Plug(r0);
1987 case NAMED_PROPERTY:
1988 EmitNamedPropertyAssignment(expr);
1990 case KEYED_PROPERTY:
1991 EmitKeyedPropertyAssignment(expr);
1997 void FullCodeGenerator::VisitYield(Yield* expr) {
1998 Comment cmnt(masm_, "[ Yield");
2001 VisitForStackValue(expr->expression());
2003 switch (expr->yield_kind()) {
2004 case Yield::SUSPEND:
2006 EmitCreateIteratorResult(false);
2007 __ push(result_register());
2009 case Yield::INITIAL: {
2010 Label suspend, continuation, post_runtime, resume;
2014 __ bind(&continuation);
2018 VisitForAccumulatorValue(expr->generator_object());
2028 __ b(eq, &post_runtime);
2030 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2032 __ bind(&post_runtime);
2033 __ pop(result_register());
2034 EmitReturnSequence();
2037 context()->Plug(result_register());
2041 case Yield::FINAL: {
2042 VisitForAccumulatorValue(expr->generator_object());
2047 EmitCreateIteratorResult(true);
2048 EmitUnwindBeforeReturn();
2049 EmitReturnSequence();
2053 case Yield::DELEGATING: {
2054 VisitForStackValue(expr->generator_object());
2060 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2061 Label l_next, l_call, l_loop;
2063 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2068 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2069 __ LoadRoot(r2, Heap::kthrow_stringRootIndex);
2079 __ PushTryHandler(StackHandler::CATCH, expr->index());
2083 __ bind(&l_continuation);
2085 __ bind(&l_suspend);
2086 const int generator_object_depth = kPointerSize + handler_size;
2096 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2099 EmitReturnSequence();
2105 __ LoadRoot(r2, Heap::knext_stringRootIndex);
2113 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2126 __ LoadRoot(r2, Heap::kdone_stringRootIndex);
2130 __ cmp(r0, Operand(0));
2135 __ LoadRoot(r2, Heap::kvalue_stringRootIndex);
2137 context()->DropAndPlug(2, r0);
2144 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2151 VisitForStackValue(generator);
2152 VisitForAccumulatorValue(value);
2156 Label wrong_state, closed_state, done;
2161 __ b(eq, &closed_state);
2162 __ b(lt, &wrong_state);
2176 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2177 Label push_argument_holes, push_frame;
2178 __ bind(&push_argument_holes);
2180 __ b(mi, &push_frame);
2182 __ jmp(&push_argument_holes);
2187 __ bind(&push_frame);
2188 __ bl(&resume_frame);
2190 __ bind(&resume_frame);
2196 __ PushFixedFrame(r4);
2209 __ cmp(r3, Operand(0));
2210 __ b(ne, &slow_resume);
2213 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2214 if (FLAG_enable_ool_constant_pool) {
2227 __ bind(&slow_resume);
2232 Label push_operand_holes, call_resume;
2233 __ bind(&push_operand_holes);
2235 __ b(mi, &call_resume);
2237 __ b(&push_operand_holes);
2238 __ bind(&call_resume);
2240 __ Push(r1, result_register());
2242 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2244 __ stop("not-reached");
2247 __ bind(&closed_state);
2250 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2253 EmitCreateIteratorResult(true);
2257 __ CallRuntime(Runtime::kHiddenThrow, 1);
2262 __ bind(&wrong_state);
2264 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2267 context()->Plug(result_register());
2271 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2275 Handle<Map> map(isolate()->native_context()->generator_result_map());
2280 __ bind(&gc_required);
2282 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2283 __ ldr(context_register(),
2286 __ bind(&allocated);
2289 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2290 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2307 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2308 SetSourcePosition(prop->position());
2309 Literal* key = prop->key()->AsLiteral();
2310 __ mov(r2, Operand(key->value()));
2316 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2317 SetSourcePosition(prop->position());
2319 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2320 CallIC(ic, prop->PropertyFeedbackId());
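// Binary operations first try an inline smi fast path: the operands are
// or'ed together and checked with a patchable smi test; non-smi inputs fall
// through to the generic BinaryOpICStub at the stub_call label.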
2324 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2327 Expression* left_expr,
2328 Expression* right_expr) {
2329 Label done, smi_case, stub_call;
2331 Register scratch1 = r2;
2332 Register scratch2 = r3;
2336 Register right = r0;
2340 __ orr(scratch1, left, Operand(right));
2342 JumpPatchSite patch_site(masm_);
2343 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2345 __ bind(&stub_call);
2346 BinaryOpICStub stub(op, mode);
2347 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2348 patch_site.EmitPatchInfo();
2356 __ GetLeastBitsFromSmi(scratch1, right, 5);
2357 __ mov(right, Operand(left, ASR, scratch1));
2361 __ SmiUntag(scratch1, left);
2362 __ GetLeastBitsFromSmi(scratch2, right, 5);
2363 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2364 __ TrySmiTag(right, scratch1, &stub_call);
2368 __ SmiUntag(scratch1, left);
2369 __ GetLeastBitsFromSmi(scratch2, right, 5);
2370 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2371 __ tst(scratch1, Operand(0xc0000000));
2372 __ b(ne, &stub_call);
2373 __ SmiTag(right, scratch1);
2377 __ add(scratch1, left, Operand(right), SetCC);
2378 __ b(vs, &stub_call);
2379 __ mov(right, scratch1);
2382 __ sub(scratch1, left, Operand(right), SetCC);
2383 __ b(vs, &stub_call);
2384 __ mov(right, scratch1);
2387 __ SmiUntag(ip, right);
2388 __ smull(scratch1, scratch2, left, ip);
2389 __ mov(ip, Operand(scratch1, ASR, 31));
2390 __ cmp(ip, Operand(scratch2));
2391 __ b(ne, &stub_call);
2392 __ cmp(scratch1, Operand::Zero());
2395 __ add(scratch2, right, Operand(left), SetCC);
2397 __ b(mi, &stub_call);
2401 __ orr(right, left, Operand(right));
2403 case Token::BIT_AND:
2404 __ and_(right, left, Operand(right));
2406 case Token::BIT_XOR:
2407 __ eor(right, left, Operand(right));
2414 context()->Plug(r0);
2418 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2422 BinaryOpICStub stub(op, mode);
2423 JumpPatchSite patch_site(masm_);
2424 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2425 patch_site.EmitPatchInfo();
2426 context()->Plug(r0);
2430 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2431 ASSERT(expr->IsValidLeftHandSide());
2435 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2436 LhsKind assign_type = VARIABLE;
2437 Property* prop = expr->AsProperty();
2439 assign_type = (prop->key()->IsPropertyName())
2444 switch (assign_type) {
2446 Variable* var = expr->AsVariableProxy()->var();
2447 EffectContext context(this);
2448 EmitVariableAssignment(var, Token::ASSIGN);
2451 case NAMED_PROPERTY: {
2453 VisitForAccumulatorValue(prop->obj());
2456 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2460 case KEYED_PROPERTY: {
2462 VisitForStackValue(prop->obj());
2463 VisitForAccumulatorValue(prop->key());
2466 Handle<Code> ic = strict_mode() == SLOPPY
2467 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2468 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2473 context()->Plug(r0);
2477 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2479 __ str(result_register(), location);
2480 if (var->IsContextSlot()) {
2482 __ mov(r3, result_register());
2484 __ RecordWriteContextSlot(
2490 void FullCodeGenerator::EmitCallStoreContextSlot(
2493 __ mov(r1, Operand(name));
2496 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
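// Variable stores are dispatched on the binding: global (unallocated) stores
// go through the store IC, const/let initializations and lookup-slot stores
// call into the runtime, and ordinary stack or context slots are written
// directly, with a write barrier for context slots.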
2500 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2501 if (var->IsUnallocated()) {
2503 __ mov(r2, Operand(var->name()));
2507 } else if (op == Token::INIT_CONST_LEGACY) {
2509 ASSERT(!var->IsParameter());
2510 if (var->IsLookupSlot()) {
2512 __ mov(r0, Operand(var->name()));
2514 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2516 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2519 __ ldr(r2, location);
2520 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2522 EmitStoreToStackLocalOrContextSlot(var, location);
2526 } else if (var->mode() == LET && op != Token::INIT_LET) {
2528 if (var->IsLookupSlot()) {
2529 EmitCallStoreContextSlot(var->name(), strict_mode());
2531 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2534 __ ldr(r3, location);
2535 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2537 __ mov(r3, Operand(var->name()));
2539 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2542 EmitStoreToStackLocalOrContextSlot(var, location);
2545 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2548 if (var->IsLookupSlot()) {
2549 EmitCallStoreContextSlot(var->name(), strict_mode());
2551 ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2553 if (generate_debug_code_ && op == Token::INIT_LET) {
2555 __ ldr(r2, location);
2556 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2557 __ Check(eq, kLetBindingReInitialization);
2559 EmitStoreToStackLocalOrContextSlot(var, location);
2566 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2568 Property* prop = expr->target()->AsProperty();
2573 SetSourcePosition(expr->position());
2574 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2577 CallStoreIC(expr->AssignmentFeedbackId());
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(r0);
2584 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2588 SetSourcePosition(expr->position());
2591 Handle<Code> ic = strict_mode() == SLOPPY
2592 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2593 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2594 CallIC(ic, expr->AssignmentFeedbackId());
2596 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2597 context()->Plug(r0);
2601 void FullCodeGenerator::VisitProperty(Property* expr) {
2602 Comment cmnt(masm_, "[ Property");
2603 Expression* key = expr->key();
2605 if (key->IsPropertyName()) {
2606 VisitForAccumulatorValue(expr->obj());
2607 EmitNamedPropertyLoad(expr);
2608 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2609 context()->Plug(r0);
2611 VisitForStackValue(expr->obj());
2612 VisitForAccumulatorValue(expr->key());
2614 EmitKeyedPropertyLoad(expr);
2615 context()->Plug(r0);
2620 void FullCodeGenerator::CallIC(Handle<Code> code,
2621 TypeFeedbackId ast_id) {
2625 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2631 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2632 Expression* callee = expr->expression();
2633 ZoneList<Expression*>* args = expr->arguments();
2634 int arg_count = args->length();
2638 if (callee->IsVariableProxy()) {
2639 { StackValueContext context(this);
2640 EmitVariableLoad(callee->AsVariableProxy());
2645 __ Push(isolate()->factory()->undefined_value());
2649 ASSERT(callee->IsProperty());
2651 EmitNamedPropertyLoad(callee->AsProperty());
2652 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2661 { PreservePositionScope scope(masm()->positions_recorder());
2662 for (int i = 0; i < arg_count; i++) {
2663 VisitForStackValue(args->at(i));
2668 SetSourcePosition(expr->position());
2669 CallFunctionStub stub(arg_count, flags);
2673 RecordJSReturnSite(expr);
2678 context()->DropAndPlug(1, r0);
2683 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2686 VisitForAccumulatorValue(key);
2688 Expression* callee = expr->expression();
2689 ZoneList<Expression*>* args = expr->arguments();
2690 int arg_count = args->length();
2693 ASSERT(callee->IsProperty());
2695 EmitKeyedPropertyLoad(callee->AsProperty());
2696 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2703 { PreservePositionScope scope(masm()->positions_recorder());
2704 for (int i = 0; i < arg_count; i++) {
2705 VisitForStackValue(args->at(i));
2710 SetSourcePosition(expr->position());
2715 RecordJSReturnSite(expr);
2719 context()->DropAndPlug(1, r0);
2723 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2725 ZoneList<Expression*>* args = expr->arguments();
2726 int arg_count = args->length();
2727 { PreservePositionScope scope(masm()->positions_recorder());
2728 for (int i = 0; i < arg_count; i++) {
2729 VisitForStackValue(args->at(i));
2733 SetSourcePosition(expr->position());
2735 Handle<Object> uninitialized =
2737 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2738 __ Move(r2, FeedbackVector());
2745 RecordJSReturnSite(expr);
2748 context()->DropAndPlug(1, r0);
2752 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2754 if (arg_count > 0) {
2757 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2772 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
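// Calls are dispatched on the call type: possibly-eval calls resolve the
// callee with kHiddenResolvePossiblyDirectEval, global and named-property
// calls go through a call IC, keyed-property calls use the keyed variant,
// and everything else goes through CallFunctionStub.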
2776 void FullCodeGenerator::VisitCall(Call* expr) {
2780 expr->return_is_recorded_ = false;
2783 Comment cmnt(masm_, "[ Call");
2784 Expression* callee = expr->expression();
2785 Call::CallType call_type = expr->GetCallType(isolate());
2787 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2792 ZoneList<Expression*>* args = expr->arguments();
2793 int arg_count = args->length();
2795 { PreservePositionScope pos_scope(masm()->positions_recorder());
2796 VisitForStackValue(callee);
2797 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2801 for (int i = 0; i < arg_count; i++) {
2802 VisitForStackValue(args->at(i));
2809 EmitResolvePossiblyDirectEval(arg_count);
2818 SetSourcePosition(expr->position());
2822 RecordJSReturnSite(expr);
2825 context()->DropAndPlug(1, r0);
2826 } else if (call_type == Call::GLOBAL_CALL) {
2827 EmitCallWithIC(expr);
2829 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2831 VariableProxy* proxy = callee->AsVariableProxy();
2834 { PreservePositionScope scope(masm()->positions_recorder());
2843 ASSERT(!context_register().is(r2));
2844 __ mov(r2, Operand(proxy->name()));
2845 __ Push(context_register(), r2);
2846 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2852 if (done.is_linked()) {
2860 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2867 EmitCallWithStub(expr);
2868 } else if (call_type == Call::PROPERTY_CALL) {
2869 Property* property = callee->AsProperty();
2870 { PreservePositionScope scope(masm()->positions_recorder());
2871 VisitForStackValue(property->obj());
2873 if (property->key()->IsPropertyName()) {
2874 EmitCallWithIC(expr);
2876 EmitKeyedCallWithIC(expr, property->key());
2879 ASSERT(call_type == Call::OTHER_CALL);
2881 { PreservePositionScope scope(masm()->positions_recorder());
2882 VisitForStackValue(callee);
2884 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2887 EmitCallWithStub(expr);
2892 ASSERT(expr->return_is_recorded_);
2897 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2898 Comment cmnt(masm_, "[ CallNew");
2906 VisitForStackValue(expr->expression());
2909 ZoneList<Expression*>* args = expr->arguments();
2910 int arg_count = args->length();
2911 for (int i = 0; i < arg_count; i++) {
2912 VisitForStackValue(args->at(i));
2917 SetSourcePosition(expr->position());
2920 __ mov(r0, Operand(arg_count));
2924 Handle<Object> uninitialized =
2926 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2927 if (FLAG_pretenuring_call_new) {
2928 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2929 isolate()->factory()->NewAllocationSite());
2930 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2931 expr->CallNewFeedbackSlot() + 1);
2934 __ Move(r2, FeedbackVector());
2938 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2939 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2940 context()->Plug(r0);
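// The %_Is* intrinsics below share one pattern: evaluate the argument into
// r0, let the expression context prepare true/false/fall-through labels,
// record a bailout point, and Split() on the tested condition.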
2944 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2945 ZoneList<Expression*>* args = expr->arguments();
2946 ASSERT(args->length() == 1);
2948 VisitForAccumulatorValue(args->at(0));
2950 Label materialize_true, materialize_false;
2951 Label* if_true = NULL;
2952 Label* if_false = NULL;
2953 Label* fall_through = NULL;
2954 context()->PrepareTest(&materialize_true, &materialize_false,
2955 &if_true, &if_false, &fall_through);
2957 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2959 Split(eq, if_true, if_false, fall_through);
2961 context()->Plug(if_true, if_false);
2965 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2966 ZoneList<Expression*>* args = expr->arguments();
2967 ASSERT(args->length() == 1);
2969 VisitForAccumulatorValue(args->at(0));
2971 Label materialize_true, materialize_false;
2972 Label* if_true = NULL;
2973 Label* if_false = NULL;
2974 Label* fall_through = NULL;
2975 context()->PrepareTest(&materialize_true, &materialize_false,
2976 &if_true, &if_false, &fall_through);
2978 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2979 __ NonNegativeSmiTst(r0);
2980 Split(eq, if_true, if_false, fall_through);
2982 context()->Plug(if_true, if_false);
2986 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2987 ZoneList<Expression*>* args = expr->arguments();
2988 ASSERT(args->length() == 1);
2990 VisitForAccumulatorValue(args->at(0));
2992 Label materialize_true, materialize_false;
2993 Label* if_true = NULL;
2994 Label* if_false = NULL;
2995 Label* fall_through = NULL;
2996 context()->PrepareTest(&materialize_true, &materialize_false,
2997 &if_true, &if_false, &fall_through);
2999 __ JumpIfSmi(r0, if_false);
3000 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(le, if_true, if_false, fall_through);
3015 context()->Plug(if_true, if_false);
3019 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3020 ZoneList<Expression*>* args = expr->arguments();
3021 ASSERT(args->length() == 1);
3023 VisitForAccumulatorValue(args->at(0));
3025 Label materialize_true, materialize_false;
3026 Label* if_true = NULL;
3027 Label* if_false = NULL;
3028 Label* fall_through = NULL;
3029 context()->PrepareTest(&materialize_true, &materialize_false,
3030 &if_true, &if_false, &fall_through);
3032 __ JumpIfSmi(r0, if_false);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(ge, if_true, if_false, fall_through);
3037 context()->Plug(if_true, if_false);
3041 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 ASSERT(args->length() == 1);
3045 VisitForAccumulatorValue(args->at(0));
3047 Label materialize_true, materialize_false;
3048 Label* if_true = NULL;
3049 Label* if_false = NULL;
3050 Label* fall_through = NULL;
3051 context()->PrepareTest(&materialize_true, &materialize_false,
3052 &if_true, &if_false, &fall_through);
3054 __ JumpIfSmi(r0, if_false);
3058 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3059 Split(ne, if_true, if_false, fall_through);
3061 context()->Plug(if_true, if_false);
3065 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3066 CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 ASSERT(args->length() == 1);
3070 VisitForAccumulatorValue(args->at(0));
3072 Label materialize_true, materialize_false, skip_lookup;
3073 Label* if_true = NULL;
3074 Label* if_false = NULL;
3075 Label* fall_through = NULL;
3076 context()->PrepareTest(&materialize_true, &materialize_false,
3077 &if_true, &if_false, &fall_through);
3079 __ AssertNotSmi(r0);
3084 __ b(ne, &skip_lookup);
3089 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3096 Label entry, loop, done;
3099 __ NumberOfOwnDescriptors(r3, r1);
3100 __ cmp(r3, Operand::Zero());
3103 __ LoadInstanceDescriptors(r1, r4);
3112 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
3118 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3126 __ cmp(r4, Operand(r2));
3136 __ bind(&skip_lookup);
3141 __ JumpIfSmi(r2, if_false);
3147 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3148 Split(eq, if_true, if_false, fall_through);
3150 context()->Plug(if_true, if_false);
3154 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3155 ZoneList<Expression*>* args = expr->arguments();
3156 ASSERT(args->length() == 1);
3158 VisitForAccumulatorValue(args->at(0));
3160 Label materialize_true, materialize_false;
3161 Label* if_true = NULL;
3162 Label* if_false = NULL;
3163 Label* fall_through = NULL;
3164 context()->PrepareTest(&materialize_true, &materialize_false,
3165 &if_true, &if_false, &fall_through);
3167 __ JumpIfSmi(r0, if_false);
3169 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3170 Split(eq, if_true, if_false, fall_through);
3172 context()->Plug(if_true, if_false);
3176 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3177 ZoneList<Expression*>* args = expr->arguments();
3178 ASSERT(args->length() == 1);
3180 VisitForAccumulatorValue(args->at(0));
3182 Label materialize_true, materialize_false;
3183 Label* if_true = NULL;
3184 Label* if_false = NULL;
3185 Label* fall_through = NULL;
3186 context()->PrepareTest(&materialize_true, &materialize_false,
3187 &if_true, &if_false, &fall_through);
3192 __ cmp(r2, Operand(0x80000000));
3193 __ cmp(r1, Operand(0x00000000), eq);
3195 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3196 Split(eq, if_true, if_false, fall_through);
3198 context()->Plug(if_true, if_false);
3202 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3203 ZoneList<Expression*>* args = expr->arguments();
3204 ASSERT(args->length() == 1);
3206 VisitForAccumulatorValue(args->at(0));
3208 Label materialize_true, materialize_false;
3209 Label* if_true = NULL;
3210 Label* if_false = NULL;
3211 Label* fall_through = NULL;
3212 context()->PrepareTest(&materialize_true, &materialize_false,
3213 &if_true, &if_false, &fall_through);
3215 __ JumpIfSmi(r0, if_false);
3217 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3218 Split(eq, if_true, if_false, fall_through);
3220 context()->Plug(if_true, if_false);
3224 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3225 ZoneList<Expression*>* args = expr->arguments();
3226 ASSERT(args->length() == 1);
3228 VisitForAccumulatorValue(args->at(0));
3230 Label materialize_true, materialize_false;
3231 Label* if_true = NULL;
3232 Label* if_false = NULL;
3233 Label* fall_through = NULL;
3234 context()->PrepareTest(&materialize_true, &materialize_false,
3235 &if_true, &if_false, &fall_through);
3237 __ JumpIfSmi(r0, if_false);
3239 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3240 Split(eq, if_true, if_false, fall_through);
3242 context()->Plug(if_true, if_false);
3247 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3248 ASSERT(expr->arguments()->length() == 0);
3250 Label materialize_true, materialize_false;
3251 Label* if_true = NULL;
3252 Label* if_false = NULL;
3253 Label* fall_through = NULL;
3254 context()->PrepareTest(&materialize_true, &materialize_false,
3255 &if_true, &if_false, &fall_through);
3268 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269 Split(eq, if_true, if_false, fall_through);
3271 context()->Plug(if_true, if_false);
3275 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 ASSERT(args->length() == 2);
3280 VisitForStackValue(args->at(0));
3281 VisitForAccumulatorValue(args->at(1));
3283 Label materialize_true, materialize_false;
3284 Label* if_true = NULL;
3285 Label* if_false = NULL;
3286 Label* fall_through = NULL;
3287 context()->PrepareTest(&materialize_true, &materialize_false,
3288 &if_true, &if_false, &fall_through);
3292 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3293 Split(eq, if_true, if_false, fall_through);
3295 context()->Plug(if_true, if_false);
3299 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3300 ZoneList<Expression*>* args = expr->arguments();
3301 ASSERT(args->length() == 1);
3305 VisitForAccumulatorValue(args->at(0));
3310 context()->Plug(r0);
3314 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3315 ASSERT(expr->arguments()->length() == 0);
3329 context()->Plug(r0);
3333 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3334 ZoneList<Expression*>* args = expr->arguments();
3335 ASSERT(args->length() == 1);
3336 Label done, null, function, non_function_constructor;
3338 VisitForAccumulatorValue(args->at(0));
3341 __ JumpIfSmi(r0, &null);
3353 __ b(eq, &function);
3358 __ b(eq, &function);
3365 __ b(ne, &non_function_constructor);
3375 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
3379 __ bind(&non_function_constructor);
3380 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3385 __ LoadRoot(r0, Heap::kNullValueRootIndex);
3390 context()->Plug(r0);
3394 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3402 ZoneList<Expression*>* args = expr->arguments();
3405 VisitForStackValue(args->at(1));
3406 VisitForStackValue(args->at(2));
3407 __ CallRuntime(Runtime::kHiddenLog, 2);
3411 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3412 context()->Plug(r0);
3416 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3419 ZoneList<Expression*>* args = expr->arguments();
3420 ASSERT(args->length() == 3);
3421 VisitForStackValue(args->at(0));
3422 VisitForStackValue(args->at(1));
3423 VisitForStackValue(args->at(2));
3425 context()->Plug(r0);
3429 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3431 RegExpExecStub stub;
3432 ZoneList<Expression*>* args = expr->arguments();
3433 ASSERT(args->length() == 4);
3434 VisitForStackValue(args->at(0));
3435 VisitForStackValue(args->at(1));
3436 VisitForStackValue(args->at(2));
3437 VisitForStackValue(args->at(3));
3439 context()->Plug(r0);
3443 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3444 ZoneList<Expression*>* args = expr->arguments();
3445 ASSERT(args->length() == 1);
3446 VisitForAccumulatorValue(args->at(0));
3450 __ JumpIfSmi(r0, &done);
3456 context()->Plug(r0);
3460 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3461 ZoneList<Expression*>* args = expr->arguments();
3462 ASSERT(args->length() == 2);
3464 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3466 VisitForAccumulatorValue(args->at(0));
3468 Label runtime, done, not_date_object;
3469 Register object = r0;
3470 Register result = r0;
3471 Register scratch0 = r9;
3472 Register scratch1 = r1;
3474 __ JumpIfSmi(object, &not_date_object);
3476 __ b(ne, &not_date_object);
3478 if (index->value() == 0) {
3483 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3484 __ mov(scratch1, Operand(stamp));
3487 __ cmp(scratch1, scratch0);
3490 kPointerSize * index->value()));
3494 __ PrepareCallCFunction(2, scratch1);
3495 __ mov(r1, Operand(index));
3496 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3500 __ bind(&not_date_object);
3501 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3503 context()->Plug(r0);
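// The next two helpers back the %_OneByteSeqStringSetChar and
// %_TwoByteSeqStringSetChar intrinsics: they store an untagged character
// value into a sequential string at a smi index, with the extra Check() /
// EmitSeqStringSetCharCheck() verification emitted only under --debug-code.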
3507 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3508 ZoneList<Expression*>* args = expr->arguments();
3511 Register string = r0;
3512 Register index = r1;
3513 Register value = r2;
3515 VisitForStackValue(args->at(1));
3516 VisitForStackValue(args->at(2));
3517 VisitForAccumulatorValue(args->at(0));
3518 __ Pop(index, value);
3520 if (FLAG_debug_code) {
3522 __ Check(eq, kNonSmiValue);
3524 __ Check(eq, kNonSmiIndex);
3525 __ SmiUntag(index, index);
3527 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3528 __ SmiTag(index, index);
3531 __ SmiUntag(value, value);
3536 context()->Plug(string);
3540 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3541 ZoneList<Expression*>* args = expr->arguments();
3544 Register string = r0;
3545 Register index = r1;
3546 Register value = r2;
3548 VisitForStackValue(args->at(1));
3549 VisitForStackValue(args->at(2));
3550 VisitForAccumulatorValue(args->at(0));
3551 __ Pop(index, value);
3553 if (FLAG_debug_code) {
3555 __ Check(eq, kNonSmiValue);
3557 __ Check(eq, kNonSmiIndex);
3558 __ SmiUntag(index, index);
3560 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3561 __ SmiTag(index, index);
3564 __ SmiUntag(value, value);
3570 context()->Plug(string);
3575 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3577 ZoneList<Expression*>* args = expr->arguments();
3578 ASSERT(args->length() == 2);
3579 VisitForStackValue(args->at(0));
3580 VisitForStackValue(args->at(1));
3583 context()->Plug(r0);
3587 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3588 ZoneList<Expression*>* args = expr->arguments();
3589 ASSERT(args->length() == 2);
3590 VisitForStackValue(args->at(0));
3591 VisitForAccumulatorValue(args->at(1));
3596 __ JumpIfSmi(r1, &done);
3607 __ RecordWriteField(
3611 context()->Plug(r0);
3615 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3616 ZoneList<Expression*>* args = expr->arguments();
3619 VisitForAccumulatorValue(args->at(0));
3621 NumberToStringStub stub;
3623 context()->Plug(r0);
3627 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3628 ZoneList<Expression*>* args = expr->arguments();
3629 ASSERT(args->length() == 1);
3630 VisitForAccumulatorValue(args->at(0));
3633 StringCharFromCodeGenerator generator(r0, r1);
3634 generator.GenerateFast(masm_);
3637 NopRuntimeCallHelper call_helper;
3638 generator.GenerateSlow(masm_, call_helper);
3641 context()->Plug(r1);
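// EmitStringCharCodeAt and EmitStringCharAt (below) wrap the
// StringCharCodeAtGenerator / StringCharAtGenerator fast paths. Out-of-range
// indices yield NaN or the empty string respectively, matching the JS
// semantics of "abc".charCodeAt(5) and "abc".charAt(5).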
3645 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3647 ASSERT(args->length() == 2);
3648 VisitForStackValue(args->at(0));
3649 VisitForAccumulatorValue(args->at(1));
3651 Register object = r1;
3652 Register index = r0;
3653 Register result = r3;
3657 Label need_conversion;
3658 Label index_out_of_range;
3660 StringCharCodeAtGenerator generator(object,
3665 &index_out_of_range,
3667 generator.GenerateFast(masm_);
3670 __ bind(&index_out_of_range);
3673 __ LoadRoot(result, Heap::kNanValueRootIndex);
3676 __ bind(&need_conversion);
3679 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3682 NopRuntimeCallHelper call_helper;
3683 generator.GenerateSlow(masm_, call_helper);
3686 context()->Plug(result);
3690 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3691 ZoneList<Expression*>* args = expr->arguments();
3692 ASSERT(args->length() == 2);
3693 VisitForStackValue(args->at(0));
3694 VisitForAccumulatorValue(args->at(1));
3696 Register object = r1;
3697 Register index = r0;
3698 Register scratch = r3;
3699 Register result = r0;
3703 Label need_conversion;
3704 Label index_out_of_range;
3706 StringCharAtGenerator generator(object,
3712 &index_out_of_range,
3714 generator.GenerateFast(masm_);
3717 __ bind(&index_out_of_range);
3720 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3723 __ bind(&need_conversion);
3729 NopRuntimeCallHelper call_helper;
3730 generator.GenerateSlow(masm_, call_helper);
3733 context()->Plug(result);
3737 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3738 ZoneList<Expression*>* args = expr->arguments();
3740 VisitForStackValue(args->at(0));
3741 VisitForAccumulatorValue(args->at(1));
3746 context()->Plug(r0);
3750 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3751 ZoneList<Expression*>* args = expr->arguments();
3753 VisitForStackValue(args->at(0));
3754 VisitForStackValue(args->at(1));
3756 StringCompareStub stub;
3758 context()->Plug(r0);
3762 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3764 ZoneList<Expression*>* args = expr->arguments();
3765 ASSERT(args->length() == 1);
3766 VisitForStackValue(args->at(0));
3767 __ CallRuntime(Runtime::kMath_log, 1);
3768 context()->Plug(r0);
3772 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3774 ZoneList<Expression*>* args = expr->arguments();
3775 ASSERT(args->length() == 1);
3776 VisitForStackValue(args->at(0));
3777 __ CallRuntime(Runtime::kMath_sqrt, 1);
3778 context()->Plug(r0);
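// EmitCallFunction (below) implements the %_CallFunction intrinsic: the
// receiver and arguments are pushed, the callee ends up in the accumulator,
// and a genuine JSFunction is invoked directly while anything else is
// routed through Runtime::kCall.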
3782 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3783 ZoneList<Expression*>* args = expr->arguments();
3784 ASSERT(args->length() >= 2);
3786 int arg_count = args->length() - 2;
3787 for (int i = 0; i < arg_count + 1; i++) {
3788 VisitForStackValue(args->at(i));
3790 VisitForAccumulatorValue(args->last());
3792 Label runtime, done;
3794 __ JumpIfSmi(r0, &runtime);
3799 __ mov(r1, result_register());
3800 ParameterCount count(arg_count);
3807 __ CallRuntime(Runtime::kCall, args->length());
3810 context()->Plug(r0);
3814 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3815 RegExpConstructResultStub stub;
3816 ZoneList<Expression*>* args = expr->arguments();
3817 ASSERT(args->length() == 3);
3818 VisitForStackValue(args->at(0));
3819 VisitForStackValue(args->at(1));
3820 VisitForAccumulatorValue(args->at(2));
3824 context()->Plug(r0);
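// EmitGetFromCache (below) reads from one of the native context's JS
// function result caches (apparently used by self-hosted builtins). A hit
// is looked up in the fixed-array cache by key; a miss falls back to
// Runtime::kHiddenGetFromCache.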
3828 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3829 ZoneList<Expression*>* args = expr->arguments();
3832 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3834 Handle<FixedArray> jsfunction_result_caches(
3835 isolate()->native_context()->jsfunction_result_caches());
3836 if (jsfunction_result_caches->length() <= cache_id) {
3837 __ Abort(kAttemptToUseUndefinedCache);
3838 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3839 context()->Plug(r0);
3843 VisitForAccumulatorValue(args->at(1));
3846 Register cache = r1;
3854 Label done, not_found;
3859 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3862 __ b(ne, &not_found);
3867 __ bind(&not_found);
3869 __ Push(cache, key);
3870 __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3873 context()->Plug(r0);
3877 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3878 ZoneList<Expression*>* args = expr->arguments();
3879 VisitForAccumulatorValue(args->at(0));
3881 Label materialize_true, materialize_false;
3882 Label* if_true = NULL;
3883 Label* if_false = NULL;
3884 Label* fall_through = NULL;
3885 context()->PrepareTest(&materialize_true, &materialize_false,
3886 &if_true, &if_false, &fall_through);
3890 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3891 Split(eq, if_true, if_false, fall_through);
3893 context()->Plug(if_true, if_false);
3897 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3898 ZoneList<Expression*>* args = expr->arguments();
3899 ASSERT(args->length() == 1);
3900 VisitForAccumulatorValue(args->at(0));
3902 __ AssertString(r0);
3905 __ IndexFromHash(r0, r0);
3907 context()->Plug(r0);
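// EmitFastAsciiArrayJoin (below) is the fast path behind Array.prototype.join
// for arrays of sequential one-byte (ASCII) strings. It bails out for smis,
// non-fast elements, non-ASCII strings, and string-length overflow; otherwise
// it sizes and allocates the result and fills it with CopyBytes, using
// separate copy loops for an empty, single-character, or longer separator.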
3911 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3912 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3913 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3914 one_char_separator_loop_entry, long_separator_loop;
3915 ZoneList<Expression*>* args = expr->arguments();
3916 ASSERT(args->length() == 2);
3917 VisitForStackValue(args->at(1));
3918 VisitForAccumulatorValue(args->at(0));
3921 Register array = r0;
3922 Register elements = no_reg;
3923 Register result = no_reg;
3924 Register separator = r1;
3925 Register array_length = r2;
3926 Register result_pos = no_reg;
3927 Register string_length = r3;
3928 Register string = r4;
3929 Register element = r5;
3930 Register elements_end = r6;
3931 Register scratch = r9;
3937 __ JumpIfSmi(array, &bailout);
3942 __ CheckFastElements(scratch, array_length, &bailout);
3946 __ SmiUntag(array_length, SetCC);
3947 __ b(ne, &non_trivial_array);
3948 __ LoadRoot(r0, Heap::kempty_stringRootIndex);
3951 __ bind(&non_trivial_array);
3960 __ mov(string_length, Operand::Zero());
3972 if (generate_debug_code_) {
3973 __ cmp(array_length, Operand::Zero());
3974 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3978 __ JumpIfSmi(string, &bailout);
3981 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
3983 __ add(string_length, string_length, Operand(scratch), SetCC);
3985 __ cmp(element, elements_end);
3989 __ cmp(array_length, Operand(1));
3990 __ b(ne, &not_size_one_array);
3994 __ bind(&not_size_one_array);
4003 __ JumpIfSmi(separator, &bailout);
4006 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
4012 __ sub(string_length, string_length, Operand(scratch));
4013 __ smull(scratch, ip, array_length, scratch);
4016 __ cmp(ip, Operand::Zero());
4018 __ tst(scratch, Operand(0x80000000));
4020 __ add(string_length, string_length, Operand(scratch), SetCC);
4022 __ SmiUntag(string_length);
4035 __ AllocateAsciiString(result,
4045 result_pos = array_length;
4054 __ b(eq, &one_char_separator);
4055 __ b(gt, &long_separator);
4058 __ bind(&empty_separator_loop);
4067 __ SmiUntag(string_length);
4071 __ CopyBytes(string, result_pos, string_length, scratch);
4072 __ cmp(element, elements_end);
4073 __ b(lt, &empty_separator_loop);
4078 __ bind(&one_char_separator);
4083 __ jmp(&one_char_separator_loop_entry);
4085 __ bind(&one_char_separator_loop);
4096 __ bind(&one_char_separator_loop_entry);
4099 __ SmiUntag(string_length);
4103 __ CopyBytes(string, result_pos, string_length, scratch);
4104 __ cmp(element, elements_end);
4105 __ b(lt, &one_char_separator_loop);
4111 __ bind(&long_separator_loop);
4120 __ SmiUntag(string_length);
4124 __ CopyBytes(string, result_pos, string_length, scratch);
4126 __ bind(&long_separator);
4129 __ SmiUntag(string_length);
4133 __ CopyBytes(string, result_pos, string_length, scratch);
4134 __ cmp(element, elements_end);
4135 __ b(lt, &long_separator_loop);
4140 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4142 context()->Plug(r0);
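// VisitCallRuntime (below) dispatches %-calls: intrinsics with an inline
// implementation go through EmitInlineRuntimeCall, JS runtime calls look up
// the named function on the builtins object before calling it, and the rest
// push their arguments and call the C++ runtime function directly.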
4146 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4147 if (expr->function() != NULL &&
4149 Comment cmnt(masm_, "[ InlineRuntimeCall");
4150 EmitInlineRuntimeCall(expr);
4154 Comment cmnt(masm_, "[ CallRuntime");
4155 ZoneList<Expression*>* args = expr->arguments();
4156 int arg_count = args->length();
4158 if (expr->is_jsruntime()) {
4165 __ mov(r2, Operand(expr->name()));
4174 int arg_count = args->length();
4175 for (int i = 0; i < arg_count; i++) {
4176 VisitForStackValue(args->at(i));
4180 SetSourcePosition(expr->position());
4188 context()->DropAndPlug(1, r0);
4191 for (int i = 0; i < arg_count; i++) {
4192 VisitForStackValue(args->at(i));
4196 __ CallRuntime(expr->function(), arg_count);
4197 context()->Plug(r0);
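// VisitUnaryOperation (below) covers delete, void, ! and typeof. In JS terms:
//   delete o.p   // property deletion via the DELETE builtin
//   void f()     // evaluate f() for effect, result is undefined
//   !x           // evaluated as a control-flow test where possible
//   typeof x     // value computed in a StackValueContext, then Runtime::kTypeof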
4202 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4203 switch (expr->op()) {
4204 case Token::DELETE: {
4205 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4206 Property* property = expr->expression()->AsProperty();
4207 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4209 if (property != NULL) {
4210 VisitForStackValue(property->obj());
4211 VisitForStackValue(property->key());
4215 context()->Plug(r0);
4216 } else if (proxy != NULL) {
4217 Variable* var = proxy->var();
4221 if (var->IsUnallocated()) {
4223 __ mov(r1, Operand(var->name()));
4227 context()->Plug(r0);
4228 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4231 context()->Plug(var->is_this());
4235 ASSERT(!context_register().is(r2));
4236 __ mov(r2, Operand(var->name()));
4237 __ Push(context_register(), r2);
4238 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4239 context()->Plug(r0);
4244 VisitForEffect(expr->expression());
4245 context()->Plug(true);
4251 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4252 VisitForEffect(expr->expression());
4253 context()->Plug(Heap::kUndefinedValueRootIndex);
4258 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4259 if (context()->IsEffect()) {
4262 VisitForEffect(expr->expression());
4263 } else if (context()->IsTest()) {
4264 const TestContext* test = TestContext::cast(context());
4266 VisitForControl(expr->expression(),
4267 test->false_label(),
4269 test->fall_through());
4270 context()->Plug(test->true_label(), test->false_label());
4276 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4277 Label materialize_true, materialize_false, done;
4278 VisitForControl(expr->expression(),
4282 __ bind(&materialize_true);
4283 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4284 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4285 if (context()->IsStackValue()) __ push(r0);
4287 __ bind(&materialize_false);
4288 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4289 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4290 if (context()->IsStackValue()) __ push(r0);
4296 case Token::TYPEOF: {
4297 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4298 { StackValueContext context(this);
4299 VisitForTypeofValue(expr->expression());
4301 __ CallRuntime(Runtime::kTypeof, 1);
4302 context()->Plug(r0);
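// VisitCountOperation (below) handles ++ and --. The postfix forms keep the
// old value around so that, for example:
//   var x = 5;
//   var y = x++;   // y == 5, x == 6
// A smi fast path adds or subtracts 1 inline behind a JumpPatchSite;
// non-smi operands go through ToNumberStub and the generic binary-op stub.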
4312 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4313 ASSERT(expr->expression()->IsValidLeftHandSide());
4315 Comment cmnt(masm_, "[ CountOperation");
4316 SetSourcePosition(expr->position());
4320 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4321 LhsKind assign_type = VARIABLE;
4322 Property* prop = expr->expression()->AsProperty();
4327 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4331 if (assign_type == VARIABLE) {
4332 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4333 AccumulatorValueContext context(this);
4334 EmitVariableLoad(expr->expression()->AsVariableProxy());
4337 if (expr->is_postfix() && !context()->IsEffect()) {
4341 if (assign_type == NAMED_PROPERTY) {
4343 VisitForAccumulatorValue(prop->obj());
4345 EmitNamedPropertyLoad(prop);
4347 VisitForStackValue(prop->obj());
4348 VisitForAccumulatorValue(prop->key());
4351 EmitKeyedPropertyLoad(prop);
4357 if (assign_type == VARIABLE) {
4358 PrepareForBailout(expr->expression(), TOS_REG);
4360 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4364 Label stub_call, done;
4365 JumpPatchSite patch_site(masm_);
4367 int count_value = expr->op() == Token::INC ? 1 : -1;
4368 if (ShouldInlineSmiCase(expr->op())) {
4370 patch_site.EmitJumpIfNotSmi(r0, &slow);
4373 if (expr->is_postfix()) {
4374 if (!context()->IsEffect()) {
4378 switch (assign_type) {
4382 case NAMED_PROPERTY:
4385 case KEYED_PROPERTY:
4399 ToNumberStub convert_stub;
4400 __ CallStub(&convert_stub);
4403 if (expr->is_postfix()) {
4404 if (!context()->IsEffect()) {
4408 switch (assign_type) {
4412 case NAMED_PROPERTY:
4415 case KEYED_PROPERTY:
4423 __ bind(&stub_call);
4428 SetSourcePosition(expr->position());
4431 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4432 patch_site.EmitPatchInfo();
4436 switch (assign_type) {
4438 if (expr->is_postfix()) {
4439 { EffectContext context(this);
4440 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4442 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4447 if (!context()->IsEffect()) {
4448 context()->PlugTOS();
4451 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4453 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4454 context()->Plug(r0);
4457 case NAMED_PROPERTY: {
4458 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
4460 CallStoreIC(expr->CountStoreFeedbackId());
4461 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4462 if (expr->is_postfix()) {
4463 if (!context()->IsEffect()) {
4464 context()->PlugTOS();
4467 context()->Plug(r0);
4471 case KEYED_PROPERTY: {
4473 Handle<Code> ic = strict_mode() == SLOPPY
4474 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4475 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4476 CallIC(ic, expr->CountStoreFeedbackId());
4477 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4478 if (expr->is_postfix()) {
4479 if (!context()->IsEffect()) {
4480 context()->PlugTOS();
4483 context()->Plug(r0);
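// VisitForTypeofValue (below) loads a value for use as a typeof operand.
// Unlike a normal variable load it must not throw a ReferenceError for
// undeclared globals, hence the "no reference error" load paths, e.g.:
//   typeof undeclared   // "undefined", not a ReferenceError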
4491 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4492 ASSERT(!context()->IsEffect());
4493 ASSERT(!context()->IsTest());
4494 VariableProxy* proxy = expr->AsVariableProxy();
4495 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4496 Comment cmnt(masm_, "[ Global variable");
4498 __ mov(r2, Operand(proxy->name()));
4502 PrepareForBailout(expr, TOS_REG);
4503 context()->Plug(r0);
4504 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4505 Comment cmnt(masm_, "[ Lookup slot");
4510 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4513 __ mov(r0, Operand(proxy->name()));
4515 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4516 PrepareForBailout(expr, TOS_REG);
4519 context()->Plug(r0);
4522 VisitInDuplicateContext(expr);
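// EmitLiteralCompareTypeof (below) specializes comparisons of the form
// typeof x == "literal", branching directly on the map / instance type
// instead of materializing the typeof string, e.g.:
//   typeof x == "number"     // smi check or heap-number map
//   typeof x == "undefined"  // the undefined value or an undetectable object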
4527 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4528 Expression* sub_expr,
4529 Handle<String> check) {
4530 Label materialize_true, materialize_false;
4531 Label* if_true = NULL;
4532 Label* if_false = NULL;
4533 Label* fall_through = NULL;
4534 context()->PrepareTest(&materialize_true, &materialize_false,
4535 &if_true, &if_false, &fall_through);
4537 { AccumulatorValueContext context(this);
4538 VisitForTypeofValue(sub_expr);
4540 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4542 if (check->Equals(isolate()->heap()->number_string())) {
4543 __ JumpIfSmi(r0, if_true);
4545 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4547 Split(eq, if_true, if_false, fall_through);
4548 } else if (check->Equals(isolate()->heap()->string_string())) {
4549 __ JumpIfSmi(r0, if_false);
4555 Split(eq, if_true, if_false, fall_through);
4556 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4557 __ JumpIfSmi(r0, if_false);
4559 Split(eq, if_true, if_false, fall_through);
4560 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4561 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4563 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4564 Split(eq, if_true, if_false, fall_through);
4565 } else if (FLAG_harmony_typeof &&
4566 check->Equals(isolate()->heap()->null_string())) {
4567 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4568 Split(eq, if_true, if_false, fall_through);
4569 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4570 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4572 __ JumpIfSmi(r0, if_false);
4577 Split(ne, if_true, if_false, fall_through);
4579 } else if (check->Equals(isolate()->heap()->function_string())) {
4580 __ JumpIfSmi(r0, if_false);
4585 Split(eq, if_true, if_false, fall_through);
4586 } else if (check->Equals(isolate()->heap()->object_string())) {
4587 __ JumpIfSmi(r0, if_false);
4588 if (!FLAG_harmony_typeof) {
4589 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4600 Split(eq, if_true, if_false, fall_through);
4602 if (if_false != fall_through) __ jmp(if_false);
4604 context()->Plug(if_true, if_false);
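// VisitCompareOperation (below) first tries TryLiteralCompare (the typeof /
// nil special cases), then handles `in` and `instanceof` through builtins
// and stubs, and finally emits a generic CompareIC call with an inline smi
// fast path guarded by a JumpPatchSite.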
4608 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4609 Comment cmnt(masm_, "[ CompareOperation");
4610 SetSourcePosition(expr->position());
4614 if (TryLiteralCompare(expr)) return;
4618 Label materialize_true, materialize_false;
4619 Label* if_true = NULL;
4620 Label* if_false = NULL;
4621 Label* fall_through = NULL;
4622 context()->PrepareTest(&materialize_true, &materialize_false,
4623 &if_true, &if_false, &fall_through);
4626 VisitForStackValue(expr->left());
4629 VisitForStackValue(expr->right());
4631 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4632 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4634 Split(eq, if_true, if_false, fall_through);
4637 case Token::INSTANCEOF: {
4638 VisitForStackValue(expr->right());
4641 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4644 Split(eq, if_true, if_false, fall_through);
4649 VisitForAccumulatorValue(expr->right());
4653 bool inline_smi_code = ShouldInlineSmiCase(op);
4654 JumpPatchSite patch_site(masm_);
4655 if (inline_smi_code) {
4657 __ orr(r2, r0, Operand(r1));
4658 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4660 Split(cond, if_true, if_false, NULL);
4661 __ bind(&slow_case);
4665 SetSourcePosition(expr->position());
4667 CallIC(ic, expr->CompareOperationFeedbackId());
4668 patch_site.EmitPatchInfo();
4669 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4670 __ cmp(r0, Operand::Zero());
4671 Split(cond, if_true, if_false, fall_through);
4677 context()->Plug(if_true, if_false);
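// EmitLiteralCompareNil (below) handles comparisons against null/undefined
// literals. A strict comparison (===) reduces to a root check against the
// corresponding nil value; the loose form goes through a CompareNilIC, since
// e.g. `x == null` must also hold for undefined and for undetectable objects.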
4681 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4682 Expression* sub_expr,
4684 Label materialize_true, materialize_false;
4685 Label* if_true = NULL;
4686 Label* if_false = NULL;
4687 Label* fall_through = NULL;
4688 context()->PrepareTest(&materialize_true, &materialize_false,
4689 &if_true, &if_false, &fall_through);
4691 VisitForAccumulatorValue(sub_expr);
4692 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4693 if (expr->op() == Token::EQ_STRICT) {
4695 Heap::kNullValueRootIndex :
4696 Heap::kUndefinedValueRootIndex;
4697 __ LoadRoot(r1, nil_value);
4699 Split(eq, if_true, if_false, fall_through);
4702 CallIC(ic, expr->CompareOperationFeedbackId());
4703 __ cmp(r0, Operand(0));
4704 Split(ne, if_true, if_false, fall_through);
4706 context()->Plug(if_true, if_false);
4710 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4712 context()->Plug(r0);
4716 Register FullCodeGenerator::result_register() {
4721 Register FullCodeGenerator::context_register() {
4726 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4732 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4737 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4739 if (declaration_scope->is_global_scope() ||
4740 declaration_scope->is_module_scope()) {
4746 } else if (declaration_scope->is_eval_scope()) {
4752 ASSERT(declaration_scope->is_function_scope());
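// EnterFinallyBlock / ExitFinallyBlock (below) save and restore the state a
// finally clause must preserve: the result register, the return address
// (kept relative to the code object so the GC can move the code), and the
// isolate's pending-message object, flag, and script.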
4762 void FullCodeGenerator::EnterFinallyBlock() {
4765 __ push(result_register());
4767 __ sub(r1, lr, Operand(masm_->CodeObject()));
4774 ExternalReference pending_message_obj =
4775 ExternalReference::address_of_pending_message_obj(isolate());
4776 __ mov(ip, Operand(pending_message_obj));
4780 ExternalReference has_pending_message =
4781 ExternalReference::address_of_has_pending_message(isolate());
4782 __ mov(ip, Operand(has_pending_message));
4787 ExternalReference pending_message_script =
4788 ExternalReference::address_of_pending_message_script(isolate());
4789 __ mov(ip, Operand(pending_message_script));
4795 void FullCodeGenerator::ExitFinallyBlock() {
4799 ExternalReference pending_message_script =
4800 ExternalReference::address_of_pending_message_script(isolate());
4801 __ mov(ip, Operand(pending_message_script));
4806 ExternalReference has_pending_message =
4807 ExternalReference::address_of_has_pending_message(isolate());
4808 __ mov(ip, Operand(has_pending_message));
4812 ExternalReference pending_message_obj =
4813 ExternalReference::address_of_pending_message_obj(isolate());
4814 __ mov(ip, Operand(pending_message_obj));
4821 __ pop(result_register());
4823 __ add(pc, r1, Operand(masm_->CodeObject()));
4829 #define __ ACCESS_MASM(masm())
4833 int* context_length) {
4840 __ Drop(*stack_depth);
4841 if (*context_length > 0) {
4847 __ bl(finally_entry_);
4850 *context_length = 0;
4860 if (!FLAG_enable_ool_constant_pool) {
4868 return load_address;
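// BackEdgeTable::PatchAt (below) appears to rewrite the back-edge check at
// `pc`: the conditional branch that skips the interrupt call is restored for
// plain interrupt checks and replaced with a nop for on-stack replacement,
// and the nearby constant-pool load is repointed at the replacement
// builtin's entry address.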
4874 BackEdgeState target_state,
4875 Code* replacement_code) {
4877 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4878 Address branch_address = pc_immediate_load_address - kInstrSize;
4879 CodePatcher patcher(branch_address, 1);
4880 switch (target_state) {
4893 int branch_offset = pc + kInstrSize - branch_address;
4894 patcher.masm()->b(branch_offset, pl);
4906 patcher.masm()->nop();
4912 replacement_code->entry());
4914 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4915 unoptimized_code, pc_immediate_load_address, replacement_code);
4921 Code* unoptimized_code,
4926 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4927 Address branch_address = pc_immediate_load_address - kInstrSize;
4929 pc_immediate_load_address, unoptimized_code);
4932 ASSERT(interrupt_address ==
4933 isolate->builtins()->InterruptCheck()->entry());
4939 if (interrupt_address ==
4940 isolate->builtins()->OnStackReplacement()->entry()) {
4944 ASSERT(interrupt_address ==
4945 isolate->builtins()->OsrAfterStackCheck()->entry());
4952 #endif // V8_TARGET_ARCH_ARM