#if V8_TARGET_ARCH_MIPS

#define __ ACCESS_MASM(masm_)

  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

  ASSERT(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ BranchShort(target, eq, at, Operand(zero_reg));

  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ BranchShort(target, ne, at, Operand(zero_reg));

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
      info_emitted_ = true;

  MacroAssembler* masm_;
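
// The patch-site helper above records where the inlined smi check was
// emitted.  EmitPatchInfo() encodes the distance back to that check in an
// `andi zero_reg, reg, delta` instruction, which is a no-op at runtime but
// lets the IC machinery locate and rewrite the smi check once type feedback
// is available (see the JumpPatchSite uses in the switch-case and binary-op
// code below).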
static void EmitStackCheck(MacroAssembler* masm_,
                           Register stack_limit_scratch,
                           int pointers = 0,
                           Register scratch = sp) {
  Isolate* isolate = masm_->isolate();
  ASSERT(scratch.is(sp) == (pointers == 0));
  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
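
// EmitStackCheck compares a stack value (sp itself, or a scratch register
// holding sp minus the space about to be allocated when `pointers` is
// non-zero) against the isolate's stack-limit root, and calls the StackCheck
// builtin when the limit has been crossed so the runtime can handle
// interrupts or report overflow.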
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  InitializeFeedbackVector();

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {

  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, a2, locals_count, t5);
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        __ bind(&loop_header);
        for (int i = 0; i < kMaxPushes; i++) {
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      int remaining = locals_count % kMaxPushes;
      for (int i = 0; i < remaining; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (strict_mode() == STRICT) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);
    SetVar(arguments, v0, a1, a2);

  __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    EmitStackCheck(masm_, at);

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  EmitReturnSequence();
void FullCodeGenerator::ClearAccumulator() {
  __ mov(v0, zero_reg);

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    reset_value = FLAG_interrupt_budget >> 4;
  __ li(a2, Operand(profiling_counter_));

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
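
// Back-edge bookkeeping decrements the per-function profiling counter by a
// weight proportional to the size of the loop body.  When the counter goes
// negative the InterruptCheck builtin is called, which is how full-codegen
// code triggers on-stack replacement and other interrupts; the counter is
// then reset and the back edge is recorded for the OSR entry id.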
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
    __ bind(&return_label_);
    __ CallRuntime(Runtime::kTraceExit, 1);
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      int distance = masm_->pc_offset();
          Max(1, distance / kCodeSizeMultiplier));
    EmitProfilingCounterDecrement(weight);
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    EmitProfilingCounterReset();

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    int no_frame_start = masm_->pc_offset();
    masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
    masm_->Addu(sp, sp, Operand(sp_delta));
              masm_->InstructionsGeneratedSince(&check_exit_codesize));
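
// The return sequence restores fp and ra, drops the receiver and arguments
// (sp_delta), and returns to the caller.  The check_exit_codesize label and
// the InstructionsGeneratedSince check exist because the debugger patches
// this sequence in place and therefore relies on it having a fixed length.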
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::AccumulatorValueContext::Plug(
  __ LoadRoot(result_register(), index);

void FullCodeGenerator::StackValueContext::Plug(
  __ LoadRoot(result_register(), index);
  __ push(result_register());

  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    Register reg) const {
  __ Move(result_register(), reg);

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);

void FullCodeGenerator::EffectContext::Plug(bool flag) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (true_label_ != fall_through_) __ Branch(true_label_);
  if (false_label_ != fall_through_) __ Branch(false_label_);
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  __ mov(a0, result_register());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
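
// DoTest delegates truth conversion to the ToBoolean IC (the code object
// referenced by `ic` above); the stub leaves a non-zero value in v0 when the
// condition is truthy, so the Split on v0 != 0 selects the true or false
// branch, falling through where possible.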
                             Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
    return StackOperand(var);

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  __ lw(dest, location);

void FullCodeGenerator::SetVar(Variable* var,
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ sw(t0, StackOperand(variable));

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
        __ mov(a0, zero_reg);
        __ Push(cp, a2, a1, a0);
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));

      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(cp,

      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->interface()->IsFrozen());
  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);
  __ RecordWriteContextSlot(cp,
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
  Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ li(a1, Operand(pairs));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);
      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Branch(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Branch(clause->body_target());

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Branch(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
    __ Branch(default_clause->body_target());

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
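
// Each case comparison first tries an inlined smi equality check guarded by
// a JumpPatchSite: if either operand is not a smi, control falls through to
// the generic compare IC, and patch_site.EmitPatchInfo() records where the
// inlined check lives so the IC can patch it once type feedback is
// available.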
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ bind(&done_convert);

  __ GetObjectType(a0, a1, a1);
  __ CheckEnumCache(null_value, &call_runtime);
  __ Branch(&use_cache);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  Label no_descriptors;
  __ bind(&use_cache);
  __ EnumLength(a1, v0);
  __ LoadInstanceDescriptors(v0, a2);
  __ Push(v0, a2, a1, a0);

  __ bind(&no_descriptors);

  __ bind(&fixed_array);
  Handle<Object> feedback = Handle<Object>(
  StoreFeedbackVectorSlot(slot, feedback);
  __ li(a1, FeedbackVector());
  __ GetObjectType(a2, a3, a3);
  __ bind(&non_proxy);

  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
  __ addu(t0, a2, t0);
  __ Branch(&update_each, eq, t0, Operand(a2));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  __ bind(&update_each);
  __ mov(result_register(), a3);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitBackEdgeBookkeeping(stmt, &loop);

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
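
// Roughly: the for-in loop prefers the receiver map's enum cache.  If
// CheckEnumCache succeeds the cached keys from the instance descriptors are
// used directly; otherwise GetPropertyNamesFast is called, and on that
// fixed-array path each key must additionally be re-checked against the
// current map (the update_each branches above) to filter out properties
// deleted or shadowed while iterating.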
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->assign_iterator());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));

  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ bind(&done_convert);

  __ bind(loop_statement.continue_label());
  VisitForEffect(stmt->next_result());

  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
  __ bind(&result_not_done);

  VisitForEffect(stmt->assign_each());

  Visit(stmt->body());

  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
    __ li(a2, Operand(info));
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  context()->Plug(v0);
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
  Register current = cp;
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        __ Branch(slow, ne, temp, Operand(zero_reg));
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s->is_eval_scope()) {
      if (!current.is(next)) {
        __ Move(next, current);
      __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
      __ Branch(&fast, eq, temp, Operand(t0));
      __ Branch(slow, ne, temp, Operand(zero_reg));

  __ li(a2, Operand(var->name()));

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = cp;
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        __ Branch(slow, ne, temp, Operand(zero_reg));
  __ Branch(slow, ne, temp, Operand(zero_reg));

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  switch (var->location()) {
      Comment cmnt(masm_, "[ Global variable");
      __ li(a2, Operand(var->name()));
      context()->Plug(v0);

      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);
          if (var->mode() == LET || var->mode() == CONST) {
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);
          context()->Plug(v0);
      context()->Plug(var);

      Comment cmnt(masm_, "[ Lookup variable");
      __ li(a1, Operand(var->name()));
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      context()->Plug(v0);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);

  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);

  __ bind(&allocated);
  context()->Plug(v0);

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    VisitForStackValue(expression);
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
      flags != ObjectLiteral::kFastElements ||
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->value()));
            CallStoreIC(key->LiteralFeedbackId());
            VisitForEffect(value);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::PROTOTYPE:
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);
    context()->PlugTOS();
    context()->Plug(v0);
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  bool has_fast_elements =
  Handle<FixedArrayBase> constant_elements_values(

  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
  __ mov(a0, result_register());
  __ li(a1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        allocation_site_mode,
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
           FLAG_smi_only_arrays);
    if (has_fast_elements) {
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (!result_saved) {
      result_saved = true;
    VisitForAccumulatorValue(subexpr);
      __ RecordWriteField(a1, offset, result_register(), a2,
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

    context()->PlugTOS();
    context()->Plug(v0);
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ Assignment");

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);
    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      EmitCreateIteratorResult(false);
      __ push(result_register());
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ bind(&continuation);
      VisitForAccumulatorValue(expr->generator_object());
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();
      context()->Plug(result_register());

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);

      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(a2, Heap::kthrow_stringRootIndex);
      __ Push(a2, a3, a0);

      __ PushTryHandler(StackHandler::CATCH, expr->index());
      __ bind(&l_continuation);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      EmitReturnSequence();

      __ LoadRoot(a2, Heap::knext_stringRootIndex);
      __ Push(a2, a3, a0);

      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      __ LoadRoot(a2, Heap::kdone_stringRootIndex);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      __ LoadRoot(a2, Heap::kvalue_stringRootIndex);
      context()->DropAndPlug(2, v0);
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);

  Label wrong_state, closed_state, done;
  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));

  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ jmp(&push_argument_holes);

  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ bind(&resume_frame);
  __ Addu(fp, sp, 2 * kPointerSize);

  __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
  __ Addu(a3, a3, Operand(a2));
  __ bind(&slow_resume);

  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  ASSERT(!result_register().is(a1));
  __ Push(a1, result_register());
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  __ stop("not-reached");

  __ bind(&closed_state);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  EmitCreateIteratorResult(true);
  __ CallRuntime(Runtime::kHiddenThrow, 1);

  __ bind(&wrong_state);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  context()->Plug(result_register());
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);

  __ bind(&gc_required);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ lw(context_register(),

  __ bind(&allocated);
  __ li(a1, Operand(map));
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
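
// EmitCreateIteratorResult allocates a JSObject with the native context's
// generator_result_map and fills in its value and done fields (done is a
// compile-time constant here), falling back to a runtime allocation when
// new space is exhausted.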
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->value()));

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;
  Register right = a0;
  __ mov(a0, result_register());

  __ Or(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpICStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();

      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ mov(v0, zero_reg);
      __ Or(v0, left, Operand(right));
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));

  context()->Plug(v0);
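
// The smi fast path above handles the common binary operators on untagged
// smi values: shift counts are masked to 5 bits, SHL and SHR bail out to the
// BinaryOpIC stub when the result would not fit in a smi, ADD and SUB use the
// overflow-checking macro instructions, and MUL checks both overflow (high
// word vs. sign of the low word from Mult) and the negative-zero case before
// committing the result; the bitwise ops never overflow.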
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
  __ mov(a0, result_register());
  BinaryOpICStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidLeftHandSide());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
    assign_type = (prop->key()->IsPropertyName())

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
    case NAMED_PROPERTY: {
      __ push(result_register());
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));
    case KEYED_PROPERTY: {
      __ push(result_register());
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  context()->Plug(v0);
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    __ Move(a3, result_register());
    __ RecordWriteContextSlot(

void FullCodeGenerator::EmitCallStoreContextSlot(
  __ li(a1, Operand(name));
  __ Push(v0, cp, a1, a0);
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
  } else if (op == Token::INIT_CONST_LEGACY) {
    ASSERT(!var->IsParameter());
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
      ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
      if (generate_debug_code_ && op == Token::INIT_LET) {
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      EmitStoreToStackLocalOrContextSlot(var, location);
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  SetSourcePosition(expr->position());
  __ mov(a0, result_register());
  __ li(a2, Operand(prop->key()->AsLiteral()->value()));
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());

  Handle<Code> ic = strict_mode() == SLOPPY
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  __ Call(code, RelocInfo::CODE_TARGET, id);
void FullCodeGenerator::EmitCallWithIC(Call* expr) {
  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
    __ Push(isolate()->factory()->undefined_value());
    ASSERT(callee->IsProperty());
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, v0);
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  ASSERT(callee->IsProperty());
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, v0);
void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
  __ li(a2, FeedbackVector());
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, v0);
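
// Calls compiled here record type feedback through the feedback vector: the
// call's feedback slot is seeded with the uninitialized sentinel, the vector
// is loaded into a2, and the call stub then updates that slot with the
// observed target so the optimizing compiler can specialize the call later.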
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      EmitResolvePossiblyDirectEval(arg_count);

    SetSourcePosition(expr->position());
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    VariableProxy* proxy = callee->AsVariableProxy();
    { PreservePositionScope scope(masm()->positions_recorder());
      ASSERT(!context_register().is(a2));
      __ li(a2, Operand(proxy->name()));
      __ Push(context_register(), a2);
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);

    if (done.is_linked()) {
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    EmitCallWithStub(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
      EmitKeyedCallWithIC(expr, property->key());
    ASSERT(call_type == Call::OTHER_CALL);
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    EmitCallWithStub(expr);

  ASSERT(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  __ li(a0, Operand(arg_count));

  Handle<Object> uninitialized =
  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
  if (FLAG_pretenuring_call_new) {
    StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
                            isolate()->factory()->NewAllocationSite());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);

  __ li(a2, FeedbackVector());
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));
  __ LoadInstanceDescriptors(a1, t0);
  __ Addu(a2, a2, t1);
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&skip_lookup);
  __ JumpIfSmi(a2, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_false);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ li(t0, 0x80000000);
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  context()->Plug(if_true, if_false);
3264 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3265 ASSERT(expr->arguments()->length() == 0);
3267 Label materialize_true, materialize_false;
3268 Label* if_true =
NULL;
3269 Label* if_false =
NULL;
3270 Label* fall_through =
NULL;
3271 context()->PrepareTest(&materialize_true, &materialize_false,
3272 &if_true, &if_false, &fall_through);
3278 Label check_frame_marker;
3280 __ Branch(&check_frame_marker,
ne,
3285 __ bind(&check_frame_marker);
3287 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3289 if_true, if_false, fall_through);
3291 context()->Plug(if_true, if_false);
3295 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3296 ZoneList<Expression*>* args = expr->arguments();
3297 ASSERT(args->length() == 2);
3300 VisitForStackValue(args->at(0));
3301 VisitForAccumulatorValue(args->at(1));
3303 Label materialize_true, materialize_false;
3304 Label* if_true = NULL;
3305 Label* if_false = NULL;
3306 Label* fall_through = NULL;
3307 context()->PrepareTest(&materialize_true, &materialize_false,
3308 &if_true, &if_false, &fall_through);
3311 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3312 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3314 context()->Plug(if_true, if_false);
3318 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3319 ZoneList<Expression*>* args = expr->arguments();
3320 ASSERT(args->length() == 1);
3324 VisitForAccumulatorValue(args->at(0));
3329 context()->Plug(v0);
3333 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3334 ASSERT(expr->arguments()->length() == 0);
3342 __ Branch(&exit, ne, a3,
3350 context()->Plug(v0);
3354 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3355 ZoneList<Expression*>* args = expr->arguments();
3356 ASSERT(args->length() == 1);
3357 Label done, null, function, non_function_constructor;
3359 VisitForAccumulatorValue(args->at(0));
3362 __ JumpIfSmi(v0, &null);
3369 __ GetObjectType(v0, v0, a1);
3384 __ GetObjectType(v0, a1, a1);
3395 __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
3399 __ bind(&non_function_constructor);
3400 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3405 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3410 context()->Plug(v0);
3414 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3422 ZoneList<Expression*>* args = expr->arguments();
3425 VisitForStackValue(args->at(1));
3426 VisitForStackValue(args->at(2));
3427 __ CallRuntime(Runtime::kHiddenLog, 2);
3431 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3432 context()->Plug(v0);
3436 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3439 ZoneList<Expression*>* args = expr->arguments();
3440 ASSERT(args->length() == 3);
3441 VisitForStackValue(args->at(0));
3442 VisitForStackValue(args->at(1));
3443 VisitForStackValue(args->at(2));
3445 context()->Plug(v0);
3449 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3451 RegExpExecStub stub;
3452 ZoneList<Expression*>* args = expr->arguments();
3453 ASSERT(args->length() == 4);
3454 VisitForStackValue(args->at(0));
3455 VisitForStackValue(args->at(1));
3456 VisitForStackValue(args->at(2));
3457 VisitForStackValue(args->at(3));
3459 context()->Plug(v0);
3463 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3464 ZoneList<Expression*>* args = expr->arguments();
3465 ASSERT(args->length() == 1);
3467 VisitForAccumulatorValue(args->at(0));
3471 __ JumpIfSmi(v0, &done);
3473 __ GetObjectType(v0, a1, a1);
3479 context()->Plug(v0);
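// EmitDateField: field 0 (the time value) can be read straight out of the
// JSDate object; other fields are only valid while the date cache stamp
// matches, otherwise the value is recomputed via the C++ date field function.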
3483 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3485 ASSERT(args->length() == 2);
3487 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3489 VisitForAccumulatorValue(args->at(0));
3491 Label runtime, done, not_date_object;
3492 Register object = v0;
3493 Register result = v0;
3494 Register scratch0 = t5;
3495 Register scratch1 = a1;
3497 __ JumpIfSmi(object, &not_date_object);
3498 __ GetObjectType(object, scratch1, scratch1);
3501 if (index->value() == 0) {
3506 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3507 __ li(scratch1, Operand(stamp));
3510 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3512 kPointerSize * index->value()));
3516 __ PrepareCallCFunction(2, scratch1);
3517 __ li(a1, Operand(index));
3518 __ Move(a0, object);
3519 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3523 __ bind(&not_date_object);
3524 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3526 context()->Plug(v0);
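// EmitOneByteSeqStringSetChar (and the two-byte variant below): store a
// character into a sequential string at the given index; under --debug-code
// the index and value are checked to be smis and the receiver is checked to be
// a sequential string of the expected encoding.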
3530 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3531 ZoneList<Expression*>* args = expr->arguments();
3534 Register string = v0;
3535 Register index = a1;
3536 Register value = a2;
3538 VisitForStackValue(args->at(1));
3539 VisitForStackValue(args->at(2));
3540 VisitForAccumulatorValue(args->at(0));
3541 __ Pop(index, value);
3543 if (FLAG_debug_code) {
3544 __ SmiTst(value, at);
3545 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3546 __ SmiTst(index, at);
3547 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3548 __ SmiUntag(index, index);
3550 Register scratch = t5;
3551 __ EmitSeqStringSetCharCheck(
3552 string, index, value, scratch, one_byte_seq_type);
3553 __ SmiTag(index, index);
3556 __ SmiUntag(value, value);
3561 __ Addu(at, at, index);
3563 context()->Plug(string);
3567 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3568 ZoneList<Expression*>* args = expr->arguments();
3571 Register string = v0;
3572 Register index = a1;
3573 Register value = a2;
3575 VisitForStackValue(args->at(1));
3576 VisitForStackValue(args->at(2));
3577 VisitForAccumulatorValue(args->at(0));
3578 __ Pop(index, value);
3580 if (FLAG_debug_code) {
3581 __ SmiTst(value, at);
3582 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3583 __ SmiTst(index, at);
3584 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3585 __ SmiUntag(index, index);
3587 Register scratch = t5;
3588 __ EmitSeqStringSetCharCheck(
3589 string, index, value, scratch, two_byte_seq_type);
3590 __ SmiTag(index, index);
3593 __ SmiUntag(value, value);
3597 __ Addu(at, at, index);
3600 context()->Plug(string);
3604 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3606 ZoneList<Expression*>* args = expr->arguments();
3607 ASSERT(args->length() == 2);
3608 VisitForStackValue(args->at(0));
3609 VisitForStackValue(args->at(1));
3612 context()->Plug(v0);
3616 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3617 ZoneList<Expression*>* args = expr->arguments();
3618 ASSERT(args->length() == 2);
3620 VisitForStackValue(args->at(0));
3621 VisitForAccumulatorValue(args->at(1));
3626 __ JumpIfSmi(a1, &done);
3629 __ GetObjectType(a1, a2, a2);
3637 __ RecordWriteField(
3641 context()->Plug(v0);
3645 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments();
3650 VisitForAccumulatorValue(args->at(0));
3651 __ mov(a0, result_register());
3653 NumberToStringStub stub;
3655 context()->Plug(v0);
3659 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3660 ZoneList<Expression*>* args = expr->arguments();
3661 ASSERT(args->length() == 1);
3663 VisitForAccumulatorValue(args->at(0));
3666 StringCharFromCodeGenerator generator(v0, a1);
3667 generator.GenerateFast(masm_);
3670 NopRuntimeCallHelper call_helper;
3671 generator.GenerateSlow(masm_, call_helper);
3674 context()->Plug(a1);
3678 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3679 ZoneList<Expression*>* args = expr->arguments();
3680 ASSERT(args->length() == 2);
3682 VisitForStackValue(args->at(0));
3683 VisitForAccumulatorValue(args->at(1));
3684 __ mov(a0, result_register());
3686 Register object = a1;
3687 Register index = a0;
3688 Register result = v0;
3692 Label need_conversion;
3693 Label index_out_of_range;
3695 StringCharCodeAtGenerator generator(object,
3700 &index_out_of_range,
3702 generator.GenerateFast(masm_);
3705 __ bind(&index_out_of_range);
3708 __ LoadRoot(result, Heap::kNanValueRootIndex);
3711 __ bind(&need_conversion);
3714 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3717 NopRuntimeCallHelper call_helper;
3718 generator.GenerateSlow(masm_, call_helper);
3721 context()->Plug(result);
3725 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3726 ZoneList<Expression*>* args = expr->arguments();
3727 ASSERT(args->length() == 2);
3729 VisitForStackValue(args->at(0));
3730 VisitForAccumulatorValue(args->at(1));
3731 __ mov(a0, result_register());
3733 Register object = a1;
3734 Register index = a0;
3735 Register scratch = a3;
3736 Register result = v0;
3740 Label need_conversion;
3741 Label index_out_of_range;
3743 StringCharAtGenerator generator(object,
3749 &index_out_of_range,
3751 generator.GenerateFast(masm_);
3754 __ bind(&index_out_of_range);
3757 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3760 __ bind(&need_conversion);
3766 NopRuntimeCallHelper call_helper;
3767 generator.GenerateSlow(masm_, call_helper);
3770 context()->Plug(result);
3774 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3775 ZoneList<Expression*>* args = expr->arguments();
3777 VisitForStackValue(args->at(0));
3778 VisitForAccumulatorValue(args->at(1));
3781 __ mov(a0, result_register());
3784 context()->Plug(v0);
3788 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3789 ZoneList<Expression*>* args = expr->arguments();
3792 VisitForStackValue(args->at(0));
3793 VisitForStackValue(args->at(1));
3795 StringCompareStub stub;
3797 context()->Plug(v0);
3801 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3803 ZoneList<Expression*>* args = expr->arguments();
3804 ASSERT(args->length() == 1);
3805 VisitForStackValue(args->at(0));
3806 __ CallRuntime(Runtime::kMath_log, 1);
3807 context()->Plug(v0);
3811 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3813 ZoneList<Expression*>* args = expr->arguments();
3814 ASSERT(args->length() == 1);
3815 VisitForStackValue(args->at(0));
3816 __ CallRuntime(Runtime::kMath_sqrt, 1);
3817 context()->Plug(v0);
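// EmitCallFunction: pushes the receiver and arguments, then either invokes the
// target directly when it turns out to be a JSFunction or falls back to
// Runtime::kCall.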
3821 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3822 ZoneList<Expression*>* args = expr->arguments();
3823 ASSERT(args->length() >= 2);
3825 int arg_count = args->length() - 2;
3826 for (int i = 0; i < arg_count + 1; i++) {
3827 VisitForStackValue(args->at(i));
3829 VisitForAccumulatorValue(args->last());
3831 Label runtime, done;
3833 __ JumpIfSmi(v0, &runtime);
3834 __ GetObjectType(v0, a1, a1);
3838 __ mov(a1, result_register());
3839 ParameterCount count(arg_count);
3846 __ CallRuntime(Runtime::kCall, args->length());
3849 context()->Plug(v0);
3853 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3854 RegExpConstructResultStub stub;
3855 ZoneList<Expression*>* args = expr->arguments();
3856 ASSERT(args->length() == 3);
3857 VisitForStackValue(args->at(0));
3858 VisitForStackValue(args->at(1));
3859 VisitForAccumulatorValue(args->at(2));
3860 __ mov(a0, result_register());
3864 context()->Plug(v0);
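// EmitGetFromCache: probes the per-context JSFunction result cache for the
// key; on a miss the (cache, key) pair is handed to Runtime::kHiddenGetFromCache.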
3868 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3869 ZoneList<Expression*>* args = expr->arguments();
3873 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3875 Handle<FixedArray> jsfunction_result_caches(
3876 isolate()->native_context()->jsfunction_result_caches());
3877 if (jsfunction_result_caches->length() <= cache_id) {
3878 __ Abort(kAttemptToUseUndefinedCache);
3879 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3880 context()->Plug(v0);
3884 VisitForAccumulatorValue(args->at(1));
3887 Register cache = a1;
3897 Label done, not_found;
3904 __ addu(a3, a3, at);
3907 __ Branch(&not_found, ne, key, Operand(a2));
3912 __ bind(&not_found);
3914 __ Push(cache, key);
3915 __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3918 context()->Plug(v0);
3922 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3923 ZoneList<Expression*>* args = expr->arguments();
3924 VisitForAccumulatorValue(args->at(0));
3926 Label materialize_true, materialize_false;
3927 Label* if_true = NULL;
3928 Label* if_false = NULL;
3929 Label* fall_through = NULL;
3930 context()->PrepareTest(&materialize_true, &materialize_false,
3931 &if_true, &if_false, &fall_through);
3936 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3937 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3939 context()->Plug(if_true, if_false);
3943 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3944 ZoneList<Expression*>* args = expr->arguments();
3945 ASSERT(args->length() == 1);
3946 VisitForAccumulatorValue(args->at(0));
3948 __ AssertString(v0);
3951 __ IndexFromHash(v0, v0);
3953 context()->Plug(v0);
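// EmitFastAsciiArrayJoin: fast Array.prototype.join for arrays of sequential
// ASCII strings. It sums the element string lengths (bailing out on overflow
// or non-ASCII input), allocates the result string, and copies the elements
// with one of three loops specialized for empty, single-character, and longer
// separators.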
3957 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3958 Label bailout, done, one_char_separator, long_separator,
3959 non_trivial_array, not_size_one_array, loop,
3960 empty_separator_loop, one_char_separator_loop,
3961 one_char_separator_loop_entry, long_separator_loop;
3962 ZoneList<Expression*>* args = expr->arguments();
3963 ASSERT(args->length() == 2);
3964 VisitForStackValue(args->at(1));
3965 VisitForAccumulatorValue(args->at(0));
3968 Register array = v0;
3969 Register elements = no_reg;
3970 Register result = no_reg;
3971 Register separator = a1;
3972 Register array_length = a2;
3973 Register result_pos = no_reg;
3974 Register string_length = a3;
3975 Register string = t0;
3976 Register element = t1;
3977 Register elements_end = t2;
3978 Register scratch1 = t3;
3979 Register scratch2 = t5;
3980 Register scratch3 = t4;
3986 __ JumpIfSmi(array, &bailout);
3987 __ GetObjectType(array, scratch1, scratch2);
3991 __ CheckFastElements(scratch1, scratch2, &bailout);
3995 __ SmiUntag(array_length);
3996 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3997 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4000 __ bind(&non_trivial_array);
4009 __ mov(string_length, zero_reg);
4013 __ Addu(elements_end, element, elements_end);
4022 if (generate_debug_code_) {
4023 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
4024 array_length, Operand(zero_reg));
4028 __ Addu(element, element, kPointerSize);
4029 __ JumpIfSmi(string, &bailout);
4032 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4034 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4035 __ BranchOnOverflow(&bailout, scratch3);
4036 __ Branch(&loop, lt, element, Operand(elements_end));
4039 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4043 __ bind(&not_size_one_array);
4052 __ JumpIfSmi(separator, &bailout);
4055 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4061 __ Subu(string_length, string_length, Operand(scratch1));
4062 __ Mult(array_length, scratch1);
4066 __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
4068 __ And(scratch3, scratch2, Operand(0x80000000));
4069 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4070 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4071 __ BranchOnOverflow(&bailout, scratch3);
4072 __ SmiUntag(string_length);
4085 __ AllocateAsciiString(result,
4095 __ Addu(elements_end, element, elements_end);
4096 result_pos = array_length;
4105 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4106 __ Branch(&long_separator, gt, scratch1, Operand(at));
4109 __ bind(&empty_separator_loop);
4117 __ Addu(element, element, kPointerSize);
4119 __ SmiUntag(string_length);
4121 __ CopyBytes(string, result_pos, string_length, scratch1);
4123 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4128 __ bind(&one_char_separator);
4133 __ jmp(&one_char_separator_loop_entry);
4135 __ bind(&one_char_separator_loop);
4144 __ Addu(result_pos, result_pos, 1);
4147 __ bind(&one_char_separator_loop_entry);
4149 __ Addu(element, element, kPointerSize);
4151 __ SmiUntag(string_length);
4153 __ CopyBytes(string, result_pos, string_length, scratch1);
4155 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4161 __ bind(&long_separator_loop);
4170 __ SmiUntag(string_length);
4174 __ CopyBytes(string, result_pos, string_length, scratch1);
4176 __ bind(&long_separator);
4178 __ Addu(element, element, kPointerSize);
4180 __ SmiUntag(string_length);
4182 __ CopyBytes(string, result_pos, string_length, scratch1);
4184 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4189 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4191 context()->Plug(v0);
4195 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4196 if (expr->function() != NULL &&
4198 Comment cmnt(masm_, "[ InlineRuntimeCall");
4199 EmitInlineRuntimeCall(expr);
4203 Comment cmnt(masm_, "[ CallRuntime");
4204 ZoneList<Expression*>* args = expr->arguments();
4205 int arg_count = args->length();
4207 if (expr->is_jsruntime()) {
4213 __ li(a2, Operand(expr->name()));
4222 int arg_count = args->length();
4223 for (int i = 0; i < arg_count; i++) {
4224 VisitForStackValue(args->at(i));
4228 SetSourcePosition(expr->position());
4236 context()->DropAndPlug(1, v0);
4239 for (int i = 0; i < arg_count; i++) {
4240 VisitForStackValue(args->at(i));
4244 __ CallRuntime(expr->function(), arg_count);
4245 context()->Plug(v0);
4250 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4251 switch (expr->op()) {
4252 case Token::DELETE: {
4253 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4254 Property* property = expr->expression()->AsProperty();
4255 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4257 if (property != NULL) {
4258 VisitForStackValue(property->obj());
4259 VisitForStackValue(property->key());
4263 context()->Plug(v0);
4264 } else if (proxy != NULL) {
4265 Variable* var = proxy->var();
4269 if (var->IsUnallocated()) {
4271 __ li(a1, Operand(var->name()));
4273 __ Push(a2, a1, a0);
4275 context()->Plug(v0);
4276 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4279 context()->Plug(var->is_this());
4283 ASSERT(!context_register().is(a2));
4284 __ li(a2, Operand(var->name()));
4285 __ Push(context_register(), a2);
4286 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4287 context()->Plug(v0);
4292 VisitForEffect(expr->expression());
4293 context()->Plug(true);
4299 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4300 VisitForEffect(expr->expression());
4301 context()->Plug(Heap::kUndefinedValueRootIndex);
4306 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4307 if (context()->IsEffect()) {
4310 VisitForEffect(expr->expression());
4311 } else if (context()->IsTest()) {
4312 const TestContext* test = TestContext::cast(context());
4314 VisitForControl(expr->expression(),
4315 test->false_label(),
4317 test->fall_through());
4318 context()->Plug(test->true_label(), test->false_label());
4324 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4325 Label materialize_true, materialize_false, done;
4326 VisitForControl(expr->expression(),
4330 __ bind(&materialize_true);
4331 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4332 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4333 if (context()->IsStackValue()) __ push(v0);
4335 __ bind(&materialize_false);
4336 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4337 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4338 if (context()->IsStackValue()) __ push(v0);
4344 case Token::TYPEOF: {
4345 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4346 { StackValueContext context(this);
4347 VisitForTypeofValue(expr->expression());
4349 __ CallRuntime(Runtime::kTypeof, 1);
4350 context()->Plug(v0);
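// Count operations (x++ / --x): the smi fast path adds the increment inline
// and checks for overflow; otherwise the operand is converted with ToNumberStub
// and a generic binary-op stub (patched via JumpPatchSite) performs the
// operation before the result is written back to the variable or property.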
4360 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4361 ASSERT(expr->expression()->IsValidLeftHandSide());
4363 Comment cmnt(masm_, "[ CountOperation");
4364 SetSourcePosition(expr->position());
4368 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4369 LhsKind assign_type = VARIABLE;
4370 Property* prop = expr->expression()->AsProperty();
4375 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4379 if (assign_type == VARIABLE) {
4380 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4381 AccumulatorValueContext context(this);
4382 EmitVariableLoad(expr->expression()->AsVariableProxy());
4385 if (expr->is_postfix() && !context()->IsEffect()) {
4389 if (assign_type == NAMED_PROPERTY) {
4391 VisitForAccumulatorValue(prop->obj());
4393 EmitNamedPropertyLoad(prop);
4395 VisitForStackValue(prop->obj());
4396 VisitForAccumulatorValue(prop->key());
4399 EmitKeyedPropertyLoad(prop);
4405 if (assign_type == VARIABLE) {
4406 PrepareForBailout(expr->expression(), TOS_REG);
4408 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4412 Label stub_call, done;
4413 JumpPatchSite patch_site(masm_);
4415 int count_value = expr->op() == Token::INC ? 1 : -1;
4417 if (ShouldInlineSmiCase(expr->op())) {
4419 patch_site.EmitJumpIfNotSmi(v0, &slow);
4422 if (expr->is_postfix()) {
4423 if (!context()->IsEffect()) {
4427 switch (assign_type) {
4431 case NAMED_PROPERTY:
4434 case KEYED_PROPERTY:
4441 Register scratch1 = a1;
4442 Register scratch2 = t0;
4444 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4445 __ BranchOnNoOverflow(&done, scratch2);
4451 ToNumberStub convert_stub;
4452 __ CallStub(&convert_stub);
4455 if (expr->is_postfix()) {
4456 if (!context()->IsEffect()) {
4460 switch (assign_type) {
4464 case NAMED_PROPERTY:
4467 case KEYED_PROPERTY:
4474 __ bind(&stub_call);
4479 SetSourcePosition(expr->position());
4482 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4483 patch_site.EmitPatchInfo();
4487 switch (assign_type) {
4489 if (expr->is_postfix()) {
4490 { EffectContext context(this);
4491 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4493 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4498 if (!context()->IsEffect()) {
4499 context()->PlugTOS();
4502 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4504 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4505 context()->Plug(v0);
4508 case NAMED_PROPERTY: {
4509 __ mov(a0, result_register());
4510 __ li(a2, Operand(prop->key()->AsLiteral()->value()));
4512 CallStoreIC(expr->CountStoreFeedbackId());
4513 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4514 if (expr->is_postfix()) {
4515 if (!context()->IsEffect()) {
4516 context()->PlugTOS();
4519 context()->Plug(v0);
4523 case KEYED_PROPERTY: {
4524 __ mov(a0, result_register());
4526 Handle<Code> ic = strict_mode() == SLOPPY
4527 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4528 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4529 CallIC(ic, expr->CountStoreFeedbackId());
4530 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4531 if (expr->is_postfix()) {
4532 if (!context()->IsEffect()) {
4533 context()->PlugTOS();
4536 context()->Plug(v0);
4544 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4545 ASSERT(!context()->IsEffect());
4546 ASSERT(!context()->IsTest());
4547 VariableProxy* proxy = expr->AsVariableProxy();
4548 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4549 Comment cmnt(masm_, "[ Global variable");
4551 __ li(a2, Operand(proxy->name()));
4555 PrepareForBailout(expr, TOS_REG);
4556 context()->Plug(v0);
4557 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4558 Comment cmnt(masm_, "[ Lookup slot");
4563 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4566 __ li(a0, Operand(proxy->name()));
4568 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4569 PrepareForBailout(expr, TOS_REG);
4572 context()->Plug(v0);
4575 VisitInDuplicateContext(expr);
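// EmitLiteralCompareTypeof: handles comparisons of the form
// typeof x == 'literal' without materializing the typeof string; it evaluates
// the operand for its typeof value and branches directly on whichever literal
// is being compared against.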
4579 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4580 Expression* sub_expr,
4581 Handle<String> check) {
4582 Label materialize_true, materialize_false;
4583 Label* if_true = NULL;
4584 Label* if_false = NULL;
4585 Label* fall_through = NULL;
4586 context()->PrepareTest(&materialize_true, &materialize_false,
4587 &if_true, &if_false, &fall_through);
4589 { AccumulatorValueContext context(this);
4590 VisitForTypeofValue(sub_expr);
4592 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4594 if (check->Equals(isolate()->heap()->number_string())) {
4595 __ JumpIfSmi(v0, if_true);
4597 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4598 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4599 } else if (check->Equals(isolate()->heap()->string_string())) {
4600 __ JumpIfSmi(v0, if_false);
4602 __ GetObjectType(v0, v0, a1);
4606 Split(eq, a1, Operand(zero_reg),
4607 if_true, if_false, fall_through);
4608 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4609 __ JumpIfSmi(v0, if_false);
4610 __ GetObjectType(v0, v0, a1);
4611 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4612 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4613 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4614 __ Branch(if_true, eq, v0, Operand(at));
4615 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4616 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4617 } else if (FLAG_harmony_typeof &&
4618 check->Equals(isolate()->heap()->null_string())) {
4619 __ LoadRoot(at, Heap::kNullValueRootIndex);
4620 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4621 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4622 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4623 __ Branch(if_true, eq, v0, Operand(at));
4624 __ JumpIfSmi(v0, if_false);
4629 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4630 } else if (check->Equals(isolate()->heap()->function_string())) {
4631 __ JumpIfSmi(v0, if_false);
4633 __ GetObjectType(v0, v0, a1);
4636 if_true, if_false, fall_through);
4637 } else if (check->Equals(isolate()->heap()->object_string())) {
4638 __ JumpIfSmi(v0, if_false);
4639 if (!FLAG_harmony_typeof) {
4640 __ LoadRoot(at, Heap::kNullValueRootIndex);
4641 __ Branch(if_true, eq, v0, Operand(at));
4644 __ GetObjectType(v0, v0, a1);
4651 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4653 if (if_false != fall_through) __ jmp(if_false);
4655 context()->Plug(if_true, if_false);
4659 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4660 Comment cmnt(masm_, "[ CompareOperation");
4661 SetSourcePosition(expr->position());
4665 if (TryLiteralCompare(expr)) return;
4669 Label materialize_true, materialize_false;
4670 Label* if_true = NULL;
4671 Label* if_false = NULL;
4672 Label* fall_through = NULL;
4673 context()->PrepareTest(&materialize_true, &materialize_false,
4674 &if_true, &if_false, &fall_through);
4677 VisitForStackValue(expr->left());
4680 VisitForStackValue(expr->right());
4682 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4683 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4684 Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4687 case Token::INSTANCEOF: {
4688 VisitForStackValue(expr->right());
4691 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4693 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4698 VisitForAccumulatorValue(expr->right());
4700 __ mov(a0, result_register());
4703 bool inline_smi_code = ShouldInlineSmiCase(op);
4704 JumpPatchSite patch_site(masm_);
4705 if (inline_smi_code) {
4707 __ Or(a2, a0, Operand(a1));
4708 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4709 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4710 __ bind(&slow_case);
4713 SetSourcePosition(expr->position());
4715 CallIC(ic, expr->CompareOperationFeedbackId());
4716 patch_site.EmitPatchInfo();
4717 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4718 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4724 context()->Plug(if_true, if_false);
4728 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4729 Expression* sub_expr,
4731 Label materialize_true, materialize_false;
4732 Label* if_true = NULL;
4733 Label* if_false = NULL;
4734 Label* fall_through = NULL;
4735 context()->PrepareTest(&materialize_true, &materialize_false,
4736 &if_true, &if_false, &fall_through);
4738 VisitForAccumulatorValue(sub_expr);
4739 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4740 __ mov(a0, result_register());
4741 if (expr->op() == Token::EQ_STRICT) {
4743 Heap::kNullValueRootIndex :
4744 Heap::kUndefinedValueRootIndex;
4745 __ LoadRoot(a1, nil_value);
4746 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4749 CallIC(ic, expr->CompareOperationFeedbackId());
4750 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4752 context()->Plug(if_true, if_false);
4756 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4758 context()->Plug(v0);
4762 Register FullCodeGenerator::result_register() {
4767 Register FullCodeGenerator::context_register() {
4772 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4778 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4783 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4785 if (declaration_scope->is_global_scope() ||
4786 declaration_scope->is_module_scope()) {
4792 } else if (declaration_scope->is_eval_scope()) {
4798 ASSERT(declaration_scope->is_function_scope());
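// EnterFinallyBlock saves the result register, the return address (as a
// smi-encoded offset from the code object), and the isolate's pending-message
// state; ExitFinallyBlock below restores them in reverse order.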
4808 void FullCodeGenerator::EnterFinallyBlock() {
4809 ASSERT(!result_register().is(a1));
4811 __ push(result_register());
4813 __ Subu(a1, ra, Operand(masm_->CodeObject()));
4816 __ Addu(a1, a1, Operand(a1));
4822 ExternalReference pending_message_obj =
4823 ExternalReference::address_of_pending_message_obj(isolate());
4824 __ li(at, Operand(pending_message_obj));
4828 ExternalReference has_pending_message =
4829 ExternalReference::address_of_has_pending_message(isolate());
4830 __ li(at, Operand(has_pending_message));
4835 ExternalReference pending_message_script =
4836 ExternalReference::address_of_pending_message_script(isolate());
4837 __ li(at, Operand(pending_message_script));
4843 void FullCodeGenerator::ExitFinallyBlock() {
4844 ASSERT(!result_register().is(a1));
4847 ExternalReference pending_message_script =
4848 ExternalReference::address_of_pending_message_script(isolate());
4849 __ li(at, Operand(pending_message_script));
4854 ExternalReference has_pending_message =
4855 ExternalReference::address_of_has_pending_message(isolate());
4856 __ li(at, Operand(has_pending_message));
4860 ExternalReference pending_message_obj =
4861 ExternalReference::address_of_pending_message_obj(isolate());
4862 __ li(at, Operand(pending_message_obj));
4869 __ pop(result_register());
4872 __ Addu(at, a1, Operand(masm_->CodeObject()));
4879 #define __ ACCESS_MASM(masm())
4883 int* context_length) {
4890 __ Drop(*stack_depth);
4891 if (*context_length > 0) {
4897 __ Call(finally_entry_);
4900 *context_length = 0;
4910 BackEdgeState target_state,
4911 Code* replacement_code) {
4913 Address branch_address = pc - 6 * kInstrSize;
4914 CodePatcher patcher(branch_address, 1);
4916 switch (target_state) {
4925 patcher.masm()->slt(at, a3, zero_reg);
4936 patcher.masm()->addiu(at, zero_reg, 1);
4939 Address pc_immediate_load_address = pc - 4 * kInstrSize;
4943 replacement_code->entry());
4945 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4946 unoptimized_code, pc_immediate_load_address, replacement_code);
4952 Code* unoptimized_code,
4955 Address branch_address = pc - 6 * kInstrSize;
4956 Address pc_immediate_load_address = pc - 4 * kInstrSize;
4960 ASSERT(reinterpret_cast<uint32_t>(
4962 reinterpret_cast<uint32_t>(
4963 isolate->builtins()->InterruptCheck()->entry()));
4969 if (reinterpret_cast<uint32_t>(
4971 reinterpret_cast<uint32_t>(
4972 isolate->builtins()->OnStackReplacement()->entry())) {
4976 ASSERT(reinterpret_cast<uint32_t>(
4978 reinterpret_cast<uint32_t>(
4979 isolate->builtins()->OsrAfterStackCheck()->entry()));
4986 #endif // V8_TARGET_ARCH_MIPS