#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm_)
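// A JumpPatchSite marks the location of an inline smi check so the IC
// machinery can later patch it.  EmitJumpIfSmi/EmitJumpIfNotSmi bind the
// patch site and emit the branch; EmitPatchInfo() then records the distance
// back to that branch in the immediate field of an andi on zero_reg, which
// serves as a marker instruction for the patching code.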
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ Branch(target, eq, at, Operand(zero_reg));
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ Branch(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
      info_emitted_ = true;
    }
  }

  MacroAssembler* masm_;
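// Generate() emits full-codegen code for one function: the stack frame and
// prologue, allocation of the local context and the arguments object,
// declarations, an initial stack check, the function body, and the implicit
// "return undefined" epilogue.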
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  if (!info->is_classic_mode() || info->is_native()) {
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();

  if (locals_count > 0) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (!is_classic_mode()) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);
    SetVar(arguments, v0, a1, a2);

    __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(t0));

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  EmitReturnSequence();
void FullCodeGenerator::ClearAccumulator() {
  __ mov(v0, zero_reg);
}
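// The profiling counter is a JSGlobalPropertyCell initialized from
// FLAG_interrupt_budget.  It is decremented on back edges and (optionally) at
// returns; when it goes negative the code calls into the runtime so the
// function can be profiled or optimized, and the counter is then reset.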
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  if (FLAG_count_based_interrupts) {
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
                   Max(1, distance / kBackEdgeDistanceUnit));
    EmitProfilingCounterDecrement(weight);
    __ slt(at, a3, zero_reg);
    __ beq(at, zero_reg, &ok);
    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
    __ beq(at, zero_reg, &ok);

  RecordStackCheck(stmt->OsrEntryId());

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();

  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
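// The return sequence is emitted once per function at return_label_; earlier
// returns simply branch to it.  Besides restoring the frame and dropping the
// arguments, it can decrement the profiling counter and request optimization
// (Runtime::kOptimizeFunctionOnNextCall) when self-optimization is enabled.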
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    __ CallRuntime(Runtime::kTraceExit, 1);
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
                     Max(1, distance / kBackEdgeDistanceUnit));
      EmitProfilingCounterDecrement(weight);
      __ Branch(&ok, ge, a3, Operand(zero_reg));
      __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      EmitProfilingCounterReset();

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    masm_->Addu(sp, sp, Operand(sp_delta));
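// The *Context::Plug() and DropAndPlug() overloads move a just-computed value
// into the location required by the surrounding expression context:
// EffectContext discards it, AccumulatorValueContext leaves it in the result
// register (v0), StackValueContext pushes it, and TestContext turns it into a
// control-flow split between true_label_ and false_label_.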
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  __ Move(result_register(), reg);


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ mov(at, zero_reg);
  __ push(result_register());
  __ CallRuntime(Runtime::kToBool, 1);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
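// Variable access helpers: StackOperand() and VarOperand() compute the
// MemOperand for stack- and context-allocated variables, GetVar() and
// SetVar() load and store through them, and SetVar() emits the write barrier
// for context slots.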
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);

  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, "Declaration in with context.",
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, "Declaration in catch context.",
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ sw(t0, StackOperand(variable));

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Push(cp, a2, a1, a0);
      __ mov(a0, zero_reg);
      __ Push(cp, a2, a1, a0);
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));

      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(cp,

      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());

      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ li(a1, Operand(instance));
      Visit(declaration->module());
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ li(a1, Operand(pairs));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
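// Switch statements compile to a chain of case comparisons.  The tag is kept
// on the stack; each clause label is evaluated, compared against the tag
// (with an inline smi fast path and a compare IC), and on a match control
// jumps to the clause's body target, otherwise it falls through to the next
// test and finally to the default clause or the break label.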
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);
      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Branch(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Branch(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
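// For-in first filters out undefined and null enumerables, converts the value
// to an object, and tries to reuse the enum cache from the map's descriptors;
// if that is not possible it calls Runtime::kGetPropertyNamesFast.  The loop
// then walks the resulting fixed array of keys, skipping properties that have
// been deleted or shadowed since enumeration started.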
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ bind(&done_convert);

  __ GetObjectType(a0, a1, a1);

  __ CheckEnumCache(null_value, &call_runtime);
  __ Branch(&use_cache);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  Label no_descriptors;
  __ bind(&use_cache);
  __ EnumLength(a1, v0);
  __ LoadInstanceDescriptors(v0, a2);
  __ Push(a2, a1, a0);
  __ bind(&no_descriptors);

  __ bind(&fixed_array);
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(a1, cell);
  __ GetObjectType(a2, a3, a3);
  __ bind(&non_proxy);

  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
  __ addu(t0, a2, t0);
  __ Branch(&update_each, eq, t0, Operand(a2));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  __ bind(&update_each);
  __ mov(result_register(), a3);
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ li(a0, Operand(info));
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
  Register current = cp;

  if (s->num_heap_slots() > 0) {
    if (s->calls_non_strict_eval()) {
      __ Branch(slow, ne, temp, Operand(zero_reg));
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s->is_eval_scope()) {
    if (!current.is(next)) {
      __ Move(next, current);
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    __ Branch(slow, ne, temp, Operand(zero_reg));

  __ li(a2, Operand(var->name()));
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = cp;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        __ Branch(slow, ne, temp, Operand(zero_reg));
  __ Branch(slow, ne, temp, Operand(zero_reg));
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == LET) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);
      if (local->mode() == CONST) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);
      } else {
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  switch (var->location()) {
      Comment cmnt(masm_, "Global variable");
      __ li(a2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(v0);

      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
          __ Movz(v0, a0, at);
          context()->Plug(v0);
      } else {
        context()->Plug(var);
      }

      Comment cmnt(masm_, "Lookup variable");
      __ li(a1, Operand(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(v0);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);

  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);
  context()->Plug(v0);
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
  }
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
  flags |= expr->has_function()
  __ Push(a3, a2, a1, a0);
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    }
    switch (property->kind()) {
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->handle()));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
          } else {
            VisitForEffect(value);
          }
        }

        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);

        accessor_table.lookup(key)->second->getter = value;
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

    context()->PlugTOS();
    context()->Plug(v0);
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  bool has_fast_elements =
  Handle<FixedArrayBase> constant_elements_values(
  __ mov(a0, result_register());
  __ li(a1, Operand(constant_elements));
  __ Push(a3, a2, a1);
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (subexpr->AsLiteral() != NULL ||
    if (!result_saved) {
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);
    __ RecordWriteField(a1, offset, result_register(), a2,
    __ mov(a0, result_register());
    StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

    context()->PlugTOS();
    context()->Plug(v0);
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);

    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
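// Binary operations on likely-smi operands get an inline fast path: the
// operands are or'ed together and tested with a patchable smi check
// (JumpPatchSite).  The smi case handles each operator with explicit overflow
// and range checks and falls back to the generic BinaryOpStub on failure.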
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;
  Register left = a1;
  Register right = a0;
  __ mov(a0, result_register());

  __ Or(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();

      __ Branch(&stub_call);
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);

      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);

      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);

      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ mov(v0, zero_reg);

      __ Or(v0, left, Operand(right));
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));

  context()->Plug(v0);
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  assign_type = (prop->key()->IsPropertyName())

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
    case NAMED_PROPERTY: {
      __ push(result_register());
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
    case KEYED_PROPERTY: {
      __ push(result_register());
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  context()->Plug(v0);
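// Variable stores dispatch on where the variable lives and on the operation:
// unallocated (global) variables go through a StoreIC, const initialization
// only overwrites the hole, uninitialized let bindings throw a reference
// error, context slots get a write barrier after the store, and lookup slots
// fall back to Runtime::kStoreContextSlot.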
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  if (var->IsUnallocated()) {
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    ASSERT(!var->IsParameter());
    if (var->IsStackLocal()) {
      __ lw(a1, StackOperand(var));
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a1, Operand(t0));
      __ sw(result_register(), StackOperand(var));
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      __ li(a0, Operand(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1, a0);
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ sw(result_register(), location);
      if (var->IsContextSlot()) {
        __ mov(a3, result_register());
        __ RecordWriteContextSlot(

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      if (generate_debug_code_ && op == Token::INIT_LET) {
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
      __ sw(v0, location);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(
      ASSERT(var->IsLookupSlot());
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1, a0);
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  SetSourcePosition(expr->position());
  __ mov(a0, result_register());
  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}
void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId id) {
  __ Call(code, rmode, id);
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ li(a2, Operand(name));
  }
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->Plug(v0);
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  VisitForAccumulatorValue(key);

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, v0);
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ li(a2, Operand(cell));

  CallFunctionStub stub(arg_count, flags);
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, v0);
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);

  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
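// Calls are dispatched on the shape of the callee: a possibly-direct eval
// goes through Runtime::kResolvePossiblyDirectEval, an unallocated (global)
// variable uses a call IC, a lookup-slot callee loads the function and
// receiver via Runtime::kLoadContextSlot, property callees use named or keyed
// call ICs, and everything else goes through CallFunctionStub.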
void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      EmitResolvePossiblyDirectEval(arg_count);
    }
    SetSourcePosition(expr->position());
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, v0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    { PreservePositionScope scope(masm()->positions_recorder());
      __ push(context_register());
      __ li(a2, Operand(proxy->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);

      if (done.is_linked()) {
        __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);

  ASSERT(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  SetSourcePosition(expr->position());
  __ li(a0, Operand(arg_count));

  Handle<Object> uninitialized =
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ li(a2, Operand(cell));

  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}
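// The Emit* functions below implement V8's inline runtime calls
// (%_IsSmi, %_IsArray, %_ClassOf, %_StringCharCodeAt, ...).  The predicate
// style ones evaluate their argument into v0, prepare a test context, and
// Split() on the result.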
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);
  __ Branch(if_true, ne, t0, Operand(zero_reg));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));
  __ LoadInstanceDescriptors(a1, t0);
  __ Addu(a2, a2, t1);
  __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Branch(&loop, ne, t0, Operand(a2));
  __ JumpIfSmi(a2, if_false);
  __ Branch(if_false, ne, a2, Operand(a3));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ Branch(&check_frame_marker, ne,
  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  __ Branch(&exit, ne, a3,
  context()->Plug(v0);
}
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(v0, &null);
  __ GetObjectType(v0, v0, a1);
  __ GetObjectType(v0, a1, a1);
  __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);

  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_symbolRootIndex);

  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  context()->Plug(v0);
}
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);

  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(1, a0);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

    CpuFeatures::Scope scope(FPU);
    __ li(a1, Operand(0x41300000));
    __ Move(f12, v0, a1);
    __ Move(f14, zero_reg, a1);
    __ PrepareCallCFunction(2, a0);
        ExternalReference::fill_heap_number_with_random_function(isolate()), 2);

  context()->Plug(v0);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(v0);
}
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(v0);
}
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(v0, &done);
  __ GetObjectType(v0, a1, a1);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ li(scratch1, Operand(stamp));
    __ Branch(&runtime, ne, scratch1, Operand(scratch0));
                          kPointerSize * index->value()));
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
    __ CallRuntime(Runtime::kMath_pow, 2);
  context()->Plug(v0);
}
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ JumpIfSmi(a1, &done);
  __ GetObjectType(a1, a2, a2);
  __ RecordWriteField(
  context()->Plug(v0);
}
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  NumberToStringStub stub;
  context()->Plug(v0);
}
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  StringCharFromCodeGenerator generator(v0, a1);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(a1);
}
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ bind(&need_conversion);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
}
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  Label need_conversion;
  Label index_out_of_range;
      &index_out_of_range,

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
}
3325 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3326 ZoneList<Expression*>* args = expr->arguments();
3328 VisitForStackValue(args->at(0));
3329 VisitForStackValue(args->at(1));
3333 context()->Plug(v0);
3337 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3338 ZoneList<Expression*>* args = expr->arguments();
3341 VisitForStackValue(args->at(0));
3342 VisitForStackValue(args->at(1));
3344 StringCompareStub stub;
3346 context()->Plug(v0);
3350 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3354 ZoneList<Expression*>* args = expr->arguments();
3355 ASSERT(args->length() == 1);
3356 VisitForStackValue(args->at(0));
3357 __ mov(a0, result_register());
3359 context()->Plug(v0);
3363 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3367 ZoneList<Expression*>* args = expr->arguments();
3368 ASSERT(args->length() == 1);
3369 VisitForStackValue(args->at(0));
3370 __ mov(a0, result_register());
3372 context()->Plug(v0);
3376 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3380 ZoneList<Expression*>* args = expr->arguments();
3381 ASSERT(args->length() == 1);
3382 VisitForStackValue(args->at(0));
3383 __ mov(a0, result_register());
3385 context()->Plug(v0);
3389 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3393 ZoneList<Expression*>* args = expr->arguments();
3394 ASSERT(args->length() == 1);
3395 VisitForStackValue(args->at(0));
3396 __ mov(a0, result_register());
3398 context()->Plug(v0);
3402 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3404 ZoneList<Expression*>* args = expr->arguments();
3405 ASSERT(args->length() == 1);
3406 VisitForStackValue(args->at(0));
3407 __ CallRuntime(Runtime::kMath_sqrt, 1);
3408 context()->Plug(v0);
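// EmitCallFunction implements the %_CallFunction(receiver, ...args, fn)
// intrinsic: the receiver and arguments are pushed, the callee (the last
// argument) is left in the accumulator, and a non-JSFunction callee takes the
// Runtime::kCall fallback. The InvokeFunction call that would normally sit
// between the `runtime` and `done` labels is largely elided in this excerpt.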
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());

  Label runtime, done;
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);

  __ mov(a1, result_register());
  ParameterCount count(arg_count);

  __ CallRuntime(Runtime::kCall, args->length());

  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
  }

  VisitForAccumulatorValue(args->at(1));

  Register cache = a1;

  Label done, not_found;
  __ addu(a3, a3, at);
  __ Branch(&not_found, ne, key, Operand(a2));

  __ bind(&not_found);
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  context()->Plug(v0);
}

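// EmitIsRegExpEquivalent compares two values expected to be JSRegExp
// instances: identical objects are trivially equivalent, a smi on either side
// fails, and the remaining checks in this excerpt appear to compare maps and
// then the regexp source/flag data before loading the true/false roots into
// v0 (several of the intervening loads are elided here).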
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register right = v0;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label done, fail, ok;
  __ Branch(&ok, eq, left, Operand(right));
  __ And(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  __ Branch(&fail, ne, tmp, Operand(tmp2));
  __ Branch(&ok, eq, tmp, Operand(tmp2));
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
  context()->Plug(v0);
}

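// %_HasCachedArrayIndex / %_GetCachedArrayIndex below rely on the string hash
// field: strings that are valid array indices cache the parsed index in their
// hash field, and IndexFromHash extracts it. The containment test is
// presumably a mask against kContainsCachedArrayIndexMask; the masking
// instructions themselves are elided from this excerpt.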
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}

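// EmitFastAsciiArrayJoin is the inline fast path for Array.prototype.join on
// arrays of sequential ASCII strings, e.g. ["a", "b"].join(","). It sums the
// element lengths with overflow checks, allocates a single ASCII result
// string, and copies the elements with one of three loops depending on the
// separator: empty, one character, or longer. Any unexpected input jumps to
// `bailout`, which appears to return undefined so the JS-level caller can
// fall back to the generic join implementation.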
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  Register array = v0;
  Register elements = no_reg;
  Register result = no_reg;
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);

  __ CheckFastElements(scratch1, scratch2, &bailout);

  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);

  __ bind(&non_trivial_array);

  __ mov(string_length, zero_reg);
  __ Addu(elements_end, element, elements_end);

  if (generate_debug_code_) {
    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
        array_length, Operand(zero_reg));
  }
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  __ Branch(&not_size_one_array, ne, array_length, Operand(1));

  __ bind(&not_size_one_array);

  __ JumpIfSmi(separator, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mult(array_length, scratch1);
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  __ AllocateAsciiString(result,
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;

  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  __ bind(&empty_separator_loop);
  __ Addu(element, element, kPointerSize);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));

  __ bind(&one_char_separator);
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  __ Addu(result_pos, result_pos, 1);

  __ bind(&one_char_separator_loop_entry);
  __ Addu(element, element, kPointerSize);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));

  __ bind(&long_separator_loop);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ Addu(element, element, kPointerSize);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));

  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  context()->Plug(v0);
}

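// VisitCallRuntime dispatches a CallRuntime AST node. Names starting with '_'
// (e.g. %_IsSmi(x)) are inline intrinsics handled by EmitInlineRuntimeCall
// and the Emit* helpers above; other names are either JS runtime functions,
// called through a CallIC built by the stub cache, or C++ runtime functions,
// called via __ CallRuntime.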
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
  }

  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    __ li(a2, Operand(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
  } else {
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(v0);
}

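// VisitUnaryOperation covers delete/void/!/typeof/+/-/~. A sketch of the
// JS-level forms this switch distinguishes (illustrative only):
//   delete obj.p   -> DELETE on a Property
//   delete x       -> DELETE on a VariableProxy
//   void e, !e     -> VOID / NOT
//   typeof e       -> TYPEOF (value materialized, then Runtime::kTypeof)
//   +e             -> ADD (inline ToNumber fast path)
//   -e, ~e         -> SUB / BIT_NOT via EmitUnaryOperation and UnaryOpStub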
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        if (var->IsUnallocated()) {
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1, a0);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          context()->Plug(var->is_this());
        } else {
          __ push(context_register());
          __ li(a2, Operand(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      __ mov(a0, result_register());
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;
  }
}


void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOpStub stub(expr->op(), overwrite);
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->UnaryOperationFeedbackId());
  context()->Plug(v0);
}

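// VisitCountOperation handles ++/-- in both prefix and postfix forms and for
// variable, named-property, and keyed-property targets. When
// ShouldInlineSmiCase allows it, the count is applied with
// AdduAndCheckForOverflow plus a JumpPatchSite smi check, so the binary-op
// stub call is only reached on overflow or non-smi input; the patch site
// records the inline check so the IC machinery can patch it later.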
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
  }

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
      (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;

  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  }

  if (expr->is_postfix() && !context()->IsEffect()) {
  }
  if (assign_type == NAMED_PROPERTY) {
    VisitForAccumulatorValue(prop->obj());
    EmitNamedPropertyLoad(prop);
  } else {
    VisitForStackValue(prop->obj());
    VisitForAccumulatorValue(prop->key());
    EmitKeyedPropertyLoad(prop);
  }

  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  Label no_conversion;
  __ JumpIfSmi(v0, &no_conversion);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      switch (assign_type) {
        case NAMED_PROPERTY:
        case KEYED_PROPERTY:
      }
    }
  }

  __ mov(a0, result_register());

  Label stub_call, done;
  JumpPatchSite patch_site(masm_);
  int count_value = expr->op() == Token::INC ? 1 : -1;

  if (ShouldInlineSmiCase(expr->op())) {
    __ AdduAndCheckForOverflow(v0, a0, a1, t0);
    __ BranchOnOverflow(&stub_call, t0);
    patch_site.EmitJumpIfSmi(v0, &done);
    __ bind(&stub_call);
  }

  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();

  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        }
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}

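// VisitForTypeofValue loads a value for use by typeof without triggering a
// ReferenceError for unresolved names: unallocated globals go through a
// contextual LoadIC, and lookup-slot variables fall back to
// Runtime::kLoadContextSlotNoReferenceError. This is what makes
// `typeof undeclaredVar` evaluate to "undefined" instead of throwing.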
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ li(a2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
    __ li(a0, Operand(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else {
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(v0, if_true);
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

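// VisitCompareOperation first lets TryLiteralCompare handle comparisons
// against literals (typeof x == "string", x === null, and the like, see the
// EmitLiteralCompare* helpers). For the generic case it optionally inlines a
// smi-only comparison guarded by a JumpPatchSite before falling back to the
// compare IC, whose result is split against zero.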
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  if (TryLiteralCompare(expr)) return;

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

      VisitForStackValue(expr->left());
      VisitForStackValue(expr->right());
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }

      VisitForAccumulatorValue(expr->right());
      switch (op) {
        case Token::EQ_STRICT:
        case Token::INSTANCEOF:
      }
      __ mov(a0, result_register());

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      SetSourcePosition(expr->position());
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ mov(a0, result_register());
  __ LoadRoot(a1, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    __ Branch(if_true, eq, a0, Operand(a1));
    __ LoadRoot(a1, other_nil_value);
    __ Branch(if_true, eq, a0, Operand(a1));
    __ JumpIfSmi(a0, if_false);
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
  } else if (declaration_scope->is_eval_scope()) {
  } else {
    ASSERT(declaration_scope->is_function_scope());
  }
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
  __ push(result_register());

  __ Addu(a1, a1, Operand(a1));

  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
}

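// ExitFinallyBlock is the mirror image of EnterFinallyBlock above: it appears
// to restore the pending message script, the has-pending-message flag, and
// the pending message object (in reverse order of how they were saved), and
// finally pops the saved result register. This is what lets a try/finally
// preserve both the in-flight completion value and any pending exception
// message across the finally body; the actual load/store instructions are
// mostly elided from this excerpt.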
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));

  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));

  __ pop(result_register());
}


#undef __
#define __ ACCESS_MASM(masm())

    int* context_length) {
  __ Drop(*stack_depth);
  if (*context_length > 0) {
  }
  __ Call(finally_entry_);

  *context_length = 0;
}

#endif  // V8_TARGET_ARCH_MIPS