#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensures that the jump is always taken,
  // so it is taken before the code is patched.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ Branch(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensures that the jump is never taken,
  // so it is not taken before the code is patched.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ Branch(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
      info_emitted_ = true;
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  bool info_emitted_;
};

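// In outline, the patch site mechanism works as follows: EmitJumpIfSmi and
// EmitJumpIfNotSmi emit an `andi at, reg, 0` marker at a bound label, and
// EmitPatchInfo() later encodes, in a second `andi` into zero_reg (a no-op
// at runtime), both the compared register and the instruction distance back
// to that marker. The IC patching machinery can decode this to locate and
// rewrite the inlined smi check. This summary is a reading of the code
// above, not an authoritative account of the patching protocol.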
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ = isolate()->factory()->NewFixedArray(
      function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }

  // Strict mode functions and builtins need to replace the receiver with
  // undefined when called as functions (without an explicit receiver).
  // t1 is zero for method calls and non-zero for function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame, because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();
  if (locals_count > 0) {
    // Load the undefined value here, so it is ready for the loop below.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {
      __ push(at);
    }
  }

  bool function_in_register = true;
  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        __ lw(a0, MemOperand(fp, parameter_offset));
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);
        // Update the write barrier for the parameter just stored.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    int num_parameters = info->scope()->num_parameters();
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }
    { Comment cmnt(masm_, "[ Stack check");
      Label ok;
      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(t0));
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}

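// Orientation note (an informal reading of the prologue above, not all of
// which is shown here): the full prologue also saves ra, fp, cp and the
// closure register before initializing stack locals to undefined, so that
// the frame layout from high to low addresses is receiver and parameters,
// the saved registers, then the locals.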
void FullCodeGenerator::ClearAccumulator() {
  __ mov(v0, zero_reg);
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
}

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is one-off: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = 10;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
}

static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 142;
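// The back-edge weight computed below scales the profiling-counter decrement
// with the loop's code size: weight = Min(127, Max(1, distance / 142)). For
// example, a back edge spanning 1420 bytes of code decrements the counter by
// 10 per iteration, while any loop smaller than 142 bytes decrements it by 1.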

void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;
  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    }
    EmitProfilingCounterDecrement(weight);
    __ slt(at, a3, zero_reg);
    __ beq(at, zero_reg, &ok);
    InterruptStub stub;
    __ CallStub(&stub);
  } else {
    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
    __ sltu(at, sp, t0);
    __ beq(at, zero_reg, &ok);
    StackCheckStub stub;
    __ CallStub(&stub);
  }
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());
  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

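// Roughly, with --count-based-interrupts the weighted counter decrement above
// replaces the plain stack-limit comparison: the InterruptStub is taken once
// the counter underflows, after which the counter is reset. Either way the
// PC-to-OSR-id mapping is recorded so on-stack replacement can find the
// corresponding AST id.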
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ Branch(&ok, ge, a3, Operand(zero_reg));
      __ push(v0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(a2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(v0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    // Here we use masm_-> directly instead of the __ macro to avoid the code
    // coverage tool instrumenting code whose size we rely on.
    int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ RecordJSReturn();
    masm_->mov(sp, fp);
    masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
    masm_->Addu(sp, sp, Operand(sp_delta));
    masm_->Jump(ra);
  }
}

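// The return sequence is emitted via masm_-> rather than the __ macro so its
// exact size stays stable; the debugger and deoptimizer rely on being able
// to locate and patch it. The check_exit_codesize label exists to let debug
// builds assert that this size assumption holds.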
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

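// The four Plug overloads above correspond to the full code generator's four
// expression contexts: EffectContext discards the value,
// AccumulatorValueContext leaves it in v0, StackValueContext pushes it, and
// TestContext converts it into a control-flow split between true_label_ and
// false_label_. The remaining Plug/DropAndPlug overloads below follow the
// same pattern for other kinds of values.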
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}

void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  if (CpuFeatures::IsSupported(FPU)) {
    ToBooleanStub stub(result_register());
    __ CallStub(&stub);
    __ mov(at, zero_reg);
  } else {
    // Call the runtime to find the boolean value of the source and then
    // branch based on it.
    __ push(result_register());
    __ CallRuntime(Runtime::kToBool, 1);
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
  }
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}

void FullCodeGenerator::Split(Condition cc, Register lhs, const Operand& rhs,
                              Label* if_true, Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}

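// Split() avoids emitting a branch to whichever target is the fall-through
// block: when if_false falls through, only the conditional branch to if_true
// is emitted; when if_true falls through, the condition is negated and only
// the branch to if_false is emitted; a non-fall-through pair costs a
// conditional branch plus an unconditional one.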
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  }
  return StackOperand(var);
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  MemOperand location = VarOperand(var, dest);  // Use dest as scratch.
  __ lw(dest, location);
}

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Don't emit the write barrier if the location is in the stack frame.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kRAHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test context.
  // Otherwise, we let the Visit function deal with the preparation to avoid
  // preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}

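// Roughly, when should_normalize is set the test result is re-materialized
// as a canonical true/false split at the bailout point, so that unoptimized
// code resuming here sees a proper boolean rather than an arbitrary truthy
// value; the skip label jumps over this normalization on the non-deopt path.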
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, "Declaration in with context.", a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, "Declaration in catch context.", a1, Operand(t0));
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  bool hole_init = variable->binding_needs_init();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Push initial value, if any. Note: For variables we must not push an
      // initial value (such as 'undefined') because we may have a (legal)
      // redeclaration and we must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ li(a1, Operand(instance));
      __ sw(a1, ContextOperand(cp, variable->index()));
      Visit(declaration->module());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      break;
    }
    default:
      break;
  }
}

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals; the context is the first
  // argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
}

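// Each case comparison above first tries an inlined smi '===' comparison
// guarded by a JumpPatchSite, and only falls back to the compare IC for
// non-smi operands; the IC call is keyed by clause->CompareId() so type
// feedback is recorded per case clause.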
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC ignore null
  // and undefined in contrast to the specification; see ECMA-262 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for the
  // JSObject::IsSimpleEnum cache validity checks.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being iterated
  // over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast modification
  // check; otherwise we got a fixed array and have to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it and
  // push the map, the cache, its length (as smi) and a zero index.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(v0, a1);
  __ Push(a2, a1, a0);
  __ jmp(&loop);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(a1, cell);
  __ GetObjectType(a2, a3, a3);
  __ bind(&non_proxy);

  // Generate code for doing the condition check: exit if the current index
  // (a0) has reached the cache length (a1).
  __ bind(&loop);
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.

  // If the expected map still matches that of the enumerable, the cached
  // name can be used directly.
  Label update_each;
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property any
  // more. If the property has been removed while iterating, just skip it.
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Go to the next element by incrementing the index on the stack.
  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}

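// In rough terms, the for-in fast path above iterates the map's enum cache
// (map, cache array, length and index live on the stack), while the slow
// path iterates a fixed array of names returned by the runtime, re-checking
// each name against the receiver in case properties were deleted during
// iteration. The recorded type feedback cell marks which case was taken.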
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space
  // for nested functions that don't need literals cloning. If we're running
  // with the --always-opt or the --prepare-always-opt flag, we need to use
  // the runtime function so that the new function gets a chance to have its
  // code optimized and doesn't just get a copy of the existing unoptimized
  // code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ li(a0, Operand(info));
    __ push(a0);
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more context
    // extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at global context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(a0, GlobalObjectOperand());
  __ li(a2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  return ContextOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without introducing
  // variables; in those cases we do not want to perform a runtime call for
  // all variables in the scope containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
      } else {  // LET || CONST_HARMONY
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ lw(a0, GlobalObjectOperand());
      __ li(a2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // Determine whether the hole check can be elided: only when the
        // binding is provably initialized before this use.
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source positions.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized legacy const bindings yield undefined.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
      break;
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // t1 = materialized value (RegExp literal), t0 = literals array,
  // a3 = literal index, a2 = pattern, a1 = flags, a0 = function/clone.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function; result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(t1);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ push(a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);
  // v0: newly allocated regexp; t1: materialized regexp; a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  __ Push(a3, a2, a1, a0);
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that is shadowed
  // by a later occurrence of the same key. For the marked expressions, no
  // store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->handle()));
            __ lw(a1, MemOperand(sp));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  __ Push(a3, a2, a1);
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
                        1, a1, a2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode = has_fast_elements
        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(v0);
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (constant_elements_kind == FAST_ELEMENTS) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ lw(a1, MemOperand(sp));  // Copy of array literal.
      __ li(a2, Operand(Smi::FromInt(i)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }
  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ lw(a1, MemOperand(sp, 0));
        __ push(v0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side
    // effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->handle()));
  // Call load IC; it expects receiver in a0 and property name in a2.
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  // Call keyed load IC; it expects key in a0 and receiver in a1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}

void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ Branch(&stub_call);
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ Branch(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ mflo(scratch1);
      __ mfhi(scratch2);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ mflo(v0);
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}

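// Most of the smi arithmetic above works directly on tagged values: a smi is
// the 31-bit payload shifted left by one (tag bit 0), so tagged ADD and SUB
// are plain 32-bit adds/subs with an overflow check, while MUL and the
// shifts must untag one operand first and re-tag (or range-check) the result
// before returning it as a smi. The MUL zero check distinguishes +0 from -0,
// which cannot be represented as a smi.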
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}

void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ pop(a0);  // Restore value.
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      __ pop(a2);
      __ pop(a0);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
    __ lw(a1, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ lw(a1, StackOperand(var));
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a1, Operand(t0));
      __ sw(result_register(), StackOperand(var));
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope. We therefore bypass the normal static scope lookup and call
      // the runtime, which can drill a hole to the correct context even
      // from inside a 'with'.
      __ push(v0);
      __ li(a0, Operand(var->name()));
      __ Push(cp, a0);  // Context and name.
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(v0);  // Value.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, a1, a0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, a1);
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      __ sw(result_register(), location);
      if (var->IsContextSlot()) {
        // RecordWrite may destroy all its register arguments.
        __ mov(a3, result_register());
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, a1);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
      }
      // Perform the assignment.
      __ sw(v0, location);
      if (var->IsContextSlot()) {
        __ mov(a3, v0);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(v0);  // Value.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, a1, a0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ lw(t0, MemOperand(sp, kPointerSize));  // Receiver is now under value.
    __ push(t0);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());  // Load the value.
  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
  // Load receiver to a1; leave a copy in the stack if needed for turning
  // the receiver into fast case.
  if (expr->ends_initialization_block()) {
    __ lw(a1, MemOperand(sp));
  } else {
    __ pop(a1);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(v0);  // Result of assignment, saved even if not needed.
    __ lw(t0, MemOperand(sp, kPointerSize));  // Receiver under result value.
    __ push(t0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(v0);
    __ Drop(1);
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ lw(t0, MemOperand(sp, 2 * kPointerSize));  // Receiver under key/value.
    __ push(t0);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC; the arguments are value, key, and receiver.
  __ mov(a0, result_register());
  __ pop(a1);  // Key.
  if (expr->ends_initialization_block()) {
    __ lw(a2, MemOperand(sp));  // Leave receiver on the stack for later.
  } else {
    __ pop(a2);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(v0);  // Result of assignment, saved even if not needed.
    __ lw(t0, MemOperand(sp, kPointerSize));  // Receiver under result value.
    __ push(t0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(v0);
    __ Drop(1);
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(a1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}

void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               unsigned ast_id) {
  ic_total_count_++;
  __ Call(code, rmode, ast_id);
}

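// Roughly, passing an ast_id here attaches the AST node id to the call
// site's relocation info; the deoptimizer and the type-feedback oracle use
// it to map the IC back to the expression that produced it.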
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ li(a2, Operand(name));
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(a1);
  __ push(v0);
  __ push(a1);

  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);  // Drop the key still on the stack.
}

void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets.
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->id(), cell);
  __ li(a2, Operand(cell));

  CallFunctionStub stub(arg_count, flags);
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
  }
  __ push(a1);

  // Push the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
  __ push(a1);

  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}

void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to resolve
    // the function we need to call and the receiver of the call. Then we
    // call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ lw(a0, GlobalObjectOperand());
    __ push(a0);
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF,
                                &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0) and
    // the object holding it (returned in v1).
    __ push(context_register());
    __ li(a2, Operand(proxy->name()));
    __ push(a2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(v0, v1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function. The receiver is implicitly the global receiver;
      // indicate this by passing the hole to the call function stub.
      __ push(v0);
      __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object and emit function call.
    __ lw(a1, GlobalObjectOperand());
    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
    __ push(a1);
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function expression
  // in new calls must be evaluated before the arguments.

  // Push constructor on the stack. If it's not a function it's used as the
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code, but not in the snapshot.
  CallFunctionFlags flags;
  if (!Serializer::enabled()) {
    flags = RECORD_CALL_TARGET;
    Handle<Object> uninitialized =
        TypeFeedbackCells::UninitializedSentinel(isolate());
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
    RecordTypeFeedbackCell(expr->id(), cell);
    __ li(a2, Operand(cell));
  } else {
    flags = NO_CALL_FUNCTION_FLAGS;
  }

  CallConstructStub stub(flags);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}

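// The JSGlobalPropertyCell allocated above starts out holding the
// uninitialized sentinel; the construct stub updates it with the observed
// call target, and the optimizing compiler later reads it to decide whether
// the construct site is monomorphic. This is a summary of the surrounding
// code, not a full account of the feedback protocol.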
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(t0, v0, Operand(kSmiTagMask));
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

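// A single AND implements the smi test: smis carry tag bit 0 in the least
// significant bit, so `v0 & kSmiTagMask` is zero exactly when v0 holds a
// smi. EmitIsNonNegativeSmi below extends the mask with 0x80000000 to also
// reject negative values in the same comparison.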
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (FLAG_debug_code) __ AbortIfSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(if_true, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for the valueOf symbol in the descriptor array and indicate false
  // if found.
  __ LoadInstanceDescriptors(a1, t0);
  __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset));
  // Calculate the end of the descriptor array.
  __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(a2, a2, t1);

  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                          DescriptorArray::kFirstIndex * kPointerSize));
  // Loop through all the keys in the descriptor array. If one of these is
  // the symbol valueOf the result is false.
  Label entry, loop;
  // The use of t2 to store the valueOf symbol assumes that it is not
  // otherwise used in the loop below.
  __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype; if not the result is
  // false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ Branch(if_false, ne, a2, Operand(a3));

  // Set the bit in the map to indicate that it has been checked safe for
  // default valueOf, and return true.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ jmp(if_true);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal parameter
  // count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: read the arguments length from the adaptor
  // frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ Branch(&function, ge, a1, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));

  // Check if the constructor in the map is a function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the instance class name.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call: argument 0 is a literal string
  // selecting the log type, argument 1 a format string, argument 2 an
  // array of arguments for the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Save the new heap number in callee-saved register s0, since we call
  // out to external C code below.
  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);

  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(s0, v0);  // Save result in s0, so it survives the C call.

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in v0 to a double in the range [0, 1) by
  // computing ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
  if (CpuFeatures::IsSupported(FPU)) {
    __ PrepareCallCFunction(1, a0);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
    __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

    CpuFeatures::Scope scope(FPU);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
    __ li(a1, Operand(0x41300000));
    // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
    __ Move(f12, v0, a1);
    // Move 0x4130000000000000 to FPU.
    __ Move(f14, zero_reg, a1);
    // Subtract and store the result in the heap number.
    __ sub_d(f0, f12, f14);
    __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset));
    __ mov(v0, s0);
  } else {
    __ PrepareCallCFunction(2, a0);
    __ mov(a0, s0);
    __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
    __ CallCFunction(
        ExternalReference::fill_heap_number_with_random_function(isolate()),
        2);
  }

  context()->Plug(v0);
}

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

#ifdef DEBUG
  __ AbortIfSmi(object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Assert(eq, "Trying to get date field from non-date.",
            scratch1, Operand(JS_DATE_TYPE));
#endif

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }

  context()->Plug(v0);
}

void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  if (CpuFeatures::IsSupported(FPU)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(v0);
}

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value, a1 = object.

  Label done;
  // If the object is a smi or not a value type, return the value.
  __ JumpIfSmi(a1, &done);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value first, as the barrier code
  // clobbers its register arguments.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


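// The two slow labels encode String.prototype.charCodeAt's edge cases: an
// out-of-range index yields NaN, and a non-smi index falls back to the
// generator's slow case with undefined in the result register to force
// conversion.  For example (sketch of the semantics, not emitted code):
//
//   // "abc".charCodeAt(1)   -> 98
//   // "abc".charCodeAt(9)   -> NaN  (index_out_of_range path)
//   // "abc".charCodeAt(1.5) -> handled in the slow case (need_conversion)

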
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // Check for non-function argument (including proxy).
  Label runtime, done;
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1.  Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}


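// %_CallFunction(receiver, arg1, ..., argN, function) pushes the receiver and
// the N arguments, then invokes 'function'; arg_count is therefore
// args->length() - 2, since the receiver and the callee are not arguments.
// Stack layout just before InvokeFunction, as a sketch (stack grows down):
//
//   sp ->  [ argN     ]
//          [ ...      ]
//          [ arg1     ]
//          [ receiver ]
//
// with the callee itself in a1, where InvokeFunction expects it.

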
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
  __ lw(cache,
        ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds the finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of the fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to the key of the indexed element of the cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}


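// JSFunctionResultCache layout probed above, as a sketch: the cache is a
// FixedArray whose finger field (kFingerOffset) is a smi index of the most
// recently used key; keys and values are stored as adjacent elements:
//
//   [ header... | finger | ... | key_i | value_i | ... ]
//
// so on a hit (cache[finger] == key) the result is the following element,
// loaded above as MemOperand(a3, kPointerSize); a miss falls back to
// Runtime::kGetFromCache.

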
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = v0;
  Register left = a1;
  Register tmp = a2;
  Register tmp2 = a3;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));  // Result (right) in v0.
  __ pop(left);

  Label done, fail, ok;
  __ Branch(&ok, eq, left, Operand(right));
  // Fail if either is a non-HeapObject.
  __ And(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
  __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ Branch(&fail, ne, tmp, Operand(tmp2));
  __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ Branch(&ok, eq, tmp, Operand(tmp2));

  __ bind(&fail);
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&ok);
  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


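// A string's hash field multiplexes the computed hash with an optional cached
// array index; String::kContainsCachedArrayIndexMask selects the bits that
// must all be zero when an index is cached.  In effect (sketch):
//
//   bool HasCachedArrayIndex(String* s) {
//     return (s->hash_field() & String::kContainsCachedArrayIndexMask) == 0;
//   }

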
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(v0);
  }

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing the array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  if (FLAG_debug_code) {
    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
        array_length, Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.  array_length is
  // not a smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mult(array_length, scratch1);
  // Check for smi overflow.  No overflow if the higher 33 bits of the
  // 64-bit result are zero.
  __ mfhi(scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
  __ mflo(scratch2);
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get the first element to free up the elements register for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  __ AllocateAsciiString(result,
                         string_length,
                         scratch1,
                         scratch2,
                         elements_end,
                         &bailout);
  // Prepare for looping.  Set up elements_end to the end of the array and
  // result_pos to the position in the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, String::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace the separator with its ASCII character value.
  __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character).  Entry is
  // at the label long_separator below.
  __ bind(&long_separator_loop);
  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


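// Result length computed above, in scalar form (sketch): joining n elements
// of total length L with a separator of length s needs
//
//   L + (n - 1) * s  ==  (L - s) + n * s
//
// characters.  The right-hand form is what the code uses, so the single
// multiplication n * s can be overflow-checked once via Mult/mfhi before the
// final AdduAndCheckForOverflow.

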
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ lw(a0, GlobalObjectOperand());
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
    __ push(a0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ li(a2, Operand(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(v0);
}


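// Dispatch summary (sketch): a '%_Name(...)' call (leading underscore) is
// inlined by EmitInlineRuntimeCall into one of the Emit* helpers above, while
// a plain '%Name(...)' either calls the C++ runtime directly via CallRuntime
// or, when expr->is_jsruntime(), performs a CallIC against the builtins
// object.  For example:
//
//   %_IsSmi(x)          // expands inline, no call at all
//   %NumberToString(x)  // calls Runtime::kNumberToString in C++

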
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag =
            is_classic_mode() ? kNonStrictMode : kStrictMode;
        __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(is_classic_mode() || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ li(a2, Operand(var->name()));
          __ push(a2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      __ mov(a0, result_register());
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // The stub expects the argument to be in a0.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  __ mov(a0, result_register());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(v0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ lw(a1, MemOperand(sp, 0));
      __ push(v0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  __ JumpIfSmi(v0, &no_conversion);
  __ mov(a0, v0);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack.  If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }
  __ mov(a0, result_register());

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ li(a1, Operand(Smi::FromInt(count_value)));

  if (ShouldInlineSmiCase(expr->op())) {
    __ AdduAndCheckForOverflow(v0, a0, a1, t0);
    __ BranchOnOverflow(&stub_call, t0);  // Do stub on overflow.

    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(v0, &done);
    __ bind(&stub_call);
  }
  __ mov(a1, a0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ li(a2, Operand(prop->key()->AsLiteral()->handle()));  // Name.
      __ pop(a1);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ pop(a1);  // Key.
      __ pop(a2);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


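// The inlined smi case above leans on the tagging scheme: a smi stores the
// integer shifted left by one bit, so adding the tagged constant
// Smi::FromInt(+1) or Smi::FromInt(-1) is a single machine add whose signed
// overflow exactly detects leaving smi range (sketch):
//
//   // x++ where x is a smi:  v0 = a0 + 2; on overflow -> BinaryOpStub
//   // x-- where x is a smi:  v0 = a0 - 2; on overflow -> BinaryOpStub

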
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ lw(a0, GlobalObjectOperand());
    __ li(a2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


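// The branches above hard-code the typeof table; as a sketch of what each
// arm decides:
//
//   typeof x == "number"     smi, or map is the heap-number map
//   typeof x == "string"     instance type < FIRST_NONSTRING_TYPE and not
//                            undetectable
//   typeof x == "boolean"    x is the true or false root
//   typeof x == "undefined"  undefined root, or an undetectable object
//   typeof x == "function"   JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//   typeof x == "object"     null (unless --harmony-typeof) or a
//                            non-callable JS object that is not undetectable

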
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = eq;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = eq;
          break;
        case Token::LT:
          cc = lt;
          break;
        case Token::GT:
          cc = gt;
          break;
        case Token::LTE:
          cc = le;
          break;
        case Token::GTE:
          cc = ge;
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expression for
  // if/else.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Heap::RootListIndex nil_value = nil == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ mov(a0, result_register());
  __ LoadRoot(a1, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Heap::RootListIndex other_nil_value = nil == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    __ Branch(if_true, eq, a0, Operand(a1));
    __ LoadRoot(a1, other_nil_value);
    __ Branch(if_true, eq, a0, Operand(a1));
    __ JumpIfSmi(a0, if_false);
    // It can be an undetectable object.
    __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


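// Semantics being specialized above, as a sketch: '=== null' (and
// '=== undefined') is a single identity check against the corresponding
// root, while the loose '== null' also accepts the other nil value and
// undetectable objects:
//
//   x === null  ->  x is exactly the null root
//   x == null   ->  x is null, undefined, or carries Map::kIsUndetectable

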
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Pass a smi sentinel and let the runtime look up the empty function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Eval code gets the same closure as the context calling eval.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore the cooked return address and the saved result register.
  __ pop(a1);
  __ pop(result_register());
  // Uncook the return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


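// The return address is "cooked" in EnterFinallyBlock as a smi-tagged delta
// from the code object's start so that it stays valid if a GC moves the code
// while the finally block runs.  The arithmetic, in scalar form (sketch):
//
//   cooked = (ra - CodeObject()) << 1;       // smi-tag the delta
//   ...                                      // finally body, may GC
//   target = (cooked >> 1) + CodeObject();   // re-base and jump

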
#undef __
#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS