#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm_)
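// A patch site is a location in generated code that the inline cache system
// can later patch. The cmp(reg, reg) emitted below is a placeholder that
// encodes smi-check information for the IC to rewrite, and EmitPatchInfo()
// records the distance back to the patch site in the operand of a cmp.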
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
    }
  }

  MacroAssembler* masm_;
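// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address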
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  if (!info->is_classic_mode() || info->is_native()) {
    __ cmp(r5, Operand(0));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();

  if (locals_count > 0) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);

  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (!is_classic_mode()) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);
    SetVar(arguments, r0, r1, r2);

  __ CallRuntime(Runtime::kTraceEnter, 0);
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
             function->proxy()->var()->mode() == CONST_HARMONY);
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    PredictableCodeSizeScope predictable(masm_);

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
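// Helpers for the count-based interrupt mechanism: the profiling counter
// lives in a JSGlobalPropertyCell (loaded into r2 below); it is decremented
// on back edges and at returns, and reset after the interrupt has been
// serviced.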
void FullCodeGenerator::ClearAccumulator() {

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (isolate()->IsDebuggerActive()) {
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
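// Emitted at the back edge of every loop. When count-based interrupts are
// enabled, the profiling counter is decremented by a weight proportional to
// the distance of the back edge; otherwise the stack limit is compared
// against sp before invoking the stack-check stub.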
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceUnit));
    }
    EmitProfilingCounterDecrement(weight);
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    PredictableCodeSizeScope predictable(masm_);

  RecordStackCheck(stmt->OsrEntryId());

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
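// The return sequence: the function exit is treated as a backwards jump to
// the entry for profiling purposes, so the counter is decremented here too.
// Frame teardown runs under a PredictableCodeSizeScope so that its length
// can be asserted against check_exit_codesize.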
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    __ CallRuntime(Runtime::kTraceExit, 1);
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceUnit));
      }
      EmitProfilingCounterDecrement(weight);
      __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      EmitProfilingCounterReset();
    }

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    PredictableCodeSizeScope predictable(masm_);
    masm_->add(sp, sp, Operand(sp_delta));
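// The Plug/DropAndPlug methods move a value into the current expression
// context's destination: EffectContext discards it, AccumulatorValueContext
// leaves it in the result register (r0), StackValueContext pushes it, and
// TestContext branches on it via DoTest().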
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);

void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
  __ push(result_register());

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ b(false_label_);
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ b(false_label_);
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  __ Move(result_register(), reg);

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
void FullCodeGenerator::EffectContext::Plug(bool flag) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);

void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  }
  return StackOperand(var);

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use the destination register as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);

void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, "Declaration in with context.");
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, "Declaration in catch context.");
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ str(ip, StackOperand(variable));

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));

      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(cp,

      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());

      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ mov(r1, Operand(instance));
      Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ mov(r1, Operand(pairs));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
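// Switch statements are compiled as a chain of compare-and-branch tests
// against each case label. An inlined smi comparison (patchable through
// JumpPatchSite) handles the common case before falling back to the
// comparison IC; the case bodies are then emitted in source order.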
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  Label next_test;  // Recycled for each test.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    VisitForAccumulatorValue(clause->label());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);
      __ b(ne, &next_test);
      __ b(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ cmp(r0, Operand(0));
    __ b(ne, &next_test);
    __ b(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
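// For-in first normalizes the enumerable (null/undefined skip the loop,
// non-objects are converted to objects), then tries the map's cached enum
// keys, falling back to Runtime::kGetPropertyNamesFast and a fixed array of
// names; properties deleted during iteration are filtered out on each pass.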
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ b(ge, &done_convert);
  __ bind(&done_convert);

  __ b(le, &call_runtime);
  __ CheckEnumCache(null_value, &call_runtime);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ b(ne, &fixed_array);

  Label no_descriptors;
  __ bind(&use_cache);
  __ b(eq, &no_descriptors);
  __ LoadInstanceDescriptors(r0, r2);
  __ bind(&no_descriptors);

  __ bind(&fixed_array);
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(r1, cell);
  __ b(gt, &non_proxy);
  __ bind(&non_proxy);

  __ b(hs, loop_statement.break_label());
  __ b(eq, &update_each);
  __ b(eq, &update_each);
  __ b(eq, loop_statement.continue_label());
  __ bind(&update_each);
  __ mov(result_register(), r3);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  __ bind(loop_statement.break_label());
  decrement_loop_depth();
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ mov(r0, Operand(info));
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;

  if (s->num_heap_slots() > 0) {
    if (s->calls_non_strict_eval()) {

    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s->is_eval_scope()) {
    if (!current.is(next)) {
      __ Move(next, current);
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);

  __ mov(r2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);

  Variable* local = var->local_if_not_shadowed();
  __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
  if (local->mode() == CONST ||
      local->mode() == LET) {
    __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
    if (local->mode() == CONST) {
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
    } else {
      __ mov(r0, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  switch (var->location()) {
      Comment cmnt(masm_, "Global variable");
      __ mov(r2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(r0);

      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;

          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          __ mov(r0, Operand(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          context()->Plug(r0);

      context()->Plug(var);

      Comment cmnt(masm_, "Lookup variable");
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(r0);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");

  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ b(ne, &materialized);

  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  Label allocated, runtime_allocate;

  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);
  context()->Plug(r0);

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->handle()));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
          } else {
            VisitForEffect(value);

        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);

        accessor_table.lookup(key)->second->getter = value;
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
  __ mov(r1, Operand(constant_elements));
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
                FLAG_smi_only_arrays);
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);

    if (subexpr->AsLiteral() != NULL ||

    if (!result_saved) {
      result_saved = true;

    VisitForAccumulatorValue(subexpr);

      __ RecordWriteField(r1, offset, result_register(), r2,
      StoreArrayLiteralElementStub stub;

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");

  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
        VisitForStackValue(property->obj());
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(r0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
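// Inline smi fast path for binary operations: the operands are or-ed
// together and smi-checked through a patchable JumpPatchSite; each smi case
// below includes its own overflow or range check and branches to the
// BinaryOpStub call on failure.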
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  Register left = r1;
  Register right = r0;

  __ orr(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));

    case Token::SHL:
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
      __ b(mi, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    case Token::SHR:
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL:
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
      __ cmp(scratch1, Operand(0));

      __ add(scratch2, right, Operand(left), SetCC);
      __ b(mi, &stub_call);

    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
  }
  __ bind(&done);

  context()->Plug(r0);

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      VisitForAccumulatorValue(prop->obj());
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      break;
    }
    case KEYED_PROPERTY: {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      break;
    }
  }
  context()->Plug(r0);
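// Variable stores dispatch on where the binding lives and on the operation:
// unallocated (global) variables go through a StoreIC, const initialization
// and let assignments perform hole checks first, and lookup slots fall back
// to Runtime::kStoreContextSlot.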
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    __ mov(r2, Operand(var->name()));
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    ASSERT(!var->IsParameter());
    if (var->IsStackLocal()) {
      __ ldr(r1, StackOperand(var));
      __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
      __ str(result_register(), StackOperand(var));
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      __ mov(r0, Operand(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, r1);
      __ ldr(r3, location);
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ mov(r3, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ str(result_register(), location);
      if (var->IsContextSlot()) {
        __ mov(r3, result_register());
        __ RecordWriteContextSlot(

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, r1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        __ ldr(r2, location);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.");
      }
      __ str(r0, location);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(
    } else {
      ASSERT(var->IsLookupSlot());
      __ mov(r1, Operand(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  SetSourcePosition(expr->position());
  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(r0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(r0);
  }

void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id) {
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ mov(r2, Operand(name));
  }
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->Plug(r0);

void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  VisitForAccumulatorValue(key);

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, r0);

void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ mov(r2, Operand(cell));

  CallFunctionStub stub(arg_count, flags);
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, r0);
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  }

  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
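// Calls dispatch on the shape of the callee: possibly-eval calls resolve
// the target dynamically, global variables use a call IC, lookup slots go
// through the runtime, property accesses use named or keyed call ICs, and
// everything else is an arbitrary expression call. The return_is_recorded_
// flag checks that RecordJSReturnSite runs on every path.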
void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      EmitResolvePossiblyDirectEval(arg_count);
    }
    SetSourcePosition(expr->position());
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, r0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    { PreservePositionScope scope(masm()->positions_recorder());

    __ push(context_register());
    __ mov(r2, Operand(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);

    if (done.is_linked()) {
      __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);

  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);

  ASSERT(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  SetSourcePosition(expr->position());
  __ mov(r0, Operand(arg_count));

  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ mov(r2, Operand(cell));

  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(r0);
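// The Emit* functions below implement inline runtime calls (%_IsSmi and
// friends). Each follows the same shape: evaluate the argument into the
// accumulator, set up a test context, perform the check, and Split() on the
// resulting condition code.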
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(r0);
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(r3, r1);
  __ cmp(r3, Operand(0));
  __ LoadInstanceDescriptors(r1, r4);
  __ JumpIfSmi(r2, if_false);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ b(ne, &check_frame_marker);

  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  context()->Plug(r0);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  context()->Plug(r0);
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(r0, &null);
  __ b(eq, &function);
  __ b(eq, &function);
  __ b(ne, &non_function_constructor);
  __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);

  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  context()->Plug(r0);

void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallRuntime(Runtime::kLog, 2);

  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  context()->Plug(r0);
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);

  __ bind(&heapnumber_allocated);
  __ PrepareCallCFunction(1, r0);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

    CpuFeatures::Scope scope(VFP2);
    __ mov(r1, Operand(0x41000000));
    __ orr(r1, r1, Operand(0x300000));

  __ PrepareCallCFunction(2, r0);
  __ CallCFunction(
      ExternalReference::fill_heap_number_with_random_function(isolate()), 2);

  context()->Plug(r0);
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(r0);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(r0);

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ JumpIfSmi(r0, &done);

  context()->Plug(r0);
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));

  Label runtime, done, not_date_object;
  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  __ JumpIfSmi(object, &not_date_object);
  __ b(ne, &not_date_object);

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ mov(scratch1, Operand(stamp));
    __ cmp(scratch1, scratch0);
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);

  context()->Plug(r0);
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kMath_pow, 2);
  context()->Plug(r0);

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ JumpIfSmi(r1, &done);
  __ RecordWriteField(

  context()->Plug(r0);

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  context()->Plug(r0);

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(r1);
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ bind(&need_conversion);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  Label need_conversion;
  Label index_out_of_range;
                                    &index_out_of_range,

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  context()->Plug(r0);

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  context()->Plug(r0);

void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(r0);

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());

  Label runtime, done;
  __ JumpIfSmi(r0, &runtime);

  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ CallRuntime(Runtime::kCall, args->length());

  context()->Plug(r0);

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(r0);
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register cache = r1;

  Label done, not_found;
  __ b(ne, &not_found);

  __ bind(&not_found);
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  context()->Plug(r0);
3476 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3477 ZoneList<Expression*>* args = expr->arguments();
3480 Register right =
r0;
3485 VisitForStackValue(args->at(0));
3486 VisitForAccumulatorValue(args->at(1));
3489 Label done, fail, ok;
3490 __ cmp(left, Operand(right));
3493 __ and_(tmp, left, Operand(right));
3494 __ JumpIfSmi(tmp, &fail);
3500 __ cmp(tmp, Operand(tmp2));
3507 __ LoadRoot(
r0, Heap::kFalseValueRootIndex);
3510 __ LoadRoot(
r0, Heap::kTrueValueRootIndex);
3513 context()->Plug(
r0);
3517 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3518 ZoneList<Expression*>* args = expr->arguments();
3519 VisitForAccumulatorValue(args->at(0));
3521 Label materialize_true, materialize_false;
3522 Label* if_true =
NULL;
3523 Label* if_false =
NULL;
3524 Label* fall_through =
NULL;
3525 context()->PrepareTest(&materialize_true, &materialize_false,
3526 &if_true, &if_false, &fall_through);
3530 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3531 Split(
eq, if_true, if_false, fall_through);
3533 context()->Plug(if_true, if_false);
3537 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3538 ZoneList<Expression*>* args = expr->arguments();
3539 ASSERT(args->length() == 1);
3540 VisitForAccumulatorValue(args->at(0));
3542 __ AssertString(
r0);
3545 __ IndexFromHash(
r0,
r0);
3547 context()->Plug(
r0);
3551 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3552 Label bailout, done, one_char_separator, long_separator,
3553 non_trivial_array, not_size_one_array, loop,
3554 empty_separator_loop, one_char_separator_loop,
3555 one_char_separator_loop_entry, long_separator_loop;
3556 ZoneList<Expression*>* args = expr->arguments();
3557 ASSERT(args->length() == 2);
3558 VisitForStackValue(args->at(1));
3559 VisitForAccumulatorValue(args->at(0));
3562 Register array =
r0;
3563 Register elements =
no_reg;
3564 Register result =
no_reg;
3565 Register separator =
r1;
3566 Register array_length =
r2;
3567 Register result_pos =
no_reg;
3568 Register string_length =
r3;
3569 Register
string =
r4;
3570 Register element =
r5;
3571 Register elements_end =
r6;
3572 Register scratch1 =
r7;
3573 Register scratch2 =
r9;
3579 __ JumpIfSmi(array, &bailout);
3584 __ CheckFastElements(scratch1, scratch2, &bailout);
3588 __ SmiUntag(array_length,
SetCC);
3589 __ b(
ne, &non_trivial_array);
3590 __ LoadRoot(
r0, Heap::kEmptyStringRootIndex);
3593 __ bind(&non_trivial_array);
3602 __ mov(string_length, Operand(0));
3614 if (generate_debug_code_) {
3615 __ cmp(array_length, Operand(0));
3616 __ Assert(
gt,
"No empty arrays here in EmitFastAsciiArrayJoin");
3620 __ JumpIfSmi(
string, &bailout);
3623 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3625 __ add(string_length, string_length, Operand(scratch1),
SetCC);
3627 __ cmp(element, elements_end);
3631 __ cmp(array_length, Operand(1));
3632 __ b(
ne, ¬_size_one_array);
3636 __ bind(¬_size_one_array);
3645 __ JumpIfSmi(separator, &bailout);
3648 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3654 __ sub(string_length, string_length, Operand(scratch1));
3655 __ smull(scratch2,
ip, array_length, scratch1);
3658 __ cmp(
ip, Operand(0));
3660 __ tst(scratch2, Operand(0x80000000));
3662 __ add(string_length, string_length, Operand(scratch2),
SetCC);
3664 __ SmiUntag(string_length);
3677 __ AllocateAsciiString(result,
3687 result_pos = array_length;
3696 __ b(
eq, &one_char_separator);
3697 __ b(
gt, &long_separator);
3700 __ bind(&empty_separator_loop);
3709 __ SmiUntag(string_length);
3711 __ CopyBytes(
string, result_pos, string_length, scratch1);
3712 __ cmp(element, elements_end);
3713 __ b(
lt, &empty_separator_loop);
3718 __ bind(&one_char_separator);
3723 __ jmp(&one_char_separator_loop_entry);
3725 __ bind(&one_char_separator_loop);
3736 __ bind(&one_char_separator_loop_entry);
3739 __ SmiUntag(string_length);
3741 __ CopyBytes(
string, result_pos, string_length, scratch1);
3742 __ cmp(element, elements_end);
3743 __ b(
lt, &one_char_separator_loop);
3749 __ bind(&long_separator_loop);
3758 __ SmiUntag(string_length);
3762 __ CopyBytes(
string, result_pos, string_length, scratch1);
3764 __ bind(&long_separator);
3767 __ SmiUntag(string_length);
3769 __ CopyBytes(
string, result_pos, string_length, scratch1);
3770 __ cmp(element, elements_end);
3771 __ b(
lt, &long_separator_loop);
3776 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex);
3778 context()->Plug(
r0);
3782 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3783 Handle<String> name = expr->name();
3784 if (name->length() > 0 && name->Get(0) ==
'_') {
3785 Comment cmnt(masm_,
"[ InlineRuntimeCall");
3786 EmitInlineRuntimeCall(expr);
3790 Comment cmnt(masm_,
"[ CallRuntime");
3791 ZoneList<Expression*>* args = expr->arguments();
3793 if (expr->is_jsruntime()) {
3801 int arg_count = args->length();
3802 for (
int i = 0; i < arg_count; i++) {
3803 VisitForStackValue(args->at(i));
3806 if (expr->is_jsruntime()) {
3808 __ mov(
r2, Operand(expr->name()));
3809 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3811 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3812 CallIC(ic, mode, expr->CallRuntimeFeedbackId());
3817 __ CallRuntime(expr->function(), arg_count);
3819 context()->Plug(
r0);
3823 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3824 switch (expr->op()) {
3825 case Token::DELETE: {
3826 Comment cmnt(masm_,
"[ UnaryOperation (DELETE)");
3827 Property*
property = expr->expression()->AsProperty();
3828 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3830 if (property !=
NULL) {
3831 VisitForStackValue(property->obj());
3832 VisitForStackValue(property->key());
3838 context()->Plug(
r0);
3839 }
else if (proxy !=
NULL) {
3840 Variable* var = proxy->var();
3844 if (var->IsUnallocated()) {
3846 __ mov(
r1, Operand(var->name()));
3850 context()->Plug(
r0);
3851 }
else if (var->IsStackAllocated() || var->IsContextSlot()) {
3854 context()->Plug(var->is_this());
3858 __ push(context_register());
3859 __ mov(
r2, Operand(var->name()));
3861 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3862 context()->Plug(
r0);
3867 VisitForEffect(expr->expression());
3868 context()->Plug(
true);
3874 Comment cmnt(masm_,
"[ UnaryOperation (VOID)");
3875 VisitForEffect(expr->expression());
3876 context()->Plug(Heap::kUndefinedValueRootIndex);
3881 Comment cmnt(masm_,
"[ UnaryOperation (NOT)");
3882 if (context()->IsEffect()) {
3885 VisitForEffect(expr->expression());
3886 }
else if (context()->IsTest()) {
3889 VisitForControl(expr->expression(),
3890 test->false_label(),
3892 test->fall_through());
3893 context()->Plug(test->true_label(), test->false_label());
3899 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3900 Label materialize_true, materialize_false, done;
3901 VisitForControl(expr->expression(),
3905 __ bind(&materialize_true);
3906 PrepareForBailoutForId(expr->MaterializeTrueId(),
NO_REGISTERS);
3907 __ LoadRoot(
r0, Heap::kTrueValueRootIndex);
3908 if (context()->IsStackValue())
__ push(
r0);
3910 __ bind(&materialize_false);
3911 PrepareForBailoutForId(expr->MaterializeFalseId(),
NO_REGISTERS);
3912 __ LoadRoot(
r0, Heap::kFalseValueRootIndex);
3913 if (context()->IsStackValue())
__ push(
r0);
3919 case Token::TYPEOF: {
3920 Comment cmnt(masm_,
"[ UnaryOperation (TYPEOF)");
3921 { StackValueContext context(
this);
3922 VisitForTypeofValue(expr->expression());
3924 __ CallRuntime(Runtime::kTypeof, 1);
3925 context()->Plug(
r0);
3930 Comment cmt(masm_,
"[ UnaryOperation (ADD)");
3931 VisitForAccumulatorValue(expr->expression());
3932 Label no_conversion;
3933 __ JumpIfSmi(result_register(), &no_conversion);
3934 ToNumberStub convert_stub;
3935 __ CallStub(&convert_stub);
3936 __ bind(&no_conversion);
3937 context()->Plug(result_register());
3942 EmitUnaryOperation(expr,
"[ UnaryOperation (SUB)");
3945 case Token::BIT_NOT:
3946 EmitUnaryOperation(expr,
"[ UnaryOperation (BIT_NOT)");
3955 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3956 const char* comment) {
3958 Comment cmt(masm_, comment);
3959 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3962 UnaryOpStub stub(expr->op(), overwrite);
3965 VisitForAccumulatorValue(expr->expression());
3966 SetSourcePosition(expr->position());
3967 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
3968 expr->UnaryOperationFeedbackId());
3969 context()->Plug(
r0);
3973 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3974 Comment cmnt(masm_,
"[ CountOperation");
3975 SetSourcePosition(expr->position());
3979 if (!expr->expression()->IsValidLeftHandSide()) {
3980 VisitForEffect(expr->expression());
3986 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3987 LhsKind assign_type = VARIABLE;
3988 Property* prop = expr->expression()->AsProperty();
3993 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3997 if (assign_type == VARIABLE) {
3998 ASSERT(expr->expression()->AsVariableProxy()->var() !=
NULL);
3999 AccumulatorValueContext context(
this);
4000 EmitVariableLoad(expr->expression()->AsVariableProxy());
4003 if (expr->is_postfix() && !context()->IsEffect()) {
4007 if (assign_type == NAMED_PROPERTY) {
4009 VisitForAccumulatorValue(prop->obj());
4011 EmitNamedPropertyLoad(prop);
4013 VisitForStackValue(prop->obj());
4014 VisitForAccumulatorValue(prop->key());
4017 EmitKeyedPropertyLoad(prop);
4023 if (assign_type == VARIABLE) {
4024 PrepareForBailout(expr->expression(),
TOS_REG);
4026 PrepareForBailoutForId(prop->LoadId(),
TOS_REG);
4030 Label no_conversion;
4031 __ JumpIfSmi(
r0, &no_conversion);
4032 ToNumberStub convert_stub;
4033 __ CallStub(&convert_stub);
4034 __ bind(&no_conversion);
4037 if (expr->is_postfix()) {
4038 if (!context()->IsEffect()) {
4042 switch (assign_type) {
4046 case NAMED_PROPERTY:
4049 case KEYED_PROPERTY:
4058 Label stub_call, done;
4059 JumpPatchSite patch_site(masm_);
4061 int count_value = expr->op() == Token::INC ? 1 : -1;
4062 if (ShouldInlineSmiCase(expr->op())) {
4064 __ b(
vs, &stub_call);
4067 patch_site.EmitJumpIfSmi(
r0, &done);
4069 __ bind(&stub_call);
4076 SetSourcePosition(expr->position());
4079 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4080 patch_site.EmitPatchInfo();
4084 switch (assign_type) {
4086 if (expr->is_postfix()) {
4087 { EffectContext context(
this);
4088 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4090 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4095 if (!context()->IsEffect()) {
4096 context()->PlugTOS();
4099 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4101 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4102 context()->Plug(
r0);
4105 case NAMED_PROPERTY: {
4106 __ mov(
r2, Operand(prop->key()->AsLiteral()->handle()));
4108 Handle<Code> ic = is_classic_mode()
4109 ? isolate()->builtins()->StoreIC_Initialize()
4110 : isolate()->builtins()->StoreIC_Initialize_Strict();
4111 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4112 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4113 if (expr->is_postfix()) {
4114 if (!context()->IsEffect()) {
4115 context()->PlugTOS();
4118 context()->Plug(
r0);
4122 case KEYED_PROPERTY: {
4125 Handle<Code> ic = is_classic_mode()
4126 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4127 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4128 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4129 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4130 if (expr->is_postfix()) {
4131 if (!context()->IsEffect()) {
4132 context()->PlugTOS();
4135 context()->Plug(
r0);
4143 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4144 ASSERT(!context()->IsEffect());
4145 ASSERT(!context()->IsTest());
4146 VariableProxy* proxy = expr->AsVariableProxy();
4147 if (proxy !=
NULL && proxy->var()->IsUnallocated()) {
4148 Comment cmnt(masm_,
"Global variable");
4150 __ mov(
r2, Operand(proxy->name()));
4151 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4155 PrepareForBailout(expr,
TOS_REG);
4156 context()->Plug(
r0);
4157 }
else if (proxy !=
NULL && proxy->var()->IsLookupSlot()) {
4162 EmitDynamicLookupFastCase(proxy->var(),
INSIDE_TYPEOF, &slow, &done);
4165 __ mov(
r0, Operand(proxy->name()));
4167 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4168 PrepareForBailout(expr,
TOS_REG);
4171 context()->Plug(
r0);
4174 VisitInDuplicateContext(expr);
4179 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4180 Expression* sub_expr,
4181 Handle<String>
check) {
4182 Label materialize_true, materialize_false;
4183 Label* if_true =
NULL;
4184 Label* if_false =
NULL;
4185 Label* fall_through =
NULL;
4186 context()->PrepareTest(&materialize_true, &materialize_false,
4187 &if_true, &if_false, &fall_through);
4189 { AccumulatorValueContext context(
this);
4190 VisitForTypeofValue(sub_expr);
4192 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4194 if (check->Equals(isolate()->heap()->number_symbol())) {
4195 __ JumpIfSmi(
r0, if_true);
4197 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
4199 Split(
eq, if_true, if_false, fall_through);
4200 }
else if (check->Equals(isolate()->heap()->string_symbol())) {
4201 __ JumpIfSmi(
r0, if_false);
4207 Split(
eq, if_true, if_false, fall_through);
4208 }
else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4209 __ CompareRoot(
r0, Heap::kTrueValueRootIndex);
4211 __ CompareRoot(
r0, Heap::kFalseValueRootIndex);
4212 Split(
eq, if_true, if_false, fall_through);
4213 }
else if (FLAG_harmony_typeof &&
4214 check->Equals(isolate()->heap()->null_symbol())) {
4215 __ CompareRoot(
r0, Heap::kNullValueRootIndex);
4216 Split(
eq, if_true, if_false, fall_through);
4217 }
else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4218 __ CompareRoot(
r0, Heap::kUndefinedValueRootIndex);
4220 __ JumpIfSmi(
r0, if_false);
4225 Split(
ne, if_true, if_false, fall_through);
4227 }
else if (check->Equals(isolate()->heap()->function_symbol())) {
4228 __ JumpIfSmi(
r0, if_false);
4233 Split(
eq, if_true, if_false, fall_through);
4234 }
else if (check->Equals(isolate()->heap()->object_symbol())) {
4235 __ JumpIfSmi(
r0, if_false);
4236 if (!FLAG_harmony_typeof) {
4237 __ CompareRoot(
r0, Heap::kNullValueRootIndex);
4248 Split(
eq, if_true, if_false, fall_through);
4250 if (if_false != fall_through)
__ jmp(if_false);
4252 context()->Plug(if_true, if_false);
4256 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4257 Comment cmnt(masm_,
"[ CompareOperation");
4258 SetSourcePosition(expr->position());
4262 if (TryLiteralCompare(expr))
return;
4266 Label materialize_true, materialize_false;
4267 Label* if_true =
NULL;
4268 Label* if_false =
NULL;
4269 Label* fall_through =
NULL;
4270 context()->PrepareTest(&materialize_true, &materialize_false,
4271 &if_true, &if_false, &fall_through);
4274 VisitForStackValue(expr->left());
4277 VisitForStackValue(expr->right());
4279 PrepareForBailoutBeforeSplit(expr,
false,
NULL,
NULL);
4280 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
4282 Split(
eq, if_true, if_false, fall_through);
4285 case Token::INSTANCEOF: {
4286 VisitForStackValue(expr->right());
4289 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4292 Split(
eq, if_true, if_false, fall_through);
4297 VisitForAccumulatorValue(expr->right());
4300 case Token::EQ_STRICT:
4317 case Token::INSTANCEOF:
4323 bool inline_smi_code = ShouldInlineSmiCase(op);
4324 JumpPatchSite patch_site(masm_);
4325 if (inline_smi_code) {
4328 patch_site.EmitJumpIfNotSmi(
r2, &slow_case);
4330 Split(cond, if_true, if_false,
NULL);
4331 __ bind(&slow_case);
4335 SetSourcePosition(expr->position());
4337 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4338 patch_site.EmitPatchInfo();
4339 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4340 __ cmp(
r0, Operand(0));
4341 Split(cond, if_true, if_false, fall_through);
4347 context()->Plug(if_true, if_false);
4351 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4352 Expression* sub_expr,
4354 Label materialize_true, materialize_false;
4355 Label* if_true =
NULL;
4356 Label* if_false =
NULL;
4357 Label* fall_through =
NULL;
4358 context()->PrepareTest(&materialize_true, &materialize_false,
4359 &if_true, &if_false, &fall_through);
4361 VisitForAccumulatorValue(sub_expr);
4362 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4364 Heap::kNullValueRootIndex :
4365 Heap::kUndefinedValueRootIndex;
4366 __ LoadRoot(
r1, nil_value);
4368 if (expr->op() == Token::EQ_STRICT) {
4369 Split(
eq, if_true, if_false, fall_through);
4372 Heap::kUndefinedValueRootIndex :
4373 Heap::kNullValueRootIndex;
4375 __ LoadRoot(
r1, other_nil_value);
4378 __ JumpIfSmi(
r0, if_false);
4384 Split(
eq, if_true, if_false, fall_through);
4386 context()->Plug(if_true, if_false);
4390 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4392 context()->Plug(
r0);
4396 Register FullCodeGenerator::result_register() {
4401 Register FullCodeGenerator::context_register() {
4406 void FullCodeGenerator::StoreToFrameField(
int frame_offset, Register value) {
4412 void FullCodeGenerator::LoadContextField(Register dst,
int context_index) {
4417 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4419 if (declaration_scope->is_global_scope() ||
4420 declaration_scope->is_module_scope()) {
4426 }
else if (declaration_scope->is_eval_scope()) {
4432 ASSERT(declaration_scope->is_function_scope());
4442 void FullCodeGenerator::EnterFinallyBlock() {
4445 __ push(result_register());
4456 ExternalReference pending_message_obj =
4457 ExternalReference::address_of_pending_message_obj(isolate());
4458 __ mov(
ip, Operand(pending_message_obj));
4462 ExternalReference has_pending_message =
4463 ExternalReference::address_of_has_pending_message(isolate());
4464 __ mov(
ip, Operand(has_pending_message));
4469 ExternalReference pending_message_script =
4470 ExternalReference::address_of_pending_message_script(isolate());
4471 __ mov(
ip, Operand(pending_message_script));
4477 void FullCodeGenerator::ExitFinallyBlock() {
4481 ExternalReference pending_message_script =
4482 ExternalReference::address_of_pending_message_script(isolate());
4483 __ mov(
ip, Operand(pending_message_script));
4488 ExternalReference has_pending_message =
4489 ExternalReference::address_of_has_pending_message(isolate());
4490 __ mov(
ip, Operand(has_pending_message));
4494 ExternalReference pending_message_obj =
4495 ExternalReference::address_of_pending_message_obj(isolate());
4496 __ mov(
ip, Operand(pending_message_obj));
4503 __ pop(result_register());
4512 #define __ ACCESS_MASM(masm())
4516 int* context_length) {
4523 __ Drop(*stack_depth);
4524 if (*context_length > 0) {
4530 __ bl(finally_entry_);
4533 *context_length = 0;
4542 #endif // V8_TARGET_ARCH_ARM
static const int kCallerFPOffset
static const int kLengthOffset
static const int kBitFieldOffset
Scope * DeclarationScope()
int InstructionsGeneratedSince(Label *label)
const intptr_t kSmiTagMask
VariableDeclaration * function() const
static int SlotOffset(int index)
static const int kFingerOffset
static const int kBuiltinsOffset
static const int kEnumCacheOffset
static String * cast(Object *obj)
void mov(Register rd, Register rt)
static Smi * FromInt(int value)
bool IsFastObjectElementsKind(ElementsKind kind)
static const int kDataOffset
static const int kGlobalReceiverOffset
int SizeOfCodeGeneratedSince(Label *label)
Scope * outer_scope() const
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static bool IsSupported(CpuFeature f)
static const int kCallerSPOffset
#define ASSERT(condition)
friend class BlockConstPoolScope
const int kPointerSizeLog2
static const int kMaxBackEdgeWeight
static const int kInObjectFieldCount
static bool IsCompileTimeValue(Expression *expression)
#define POINTER_SIZE_ALIGN(value)
static const int kMaximumSlots
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
bool IsOptimizable() const
Variable * parameter(int index) const
MemOperand ContextOperand(Register context, int index)
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
static const int kHashFieldOffset
static const int kLiteralsOffset
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
static const int kValueOffset
Variable * arguments() const
static const int kForInSlowCaseMarker
static const int kFirstOffset
static BailoutId Declarations()
static const int kForInFastCaseMarker
static const int kStringWrapperSafeForDefaultValueOf
static void MaybeCallEntryHook(MacroAssembler *masm)
void Jump(Register target, Condition cond=al)
bool ShouldSelfOptimize()
static const int kContextOffset
static const int kCacheStampOffset
static TestContext * cast(AstContext *context)
static const int kDescriptorSize
static const int kPropertiesOffset
int num_parameters() const
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
void ldm(BlockAddrMode am, Register base, RegList dst, Condition cond=al)
static const int kHeaderSize
void CheckConstPool(bool force_emit, bool require_jump)
static const int kElementsOffset
static const int kContainsCachedArrayIndexMask
static BailoutId FunctionEntry()
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
bool IsDeclaredVariableMode(VariableMode mode)
friend class NestedStatement
Vector< const char > CStrVector(const char *data)
static int OffsetOfElementAt(int index)
static const int kLengthOffset
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
static const int kMapOffset
static const int kValueOffset
static const int kEnumCacheBridgeCacheOffset
void VisitIllegalRedeclaration(AstVisitor *visitor)
static const int kLengthOffset
MemOperand FieldMemOperand(Register object, int offset)
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
static const int kContextOffset
static const int kFunctionOffset
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_bool(enable_unaligned_accesses
Handle< Object > CodeObject()
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
static const int kJSReturnSequenceInstructions
static const int kConstructorOffset
#define ASSERT_NE(v1, v2)
static const int kIsUndetectable
static bool ShouldGenerateLog(Expression *type)
static const int kMaximumClonedProperties
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static const int kPrototypeOffset
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static const int kValueOffset
bool IsImmutableVariableMode(VariableMode mode)
static const int kNativeContextOffset
static const int kMarkerOffset
static const int kSharedFunctionInfoOffset
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in 
the Print usage including flags
static FixedArrayBase * cast(Object *object)
static const int kLocal0Offset
static const int kMaxValue
static const int kBitField2Offset
static Handle< Code > GetUninitialized(Token::Value op)
void check(i::Vector< const char > string)
static const int kMaximumClonedLength
static const int kValueOffset
static const int kInstanceTypeOffset
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset flag