#if defined(V8_TARGET_ARCH_X64)

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      __ testl(rax, Immediate(delta_to_patch_site));
      // ...
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
  // ...
};
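// In outline (a sketch of the protocol, not shown in full here): the inlined
// smi check ends in a short conditional jump that initially always goes one
// way (see the "before patched" comments above); when type feedback warrants
// it, the IC miss handler flips the condition in place. The dummy
// "testl rax, <delta>" emitted by EmitPatchInfo records, in its immediate
// operand, the distance back to the patch site so the patching code can find
// the jump starting from the IC call site.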
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Object>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver with
  // undefined when called as functions (without an explicit receiver).
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    // ...
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    // ...
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  // ...

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // ...
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // ...
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier.
        __ RecordWriteContextSlot(
            rsi, context_offset, rax, rbx, kDontSaveFPRegs);
      }
    }
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in case
    // the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(rdi);
    } else {
      // ...
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    // ...
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);
    // ...
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    // ...
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      // ...
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}
void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  // ...
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  // ...
}
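// The profiling counter cell holds the remaining "interrupt budget" as a smi.
// Decrements are weighted by code distance (see EmitStackCheck and
// EmitReturnSequence below); when the budget is exhausted, the generated code
// calls into the runtime, which may in turn trigger optimization of the
// function.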
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;

  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceUnit));
    }
    EmitProfilingCounterDecrement(weight);
    // ...
  } else {
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    // ...
  }

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());
  // ...

  __ bind(&ok);
  // ...
  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }
  // ...
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceUnit));
      }
      EmitProfilingCounterDecrement(weight);
      // ...
      __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      // ...
      EmitProfilingCounterReset();
      // ...
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // ...
    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);

#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint.
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // ...
#endif
  }
}
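// Note that the exit sequence has a fixed shape so the debugger can patch it
// with a breakpoint: the check_exit_codesize label and the int3 padding above
// exist only to keep the generated return sequence exactly
// Assembler::kJSReturnSequenceLength bytes long.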
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
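// The Plug/DropAndPlug overloads above and below implement the four
// expression contexts of the full code generator: EffectContext discards the
// value, AccumulatorValueContext leaves it in rax (the result register),
// StackValueContext pushes it, and TestContext branches on it via DoTest.
// Every expression visitor delivers its result through one of these, which is
// what keeps the operand stack layout consistent.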
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  // ...
  __ Move(result_register(), lit);
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // ...
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ testq(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
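// Split emits at most one jump: whichever of the true/false targets is the
// natural fall-through gets no jump at all. For example, when if_false ==
// fall_through only "j cc, if_true" is emitted and control simply falls into
// the false case.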
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movq(dest, location);
}
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movq(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // ...
  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, "Declaration in with context.");
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, "Declaration in catch context.");
  }
}
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        // ...
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        // ...
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      // ...
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movq(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      // ...
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                rcx,
                                kDontSaveFPRegs);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      // ...
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // ...
      Visit(declaration->module());
      break;
    }
    // ...
  }
}
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    // ...
    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // ...
      break;
    }
    // ...
  }
}
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // ...
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  // ...
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  // ...

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // ...
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      // ...
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
      // ...
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    // ...
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  // ...
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  // ...
}
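// The switch statement is thus compiled in two passes over the clause list:
// the first emits all the '===' comparisons, each jumping to its clause's
// body_target on a match; the second binds the body_target labels and emits
// the bodies, so fall-through between cases is just sequential code.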
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpq(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  // ...
  __ bind(&done_convert);
  // ...

  // Check cache validity in generated code.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(null_value, &call_runtime);
  // ...
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  // ...
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;
  // ...
  __ LoadInstanceDescriptors(rax, rcx);
  // ...

  __ bind(&no_descriptors);
  __ addq(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(rbx, cell);
  // ...
  __ movq(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object.
  // ...
  __ bind(&non_proxy);
  // ...

  // Generate code for doing the condition check.
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  // ...

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  // ...
  __ j(equal, &update_each, Label::kNear);
  // ...
  __ j(equal, &update_each, Label::kNear);
  // ...
  __ j(equal, loop_statement.continue_label());
  // ...

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movq(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  // ...
  EmitStackCheck(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ addq(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}
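// While the for-in loop is running, its state lives in the five stack slots
// cleaned up by the "addq rsp, 5 * kPointerSize" above:
//   rsp[0] : current index (smi)
//   rsp[1] : array length (smi)
//   rsp[2] : FixedArray of enum-cache keys
//   rsp[3] : expected map of the enumerable (or a smi in the slow case)
//   rsp[4] : the enumerable object itself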
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ Push(info);
    __ CallStub(&stub);
  } else {
    __ push(rsi);
    __ Push(info);
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        // ...
      }
      // ...
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movq(temp, context);
    }
    // ...
    __ j(equal, &fast, Label::kNear);
    // ...
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ Move(rcx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        // ...
      }
      // Load next context in chain without clobbering rsi.
      // ...
    }
  }
  // ...
  return ContextOperand(context, var->index());
}
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without introducing
  // variables; in those cases we do not want a runtime call for every
  // variable in the scope containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST_HARMONY
        __ Push(var->name());
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in rcx and the global
      // object in rax.
      __ Move(rcx, var->name());
      __ movq(rax, GlobalObjectOperand());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(rax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
      if (var->binding_needs_init()) {
        // ...
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(rax, var);
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Push(var->name());
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized legacy const bindings map to undefined.
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(rax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "Lookup slot");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(rsi);  // Context.
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // ...
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in rax.
  // ...
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // ...

  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory, unrolling the copy
  // loop once for better throughput.
  // ...
  if ((size % (2 * kPointerSize)) != 0) {
    // ...
  }
  context()->Plug(rax);
}
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
  }
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  // ...
  __ Push(constant_properties);
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ Push(Smi::FromInt(flags));
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(rax);  // Save result on the stack.
      result_saved = true;
    }
    switch (property->kind()) {
      // ...
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Move(rcx, key->handle());
            // ...
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            // ...
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          // ...
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    // ...
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  // ...
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  // ...
  __ Push(constant_elements);
  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    // ...
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(rax);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literals with FAST_*_ELEMENTS cannot transition and
      // don't need to call the runtime stub.
      // ...
      // Update the write barrier for the array store.
      __ RecordWriteField(rbx, offset, result_register(), rcx,
                          kDontSaveFPRegs);
    } else {
      // Store the subexpression value in the array's elements.
      // ...
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        // ...
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after
  // the variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(rax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
    // Deoptimization point in case the binary operation may have side
    // effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(rax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->handle());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into rdx). Right operand is in rax but moved into
  // rcx to make the shifts easier.
  Label done, stub_call, smi_case;
  // ...
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  // ...
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  __ bind(&smi_case);
  switch (op) {
    // ...
    case Token::SHR:
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
      break;
    // ...
    case Token::BIT_AND:
      __ SmiAnd(rax, rdx, rcx);
      break;
    case Token::BIT_XOR:
      __ SmiXor(rax, rdx, rcx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(rax);
}
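// The inline smi fast path above only runs when both operands are smis. The
// patchable jump emitted through JumpPatchSite is "never taken before
// patched", so everything initially falls through to the stub call; once
// collected type feedback shows smi operands, the jump is flipped in place
// and the inline SmiAnd/SmiXor/... sequences are reached without a call.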
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(rdx);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(rax);
}
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // ...
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      // ...
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(rax);
}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Move(rcx, var->name());
    __ movq(rdx, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ movq(rdx, StackOperand(var));
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &skip);
      __ movq(StackOperand(var), rax);
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // ...
      __ push(rax);
      __ push(rsi);
      __ Push(var->name());
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }
  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      // ...
      __ Push(var->name());
      // ...
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, rcx);
      __ movq(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &assign, Label::kNear);
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ bind(&assign);
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ movq(rdx, rax);
        __ RecordWriteContextSlot(
            rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
      }
    }
  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, rcx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ movq(rdx, location);
        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
        __ Check(equal, "Let binding re-initialization.");
      }
      // Perform the assignment.
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ movq(rdx, rax);
        __ RecordWriteContextSlot(
            rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      // ...
      __ Push(var->name());
      // ...
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
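// The hole checks above implement the binding semantics: a legacy const is
// only written while it still holds the hole (the first initialization
// wins), whereas assigning to a let binding that still holds the hole throws
// a ReferenceError instead of storing.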
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  // ...

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Move(rcx, prop->key()->AsLiteral()->handle());
  __ pop(rdx);
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  __ pop(rcx);
  __ pop(rdx);
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);
}
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(rax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(rdx);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(rax);
  }
}
void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, rmode, ast_id);
}
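// Every IC call funnels through this wrapper so that ic_total_count_ stays
// accurate and the TypeFeedbackId is recorded with the call site; the IC
// machinery later uses that id to associate collected type feedback with the
// originating AST node.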
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ Move(rcx, name);
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);
  // ...

  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
}
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  // ...
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  // ...

  CallFunctionStub stub(arg_count, flags);
  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  // ...
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(rsp, arg_count * kPointerSize));
  } else {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
  }

  // ... (push the enclosing function's receiver, language mode, and start
  // position, then do the runtime call)
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in rax (function) and
      // rdx (receiver). Touch up the stack with the right values.
      __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
      __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    // ...
    context()->DropAndPlug(1, rax);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Call to a global variable.  Push global object as receiver for the
    // call IC lookup.
    // ...
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in rax) and
    // the object holding it (returned in rdx).
    __ push(context_register());
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    // ...

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // ...
      // The receiver is implicitly the global receiver.  Indicate this by
      // passing the hole to the call function stub.
      __ PushRoot(Heap::kTheHoleValueRootIndex);
      __ bind(&call);
    }

    // ...
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // ... (load the global receiver and emit the call)
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into rdi and rax.
  __ Set(rax, arg_count);
  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  // ...

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ JumpIfSmi(rax, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
  Split(non_negative_smi, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  // ... (check for undetectable objects and compare the instance type
  // against the non-callable spec-object range)
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  // ... (test the undetectable bit in the map's bit field)
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(rax);

  // ... (fast exit if the map has already been checked)
  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
  __ j(equal, if_false);

  // Look for a valueOf symbol in the descriptor array, and indicate false
  // if found.
  Label entry, loop, done;

  // Skip the loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(rcx, rbx);
  __ cmpq(rcx, Immediate(0));
  __ j(equal, &done);

  __ LoadInstanceDescriptors(rbx, rbx);
  // ... (scan the descriptors, then check the prototype and set the
  // "checked" bit in the map)

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  // ...

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(rbx);
  __ cmpq(rax, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in rdx and the formal parameter
  // count in rax.
  VisitForAccumulatorValue(args->at(0));
  __ movq(rdx, rax);
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  // ... (load the formal parameter count, replacing it with the actual
  // argument count if the calling frame is an arguments adaptor frame)
  context()->Plug(rax);
}
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(rax, &null);
  // ... (check the instance type and the constructor)

  // Functions have class 'Function'.
  __ bind(&function);
  __ Move(rax, isolate()->factory()->function_class_symbol());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_symbol());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args: 0 (literal string): the type of logging; 1 (string): format
  // string; 2 (array): arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ movq(rbx, rax);

  __ bind(&heapnumber_allocated);

  // Return a random uint32 number in rax.
  __ PrepareCallCFunction(1);
  // ...
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in rax to a 0.(32 random bits) double.
  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  // ...
  context()->Plug(rax);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(rax, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
  __ j(not_equal, &done);
  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = rax;
  Register result = rax;
  Register scratch = rcx;

  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    __ movq(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ movq(scratch, stamp);
      __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movq(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
#ifdef _WIN64
    __ movq(rcx, object);
    // ...
#else
    __ movq(rdi, object);
    // ...
#endif
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    // ...
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(rbx);  // rax = value. rbx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(rbx, &done);
  // ... (check that the object is a JSValue, store the value, and update
  // the write barrier)
  __ bind(&done);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
}
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument, including proxy.
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in rdi. Move it in there.
  __ movq(rdi, result_register());
  ParameterCount count(arg_count);
  // ...
  __ jmp(&done);

  __ bind(&runtime);
  __ push(rax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(rax);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    context()->Plug(rax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = rax;
  Register cache = rbx;
  // ... (load the cache from the native context and probe it)

  Label done, not_found;
  // ...
  __ jmp(&done, Label::kNear);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(rax);
}
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = rax;
  Register left = rbx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmpq(left, right);
  __ j(equal, &ok, Label::kNear);
  // Fail if either is a non-HeapObject.
  Condition either_smi = masm()->CheckEitherSmi(left, right);
  __ j(either_smi, &fail, Label::kNear);
  // ...
  __ j(zero, &fail, Label::kNear);
  // ... (check that both are JSRegExps with the same map and compare
  // their data fields)
  __ j(equal, &ok, Label::kNear);
  __ bind(&fail);
  __ Move(rax, isolate()->factory()->false_value());
  __ jmp(&done, Label::kNear);
  __ bind(&ok);
  __ Move(rax, isolate()->factory()->true_value());
  __ bind(&done);

  context()->Plug(rax);
}
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}
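

// %_GetCachedArrayIndex(string): extracts the array index cached in the
// string's hash field; the argument must actually contain a cached index.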
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(rax);

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}
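

// %_FastAsciiArrayJoin(elements, separator): joins an array of flat ASCII
// strings with an ASCII separator.  Every non-fast case jumps to |bailout|,
// which produces undefined so the caller falls back to the generic join.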
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, return_result, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to rax (= array).
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register.
  Register array = rax;
  Register elements = no_reg;  // Will be rax.
  Register index = rdx;
  Register string_length = rcx;
  Register string = rsi;
  Register scratch = rbx;
  Register array_length = rdi;
  Register result_pos = no_reg;  // Will be rdi.

  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
  Operand result_operand = Operand(rsp, 1 * kPointerSize);
  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
  __ subq(rsp, Immediate(2 * kPointerSize));

  // Check that the array is a JSArray with fast elements.
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);
  __ CheckFastElements(scratch, &bailout);

  // The array has fast elements, so its length must be a smi.
  // If the array is empty, return the empty string.
  __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiCompare(array_length, Smi::FromInt(0));
  __ j(not_zero, &non_trivial_array);
  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
  __ jmp(&return_result);

  // Save the array length on the stack.
  __ bind(&non_trivial_array);
  __ SmiToInteger32(array_length, array_length);
  __ movl(array_length_operand, array_length);

  // Save the FixedArray containing the array's elements.
  // End of the array's live range.
  elements = array;
  __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;

  // Loop over the array's elements, checking that each one is a flat ASCII
  // string and summing the lengths into string_length.
  __ Set(index, 0);
  __ Set(string_length, 0);
  if (generate_debug_code_) {
    __ cmpq(index, array_length);
    __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ movq(string, FieldOperand(elements,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(kIsNotStringMask |
                             kStringEncodingMask |
                             kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);
  __ AddSmiField(string_length,
                 FieldOperand(string, String::kLengthOffset));
  __ j(overflow, &bailout);
  __ incl(index);
  __ cmpl(index, array_length);
  __ j(less, &loop);

  // If the array has only one element, return that element.
  __ cmpl(array_length, Immediate(1));
  __ j(not_equal, &not_size_one_array);
  __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
  __ jmp(&return_result);

  __ bind(&not_size_one_array);

  // array_length is no longer needed; use its register for the position of
  // the next character to write in the result.
  result_pos = array_length;
  array_length = no_reg;

  // Check that the separator is a flat ASCII string, then compute the total
  // length: string_length + (array_length - 1) * separator_length.
  __ movq(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(kIsNotStringMask |
                             kStringEncodingMask |
                             kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  __ decl(index);
  __ imull(scratch, index);
  __ j(overflow, &bailout);
  __ addl(string_length, scratch);
  __ j(overflow, &bailout);

  // Allocate the result string, and keep a copy of it on the stack.
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ movq(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));

  // Dispatch on the length of the separator.
  __ movq(string, separator_operand);
  __ SmiCompare(FieldOperand(string, String::kLengthOffset),
                Smi::FromInt(1));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);

  // Empty separator: copy the elements back to back into the result.
  __ Set(index, 0);
  __ movl(scratch, array_length_operand);
  __ jmp(&loop_1_condition);
  __ bind(&loop_1);
  // Get string = array[index], and copy its characters into the result.
  __ movq(string, FieldOperand(elements,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ bind(&loop_1_condition);
  __ cmpl(index, scratch);
  __ j(less, &loop_1);  // Loop while (index < array_length).
  __ jmp(&done);

  // Bailout: produce undefined so the caller falls back to a slow join.
  __ bind(&bailout);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&return_result);

  // One-character separator: keep its character in scratch.
  __ bind(&one_char_separator);
  __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ Set(index, 0);
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  __ bind(&loop_2);
  // Copy the separator character to the result.
  __ movb(Operand(result_pos, 0), scratch);
  __ incq(result_pos);
  __ bind(&loop_2_entry);
  // Get string = array[index], and copy its characters into the result.
  __ movq(string, FieldOperand(elements,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ cmpl(index, array_length_operand);
  __ j(less, &loop_2);  // Loop while (index < array_length).
  __ jmp(&done);

  // Long separator.
  __ bind(&long_separator);
  // Make elements point to the end of the elements array and index count
  // from -array_length up to zero, so no separate loop limit is needed.
  __ movl(index, array_length_operand);
  __ lea(elements, FieldOperand(elements, index, times_pointer_size,
                                FixedArray::kHeaderSize));
  __ negq(index);

  // Replace the separator on the stack by the address of its first
  // character, and keep its length in scratch.
  __ movq(string, separator_operand);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ movq(separator_operand, string);

  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  __ bind(&loop_3);
  // Copy the separator to the result.
  __ movq(string, separator_operand);
  __ movl(string_length, scratch);
  __ CopyBytes(result_pos, string, string_length, 2);
  __ bind(&loop_3_entry);
  // Get string = array[index], and copy its characters into the result.
  __ movq(string, Operand(elements, index, times_pointer_size, 0));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incq(index);
  __ j(not_equal, &loop_3);  // Loop while (index < 0).

  __ bind(&done);
  __ movq(rax, result_operand);

  __ bind(&return_result);
  // Drop the separator and the two temporaries from the stack.
  __ addq(rsp, Immediate(3 * kPointerSize));
  context()->Plug(rax);
}
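

// Calls to runtime functions: names starting with '_' are inlined runtime
// calls handled above; is_jsruntime() calls go through a call IC to a
// builtin on the global object; everything else calls the C++ runtime.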
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ movq(rax, GlobalObjectOperand());
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function using a call IC.
    __ Move(rcx, expr->name());
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(rax);
}
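

// Unary operations: delete, void, !, typeof, unary + and -, and ~.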
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag =
            is_classic_mode() ? kNonStrictMode : kStrictMode;
        __ Push(Smi::FromInt(strict_mode_flag));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(is_classic_mode() || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ Push(var->name());
          __ Push(Smi::FromInt(kNonStrictMode));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects, so visit the subexpression only
        // for its effect.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // Value contexts are handled explicitly: the true and false cases
        // are materialized, with extra bailout points for the optimizing
        // compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(rax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the accumulator register rax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->UnaryOperationFeedbackId());
  context()->Plug(rax);
}
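

// Count operations (prefix and postfix ++/--) on variables and on named or
// keyed properties.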
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context of
  // the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(Smi::FromInt(0));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForAccumulatorValue(prop->obj());
      __ push(rax);  // Copy of receiver, needed for later store.
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack.
      __ push(rax);  // Copy of key, needed for later store.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value in case
  // evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  __ JumpIfSmi(rax, &no_conversion, Label::kNear);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top of
      // the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(rax);
          break;
        case NAMED_PROPERTY:
          __ movq(Operand(rsp, kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movq(Operand(rsp, 2 * kPointerSize), rax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at the first
    // smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  if (expr->op() == Token::INC) {
    __ Move(rdx, Smi::FromInt(1));
  } else {
    __ movq(rdx, rax);
    __ Move(rax, Smi::FromInt(1));
  }
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(rax);
        }
        // For all contexts except kEffect: we have the result on top of
        // the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(rcx);
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}
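

// Loads the operand of a typeof expression without throwing a reference
// error for unresolvable global or lookup-slot references.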
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Move(rcx, proxy->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(rax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(rsi);
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(rax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
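

// Emits the inlined comparison 'typeof sub_expr == <string literal>',
// branching directly on the type without materializing the typeof string.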
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(rax, if_true);
    __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => false.
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above_equal, if_false);
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
    __ j(equal, if_true);
    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(rax, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(rax, Heap::kNullValueRootIndex);
      __ j(equal, if_true);
    }
    __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
    __ j(below, if_false);
    __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
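

// Comparison operations.  Literal comparisons (typeof checks, null and
// undefined) are tried first; 'in' and 'instanceof' go through a builtin
// and a stub; everything else uses the compare IC with an inlined smi path.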
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of the
  // operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = equal;
          break;
        case Token::LT:
          cc = less;
          break;
        case Token::GT:
          cc = greater;
          break;
        case Token::LTE:
          cc = less_equal;
          break;
        case Token::GTE:
          cc = greater_equal;
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ pop(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movq(rcx, rdx);
        __ or_(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpq(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
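

// Emits the inlined comparison of sub_expr against null or undefined.  For
// non-strict equality, undetectable objects also compare equal to nil.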
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Heap::RootListIndex nil_value = nil == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ CompareRoot(rax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Heap::RootListIndex other_nil_value = nil == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    __ j(equal, if_true);
    __ CompareRoot(rax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // It can be an undetectable object.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ Push(Smi::FromInt(0));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
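

// ----------------------------------------------------------------------------
// Non-local control flow support.
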
void FullCodeGenerator::EnterFinallyBlock() {
  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ push(rdx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Load(rdx, has_pending_message);
  __ push(rdx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Load(rdx, pending_message_script);
  __ push(rdx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  // Restore the pending message and the result register from the stack,
  // in the reverse of the order EnterFinallyBlock pushed them.
  __ pop(rdx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Store(pending_message_script, rdx);

  __ pop(rdx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Store(has_pending_message, rdx);

  __ pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);

  __ pop(result_register());
}


#undef __

#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally code, it
  // can be restored directly from there rather than by unwinding contexts.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

#endif  // V8_TARGET_ARCH_X64