#if V8_TARGET_ARCH_X64

#define __ ACCESS_MASM(masm_)

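// JumpPatchSite (fragment): remembers where a patchable jump was emitted for
// the inline smi checks below. EmitJump() binds patch_site_ at the jump, and
// EmitPatchInfo() later encodes the distance back to that label in a testl
// instruction so the jump can be located and rewritten.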
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

    ASSERT(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg,
                        Label::Distance near_jump = Label::kFar) {

  void EmitJumpIfSmi(Register reg,
                     Label::Distance near_jump = Label::kFar) {
    EmitJump(carry, target, near_jump);

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));

  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);

  MacroAssembler* masm_;

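// Emits a stack-limit check: compares the scratch register (rsp by default)
// against the stack limit root and calls the StackCheck builtin on overflow.
// A non-default scratch is used together with a pointer count (see the call
// with locals_count in Generate() below) to check before pushing locals.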
static void EmitStackCheck(MacroAssembler* masm_,
                           Register scratch = rsp) {
  Isolate* isolate = masm_->isolate();
  ASSERT(scratch.is(rsp) == (pointers == 0));
  __ movq(scratch, rsp);
  __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
  __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);

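// Generate() emits the full-codegen prologue and body: it allocates stack
// locals, sets up the function context and (if needed) the arguments object,
// visits declarations, emits a stack check, and then visits the function
// body followed by an implicit 'return undefined'.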
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  InitializeFeedbackVector();
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {

  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());
    __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
    __ movp(args.GetReceiverOperand(), rcx);

  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, locals_count, rcx);
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movq(rcx, Immediate(loop_iterations));
        __ bind(&loop_header);
        for (int i = 0; i < kMaxPushes; i++) {
      int remaining = locals_count % kMaxPushes;
      for (int i = 0; i < remaining; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    function_in_register = false;
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ movp(rax, Operand(rbp, parameter_offset));
        __ movp(Operand(rsi, context_offset), rax);
        __ RecordWriteContextSlot(

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (strict_mode() == STRICT) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);

    __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    EmitStackCheck(masm_);

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();

void FullCodeGenerator::ClearAccumulator() {

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    RecordBackEdge(stmt->OsrEntryId());
    EmitProfilingCounterReset();
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);

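// Emits the return sequence: decrements the profiling counter (with a weight
// derived from the emitted code size), calls the InterruptCheck builtin when
// it underflows, resets the counter, and finally returns with __ Ret.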
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
    __ bind(&return_label_);
      __ CallRuntime(Runtime::kTraceExit, 1);
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      int distance = masm_->pc_offset();
          Max(1, distance / kCodeSizeMultiplier));
    EmitProfilingCounterDecrement(weight);
      __ call(isolate()->builtins()->InterruptCheck(),
              RelocInfo::CODE_TARGET);
    EmitProfilingCounterReset();

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    int no_frame_start = masm_->pc_offset();
    __ Ret(arguments_bytes, rcx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    for (int i = 0; i < kPadding; ++i) {
              masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());

void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());

void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::AccumulatorValueContext::Plug(
  __ LoadRoot(result_register(), index);

void FullCodeGenerator::StackValueContext::Plug(

  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Move(result_register(), lit);

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsSmi()) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
    __ Move(result_register(), lit);
    codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    Register reg) const {
  __ Move(result_register(), reg);

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ movp(Operand(rsp, 0), reg);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);

void FullCodeGenerator::EffectContext::Plug(bool flag) const {

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
    if (true_label_ != fall_through_) __ jmp(true_label_);
    if (false_label_ != fall_through_) __ jmp(false_label_);

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  CallIC(ic, condition->test_id());
  __ testp(result_register(), result_register());
  Split(not_zero, if_true, if_false, fall_through);

                             Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {
  return Operand(rbp, offset);

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
    return StackOperand(var);

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  __ movp(dest, location);

void FullCodeGenerator::SetVar(Variable* var,
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);
  if (var->IsContextSlot()) {

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
      Comment cmnt(masm_, "[ VariableDeclaration");
      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(variable->name());
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(rsi,
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ Push(variable->name());
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->interface()->IsFrozen());
  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);
  __ RecordWriteContextSlot(rsi,
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
  Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    VisitForAccumulatorValue(clause->label());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
      __ jmp(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ jmp(clause->body_target());

    __ jmp(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
    __ jmp(default_clause->body_target());

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());

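// for-in: after converting the enumerable to an object, the code tries to
// use the enum cache from the map's descriptors (CheckEnumCache /
// LoadInstanceDescriptors); otherwise it falls back to
// Runtime::kGetPropertyNamesFast and iterates the resulting fixed array,
// keeping loop state in five stack slots that are dropped at break_label.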
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpp(rax, null_value);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ bind(&done_convert);

  __ CheckEnumCache(null_value, &call_runtime);
  __ jmp(&use_cache, Label::kNear);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
                 Heap::kMetaMapRootIndex);

  __ bind(&use_cache);
  Label no_descriptors;
  __ LoadInstanceDescriptors(rax, rcx);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));

  __ bind(&fixed_array);
  Handle<Object> feedback = Handle<Object>(
  StoreFeedbackVectorSlot(slot, feedback);
  __ Move(rbx, FeedbackVector());
  __ movp(rcx, Operand(rsp, 0 * kPointerSize));

  __ bind(&non_proxy);
  __ movp(rax, Operand(rsp, 0 * kPointerSize));
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));
  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
  __ j(equal, &update_each, Label::kNear);
  __ j(equal, &update_each, Label::kNear);
  __ j(equal, loop_statement.continue_label());

  __ bind(&update_each);
  __ movp(result_register(), rbx);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitBackEdgeBookkeeping(stmt, &loop);

  __ bind(loop_statement.break_label());
  __ addp(rsp, Immediate(5 * kPointerSize));

  decrement_loop_depth();

void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->assign_iterator());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ bind(&done_convert);

  __ bind(loop_statement.continue_label());
  VisitForEffect(stmt->next_result());

  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
  __ bind(&result_not_done);

  VisitForEffect(stmt->assign_each());

  Visit(stmt->body());

  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  __ bind(loop_statement.break_label());
  decrement_loop_depth();

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  context()->Plug(rax);

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
  Register context = rsi;
  Register temp = rdx;

    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s != NULL && s->is_eval_scope()) {
      if (!context.is(temp)) {
        __ movp(temp, context);
      __ j(equal, &fast, Label::kNear);

  __ Move(rcx, var->name());

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {

void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    Variable* local = var->local_if_not_shadowed();
    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
        __ Push(var->name());
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();
  switch (var->location()) {
      Comment cmnt(masm_, "[ Global variable");
      __ Move(rcx, var->name());
      context()->Plug(rax);

      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
              var->initializer_position() < proxy->position();
        if (!skip_init_check) {
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            __ Push(var->name());
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          context()->Plug(rax);
      context()->Plug(var);

      Comment cmnt(masm_, "[ Lookup slot");
      __ Push(var->name());
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      context()->Plug(rax);

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ bind(&materialized);

  Label allocated, runtime_allocate;
  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ bind(&allocated);
  if ((size % (2 * kPointerSize)) != 0) {
  context()->Plug(rax);

void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
    VisitForStackValue(expression);

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
      flags != ObjectLiteral::kFastElements ||
    __ Push(constant_properties);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
    __ Move(rcx, constant_properties);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Move(rcx, key->value());
            CallStoreIC(key->LiteralFeedbackId());
            VisitForEffect(value);
        __ Push(Operand(rsp, 0));
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::PROTOTYPE:
        __ Push(Operand(rsp, 0));
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    __ Push(Operand(rsp, 0));
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ Push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
    context()->PlugTOS();
    context()->Plug(rax);

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  bool has_constant_fast_elements =
  Handle<FixedArrayBase> constant_elements_values(

  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {

  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(
        allocation_site_mode,
    __ Push(constant_elements);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
           FLAG_smi_only_arrays);
    if (has_constant_fast_elements) {
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (!result_saved) {
      result_saved = true;
    VisitForAccumulatorValue(subexpr);
      __ movp(rbx, Operand(rsp, kPointerSize));
      __ RecordWriteField(rbx, offset, result_register(), rcx,
      StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

    __ addp(rsp, Immediate(kPointerSize));
    context()->PlugTOS();
    context()->Plug(rax);

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidLeftHandSide());
  Comment cmnt(masm_, "[ Assignment");

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForAccumulatorValue(property->obj());
        __ Push(result_register());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);

    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(rax);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      EmitCreateIteratorResult(false);
      __ Push(result_register());
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;
      __ bind(&continuation);
      VisitForAccumulatorValue(expr->generator_object());
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ movp(context_register(),
      __ bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();
      context()->Plug(result_register());

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(rcx, Heap::kthrow_stringRootIndex);
      __ Push(Operand(rsp, 2 * kPointerSize));
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      __ bind(&l_continuation);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ movp(rax, Operand(rsp, generator_object_depth));
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ movp(context_register(),
      EmitReturnSequence();
      __ LoadRoot(rcx, Heap::knext_stringRootIndex);
      __ Push(Operand(rsp, 2 * kPointerSize));
      __ movp(rdx, Operand(rsp, kPointerSize));
      __ movp(rax, Operand(rsp, 2 * kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      __ movp(Operand(rsp, 2 * kPointerSize), rdi);
      __ LoadRoot(rcx, Heap::kdone_stringRootIndex);
      __ testp(result_register(), result_register());
      __ LoadRoot(rcx, Heap::kvalue_stringRootIndex);
      context()->DropAndPlug(2, rax);

void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);

  Label wrong_state, closed_state, done;
  __ j(less, &wrong_state);

  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ subp(rdx, Immediate(1));
  __ jmp(&push_argument_holes);

  __ bind(&push_frame);
  __ call(&resume_frame);
  __ bind(&resume_frame);

  __ cmpp(rdx, Immediate(0));
  __ SmiToInteger64(rcx,
    __ bind(&slow_resume);

  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ subp(rdx, Immediate(1));
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ Push(result_register());
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  __ Abort(kGeneratorFailedToResume);

  __ bind(&closed_state);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    EmitCreateIteratorResult(true);
    __ CallRuntime(Runtime::kHiddenThrow, 1);

  __ bind(&wrong_state);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  context()->Plug(result_register());

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ bind(&gc_required);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ movp(context_register(),

  __ bind(&allocated);
  __ Move(rdx, isolate()->factory()->ToBoolean(done));
          isolate()->factory()->empty_fixed_array());
          isolate()->factory()->empty_fixed_array());

void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->value());

void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());

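// Inline smi fast path for binary operations: a JumpPatchSite guards the smi
// case, the Smi* macro-assembler helpers handle it in registers, and the
// BinaryOpIC stub (stub_call) is used as the slow path.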
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Expression* right) {
  Label done, stub_call, smi_case;

  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  BinaryOpICStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

      __ SmiShiftArithmeticRight(rax, rdx, rcx);
      __ SmiShiftLeft(rax, rdx, rcx);
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
      __ SmiAdd(rax, rdx, rcx, &stub_call);
      __ SmiSub(rax, rdx, rcx, &stub_call);
      __ SmiMul(rax, rdx, rcx, &stub_call);
      __ SmiOr(rax, rdx, rcx);
    case Token::BIT_AND:
      __ SmiAnd(rax, rdx, rcx);
    case Token::BIT_XOR:
      __ SmiXor(rax, rdx, rcx);

  context()->Plug(rax);

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
  BinaryOpICStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(rax);

void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidLeftHandSide());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
    assign_type = (prop->key()->IsPropertyName())

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
    case NAMED_PROPERTY: {
      VisitForAccumulatorValue(prop->obj());
      __ Move(rcx, prop->key()->AsLiteral()->value());
    case KEYED_PROPERTY: {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();

  context()->Plug(rax);

void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
  __ movp(location, rax);
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(

void FullCodeGenerator::EmitCallStoreContextSlot(
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);

void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  if (var->IsUnallocated()) {
    __ Move(rcx, var->name());
  } else if (op == Token::INIT_CONST_LEGACY) {
    ASSERT(!var->IsParameter());
    if (var->IsLookupSlot()) {
      __ Push(var->name());
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
      ASSERT(var->IsStackLocal() || var->IsContextSlot());
      __ movp(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ movp(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ Push(var->name());
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      if (generate_debug_code_ && op == Token::INIT_LET) {
        __ movp(rdx, location);
        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
        __ Check(equal, kLetBindingReInitialization);
      EmitStoreToStackLocalOrContextSlot(var, location);

void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  SetSourcePosition(expr->position());
  __ Move(rcx, prop->key()->AsLiteral()->value());
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);

void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  SetSourcePosition(expr->position());
  Handle<Code> ic = strict_mode() == SLOPPY
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(rax);
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(rax);

void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  __ call(code, RelocInfo::CODE_TARGET, ast_id);

void FullCodeGenerator::EmitCallWithIC(Call* expr) {
  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
    __ Push(isolate()->factory()->undefined_value());
    ASSERT(callee->IsProperty());
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    __ Push(Operand(rsp, 0));
    __ movp(Operand(rsp, kPointerSize), rax);

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, flags);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, rax);

void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  ASSERT(callee->IsProperty());
  __ movp(rdx, Operand(rsp, 0));
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  __ Push(Operand(rsp, 0));
  __ movp(Operand(rsp, kPointerSize), rax);

  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, rax);

void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
  __ Move(rbx, FeedbackVector());
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  RecordJSReturnSite(expr);

  context()->DropAndPlug(1, rax);

void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ Push(Operand(rsp, arg_count * kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ Push(args.GetReceiverOperand());
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);

void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));

      __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
      __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);

    SetSourcePosition(expr->position());
    __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, rax);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    VariableProxy* proxy = callee->AsVariableProxy();
    { PreservePositionScope scope(masm()->positions_recorder());
      __ Push(context_register());
      __ Push(proxy->name());
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);

    if (done.is_linked()) {
      __ jmp(&call, Label::kNear);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    EmitCallWithStub(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
      EmitKeyedCallWithIC(expr, property->key());
    ASSERT(call_type == Call::OTHER_CALL);
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    EmitCallWithStub(expr);

  ASSERT(expr->return_is_recorded_);

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  __ Set(rax, arg_count);
  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));

  Handle<Object> uninitialized =
  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
  if (FLAG_pretenuring_call_new) {
    StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
                            isolate()->factory()->NewAllocationSite());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);

  __ Move(rbx, FeedbackVector());
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(rax);

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ JumpIfSmi(rax, if_true);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(non_negative_smi, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(rax);
  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(rcx, rbx);
  __ cmpp(rcx, Immediate(0));
  __ LoadInstanceDescriptors(rbx, r8);
    __ Cmp(rdx, isolate()->factory()->value_of_string());

  __ bind(&skip_lookup);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

          Immediate(0x00000000));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));
  context()->Plug(rax);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  context()->Plug(rax);

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));
  __ JumpIfSmi(rax, &null);
  __ Move(rax, isolate()->factory()->function_class_string());

  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_string());

  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  context()->Plug(rax);

void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kHiddenLog, 2);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  context()->Plug(rax);

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(rax);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(rax);

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));
  __ JumpIfSmi(rax, &done);
  context()->Plug(rax);

3396 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3397 ZoneList<Expression*>* args = expr->arguments();
3398 ASSERT(args->length() == 2);
3400 Smi* index =
Smi::cast(*(args->at(1)->AsLiteral()->value()));
3402 VisitForAccumulatorValue(args->at(0));
3404 Label runtime, done, not_date_object;
3405 Register
object =
rax;
3406 Register result =
rax;
3407 Register scratch =
rcx;
3409 __ JumpIfSmi(
object, ¬_date_object);
3413 if (index->value() == 0) {
3418 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3419 Operand stamp_operand = __ ExternalOperand(stamp);
3420 __ movp(scratch, stamp_operand);
3424 kPointerSize * index->value()));
3428 __ PrepareCallCFunction(2);
3431 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3436 __ bind(&not_date_object);
3437 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3439 context()->Plug(rax);
3443 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3444 ZoneList<Expression*>* args = expr->arguments();
3447 Register string = rax;
3448 Register index = rbx;
3449 Register value = rcx;
3451 VisitForStackValue(args->at(1));
3452 VisitForStackValue(args->at(2));
3453 VisitForAccumulatorValue(args->at(0));
3457 if (FLAG_debug_code) {
3458 __ Check(__ CheckSmi(value), kNonSmiValue);
3459 __ Check(__ CheckSmi(index), kNonSmiValue);
3462 __ SmiToInteger32(value, value);
3463 __ SmiToInteger32(index, index);
3465 if (FLAG_debug_code) {
3467 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3472 context()->Plug(string);
3476 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3477 ZoneList<Expression*>* args = expr->arguments();
3480 Register string = rax;
3481 Register index = rbx;
3482 Register value = rcx;
3484 VisitForStackValue(args->at(1));
3485 VisitForStackValue(args->at(2));
3486 VisitForAccumulatorValue(args->at(0));
3490 if (FLAG_debug_code) {
3491 __ Check(__ CheckSmi(value), kNonSmiValue);
3492 __ Check(__ CheckSmi(index), kNonSmiValue);
3495 __ SmiToInteger32(value, value);
3496 __ SmiToInteger32(index, index);
3498 if (FLAG_debug_code) {
3500 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3505 context()->Plug(rax);
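// %_MathPow: both operands are pushed and the elided line presumably calls
// the power stub (MathPowStub) before the result in rax is plugged into the
// context.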
3509 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3511 ZoneList<Expression*>* args = expr->arguments();
3512 ASSERT(args->length() == 2);
3513 VisitForStackValue(args->at(0));
3514 VisitForStackValue(args->at(1));
3517 context()->Plug(rax);
3521 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3522 ZoneList<Expression*>* args = expr->arguments();
3523 ASSERT(args->length() == 2);
3525 VisitForStackValue(args->at(0));
3526 VisitForAccumulatorValue(args->at(1));
3531 __ JumpIfSmi(rbx, &done);
3545 context()->Plug(rax);
3549 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3550 ZoneList<Expression*>* args = expr->arguments();
3554 VisitForAccumulatorValue(args->at(0));
3556 NumberToStringStub stub;
3558 context()->Plug(rax);
3562 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 ASSERT(args->length() == 1);
3566 VisitForAccumulatorValue(args->at(0));
3569 StringCharFromCodeGenerator generator(rax, rbx);
3570 generator.GenerateFast(masm_);
3573 NopRuntimeCallHelper call_helper;
3574 generator.GenerateSlow(masm_, call_helper);
3577 context()->Plug(rbx);
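// %_StringCharCodeAt: the fast path is generated inline by
// StringCharCodeAtGenerator. An out-of-range index produces NaN, and a
// non-smi index falls through the need_conversion label with the result
// preset to undefined before the generator's slow path is emitted.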
3581 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3582 ZoneList<Expression*>* args = expr->arguments();
3583 ASSERT(args->length() == 2);
3585 VisitForStackValue(args->at(0));
3586 VisitForAccumulatorValue(args->at(1));
3588 Register object = rbx;
3589 Register index = rax;
3590 Register result = rdx;
3594 Label need_conversion;
3595 Label index_out_of_range;
3597 StringCharCodeAtGenerator generator(object,
3602 &index_out_of_range,
3604 generator.GenerateFast(masm_);
3607 __ bind(&index_out_of_range);
3610 __ LoadRoot(result, Heap::kNanValueRootIndex);
3613 __ bind(&need_conversion);
3616 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3619 NopRuntimeCallHelper call_helper;
3620 generator.GenerateSlow(masm_, call_helper);
3623 context()->Plug(result);
3627 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3628 ZoneList<Expression*>* args = expr->arguments();
3629 ASSERT(args->length() == 2);
3631 VisitForStackValue(args->at(0));
3632 VisitForAccumulatorValue(args->at(1));
3634 Register object = rbx;
3635 Register index = rax;
3636 Register scratch = rdx;
3637 Register result = rax;
3641 Label need_conversion;
3642 Label index_out_of_range;
3644 StringCharAtGenerator generator(object,
3650 &index_out_of_range,
3652 generator.GenerateFast(masm_);
3655 __ bind(&index_out_of_range);
3658 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3661 __ bind(&need_conversion);
3667 NopRuntimeCallHelper call_helper;
3668 generator.GenerateSlow(masm_, call_helper);
3671 context()->Plug(result);
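// %_StringAdd: the two operands are concatenated by a stub call (elided
// here, presumably StringAddStub) and the resulting string is left in rax.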
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments();
3678 VisitForStackValue(args->at(0));
3679 VisitForAccumulatorValue(args->at(1));
3684 context()->Plug(rax);
3688 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3692 VisitForStackValue(args->at(0));
3693 VisitForStackValue(args->at(1));
3695 StringCompareStub stub;
3697 context()->Plug(rax);
3701 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3703 ZoneList<Expression*>* args = expr->arguments();
3704 ASSERT(args->length() == 1);
3705 VisitForStackValue(args->at(0));
3706 __ CallRuntime(Runtime::kMath_log, 1);
3707 context()->Plug(rax);
3711 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3713 ZoneList<Expression*>* args = expr->arguments();
3714 ASSERT(args->length() == 1);
3715 VisitForStackValue(args->at(0));
3716 __ CallRuntime(Runtime::kMath_sqrt, 1);
3717 context()->Plug(rax);
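// %_CallFunction(receiver, ...args, function): the receiver and arguments
// are pushed and the callee ends up in rax. If it is a JSFunction it is
// invoked directly (the elided lines presumably move it into rdi and use
// InvokeFunction with the ParameterCount below); anything else falls back
// to Runtime::kCall.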
3721 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3722 ZoneList<Expression*>* args = expr->arguments();
3723 ASSERT(args->length() >= 2);
3725 int arg_count = args->length() - 2;
3726 for (int i = 0; i < arg_count + 1; i++) {
3727 VisitForStackValue(args->at(i));
3729 VisitForAccumulatorValue(args->last());
3731 Label runtime, done;
3733 __ JumpIfSmi(rax, &runtime);
3738 __ movp(rdi, result_register());
3739 ParameterCount count(arg_count);
3746 __ CallRuntime(Runtime::kCall, args->length());
3749 context()->Plug(rax);
3753 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3754 RegExpConstructResultStub stub;
3755 ZoneList<Expression*>* args = expr->arguments();
3756 ASSERT(args->length() == 3);
3757 VisitForStackValue(args->at(0));
3758 VisitForStackValue(args->at(1));
3759 VisitForAccumulatorValue(args->at(2));
3763 context()->Plug(rax);
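// %_GetFromCache(cache_id, key): looks the key up in the per-context
// JSFunction result cache selected by the literal cache_id. The elided
// lines presumably scan the cache's finger/entries; on a miss the cache and
// key are passed to Runtime::kHiddenGetFromCache.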
3767 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3768 ZoneList<Expression*>* args = expr->arguments();
3772 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3774 Handle<FixedArray> jsfunction_result_caches(
3775 isolate()->native_context()->jsfunction_result_caches());
3776 if (jsfunction_result_caches->length() <= cache_id) {
3777 __ Abort(kAttemptToUseUndefinedCache);
3778 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3779 context()->Plug(rax);
3783 VisitForAccumulatorValue(args->at(1));
3786 Register cache = rbx;
3796 Label done, not_found;
3811 __ jmp(&done, Label::kNear);
3813 __ bind(&not_found);
3817 __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3820 context()->Plug(rax);
3824 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments();
3826 ASSERT(args->length() == 1);
3828 VisitForAccumulatorValue(args->at(0));
3830 Label materialize_true, materialize_false;
3831 Label* if_true = NULL;
3832 Label* if_false = NULL;
3833 Label* fall_through = NULL;
3834 context()->PrepareTest(&materialize_true, &materialize_false,
3835 &if_true, &if_false, &fall_through);
3839 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3843 context()->Plug(if_true, if_false);
3847 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3848 ZoneList<Expression*>* args = expr->arguments();
3849 ASSERT(args->length() == 1);
3850 VisitForAccumulatorValue(args->at(0));
3852 __ AssertString(rax);
3858 context()->Plug(rax);
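// %_FastAsciiArrayJoin(array, separator): inlined Array.prototype.join for
// fast-elements arrays of one-byte strings. The total length is summed
// first, the result string is allocated with AllocateAsciiString, and one
// of three copy loops runs depending on whether the separator is empty, a
// single character, or longer. Any unexpected input jumps to &bailout,
// which returns undefined so the caller takes the generic join path.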
3862 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3863 Label bailout, return_result, done, one_char_separator, long_separator,
3864 non_trivial_array, not_size_one_array, loop,
3865 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3866 ZoneList<Expression*>* args = expr->arguments();
3867 ASSERT(args->length() == 2);
3869 VisitForStackValue(args->at(1));
3871 VisitForAccumulatorValue(args->at(0));
3873 Register array = rax;
3874 Register elements = no_reg;
3876 Register index = rdx;
3878 Register string_length = rcx;
3880 Register string = rsi;
3882 Register scratch = rbx;
3884 Register array_length = rdi;
3885 Register result_pos = no_reg;
3887 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3888 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3889 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3893 __ subp(rsp, Immediate(2 * kPointerSize));
3896 __ JumpIfSmi(array, &bailout);
3901 __ CheckFastElements(scratch, &bailout);
3908 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3909 __ jmp(&return_result);
3912 __ bind(&non_trivial_array);
3913 __ SmiToInteger32(array_length, array_length);
3914 __ movl(array_length_operand, array_length);
3926 __ Set(string_length, 0);
3930 if (generate_debug_code_) {
3931 __ cmpp(index, array_length);
3932 __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3939 __ JumpIfSmi(string, &bailout);
3942 __ andb(scratch, Immediate(
3946 __ AddSmiField(string_length,
3950 __ cmpl(index, array_length);
3960 __ cmpl(array_length, Immediate(1));
3963 __ jmp(&return_result);
3965 __ bind(¬_size_one_array);
3968 result_pos = array_length;
3977 __ movp(string, separator_operand);
3978 __ JumpIfSmi(string, &bailout);
3981 __ andb(scratch, Immediate(
3993 __ SmiToInteger32(scratch,
3996 __ imull(scratch, index);
3998 __ addl(string_length, scratch);
4004 __ AllocateAsciiString(result_pos, string_length, scratch,
4005 index, string, &bailout);
4006 __ movp(result_operand, result_pos);
4009 __ movp(string, separator_operand);
4012 __ j(equal, &one_char_separator);
4018 __ movl(scratch, array_length_operand);
4019 __ jmp(&loop_1_condition);
4033 __ SmiToInteger32(string_length,
4039 __ bind(&loop_1_condition);
4040 __ cmpl(index, scratch);
4046 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4047 __ jmp(&return_result);
4051 __ bind(&one_char_separator);
4058 __ jmp(&loop_2_entry);
4069 __ movb(Operand(result_pos, 0), scratch);
4070 __ incp(result_pos);
4072 __ bind(&loop_2_entry);
4077 __ SmiToInteger32(string_length,
4083 __ cmpl(index, array_length_operand);
4089 __ bind(&long_separator);
4094 __ movl(index, array_length_operand);
4101 __ movp(string, separator_operand);
4102 __ SmiToInteger32(scratch,
4106 __ movp(separator_operand, string);
4110 __ jmp(&loop_3_entry);
4121 __ movp(string, separator_operand);
4122 __ movl(string_length, scratch);
4123 __ CopyBytes(result_pos, string, string_length, 2);
4125 __ bind(&loop_3_entry);
4128 __ SmiToInteger32(string_length,
4137 __ movp(rax, result_operand);
4139 __ bind(&return_result);
4141 __ addp(rsp, Immediate(3 * kPointerSize));
4143 context()->Plug(rax);
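// Runtime calls: intrinsics marked as inline are expanded by
// EmitInlineRuntimeCall (the Emit* helpers above). For is_jsruntime()
// calls the callee is presumably looked up on the builtins object (the
// name is moved into rcx for a load IC) and invoked as an ordinary call,
// while plain %Foo calls push their arguments and go through the C++
// runtime via CallRuntime.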
4147 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4148 if (expr->function() != NULL &&
4150 Comment cmnt(masm_, "[ InlineRuntimeCall");
4151 EmitInlineRuntimeCall(expr);
4155 Comment cmnt(masm_, "[ CallRuntime");
4156 ZoneList<Expression*>* args = expr->arguments();
4157 int arg_count = args->length();
4159 if (expr->is_jsruntime()) {
4166 __ Move(rcx, expr->name());
4170 __ Push(Operand(rsp, 0));
4171 __ movp(Operand(rsp, kPointerSize), rax);
4174 for (int i = 0; i < arg_count; i++) {
4175 VisitForStackValue(args->at(i));
4179 SetSourcePosition(expr->position());
4181 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4186 context()->DropAndPlug(1, rax);
4190 for (int i = 0; i < arg_count; i++) {
4191 VisitForStackValue(args->at(i));
4195 __ CallRuntime(expr->function(), arg_count);
4196 context()->Plug(rax);
4201 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4202 switch (expr->op()) {
4203 case Token::DELETE: {
4204 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4205 Property* property = expr->expression()->AsProperty();
4206 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4208 if (property != NULL) {
4209 VisitForStackValue(property->obj());
4210 VisitForStackValue(property->key());
4213 context()->Plug(rax);
4214 } else if (proxy != NULL) {
4215 Variable* var = proxy->var();
4219 if (var->IsUnallocated()) {
4221 __ Push(var->name());
4224 context()->Plug(rax);
4225 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4229 context()->Plug(var->is_this());
4233 __ Push(context_register());
4234 __ Push(var->name());
4235 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4236 context()->Plug(rax);
4241 VisitForEffect(expr->expression());
4242 context()->Plug(true);
4248 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4249 VisitForEffect(expr->expression());
4250 context()->Plug(Heap::kUndefinedValueRootIndex);
4255 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4256 if (context()->IsEffect()) {
4259 VisitForEffect(expr->expression());
4260 } else if (context()->IsTest()) {
4261 const TestContext* test = TestContext::cast(context());
4263 VisitForControl(expr->expression(),
4264 test->false_label(),
4266 test->fall_through());
4267 context()->Plug(test->true_label(), test->false_label());
4273 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4274 Label materialize_true, materialize_false, done;
4275 VisitForControl(expr->expression(),
4279 __ bind(&materialize_true);
4280 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4281 if (context()->IsAccumulatorValue()) {
4282 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4284 __ PushRoot(Heap::kTrueValueRootIndex);
4286 __ jmp(&done, Label::kNear);
4287 __ bind(&materialize_false);
4288 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4289 if (context()->IsAccumulatorValue()) {
4290 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4292 __ PushRoot(Heap::kFalseValueRootIndex);
4299 case Token::TYPEOF: {
4300 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4301 { StackValueContext context(this);
4302 VisitForTypeofValue(expr->expression());
4304 __ CallRuntime(Runtime::kTypeof, 1);
4305 context()->Plug(rax);
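// Count operations (++/--): the old value is loaded (and duplicated on the
// stack for postfix uses), a patchable smi fast path adds or subtracts 1
// inline, and the generic path converts to a number and calls the
// binary-op stub recorded by CountBinOpFeedbackId. The result is then
// stored back through the variable, named-property, or keyed-property
// case below.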
4315 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4316 ASSERT(expr->expression()->IsValidLeftHandSide());
4318 Comment cmnt(masm_, "[ CountOperation");
4319 SetSourcePosition(expr->position());
4323 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4324 LhsKind assign_type = VARIABLE;
4325 Property* prop = expr->expression()->AsProperty();
4330 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4334 if (assign_type == VARIABLE) {
4335 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4336 AccumulatorValueContext context(this);
4337 EmitVariableLoad(expr->expression()->AsVariableProxy());
4340 if (expr->is_postfix() && !context()->IsEffect()) {
4343 if (assign_type == NAMED_PROPERTY) {
4344 VisitForAccumulatorValue(prop->obj());
4346 EmitNamedPropertyLoad(prop);
4348 VisitForStackValue(prop->obj());
4349 VisitForAccumulatorValue(prop->key());
4350 __ movp(rdx, Operand(rsp, 0));
4352 EmitKeyedPropertyLoad(prop);
4358 if (assign_type == VARIABLE) {
4359 PrepareForBailout(expr->expression(), TOS_REG);
4361 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4365 Label done, stub_call;
4366 JumpPatchSite patch_site(masm_);
4367 if (ShouldInlineSmiCase(expr->op())) {
4369 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4372 if (expr->is_postfix()) {
4373 if (!context()->IsEffect()) {
4377 switch (assign_type) {
4381 case NAMED_PROPERTY:
4382 __ movp(Operand(rsp, kPointerSize), rax);
4384 case KEYED_PROPERTY:
4385 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4391 SmiOperationExecutionMode mode;
4394 if (expr->op() == Token::INC) {
4399 __ jmp(&stub_call, Label::kNear);
4403 ToNumberStub convert_stub;
4404 __ CallStub(&convert_stub);
4407 if (expr->is_postfix()) {
4408 if (!context()->IsEffect()) {
4412 switch (assign_type) {
4416 case NAMED_PROPERTY:
4417 __ movp(Operand(rsp, kPointerSize), rax);
4419 case KEYED_PROPERTY:
4420 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4427 SetSourcePosition(expr->position());
4430 __ bind(&stub_call);
4434 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4435 patch_site.EmitPatchInfo();
4439 switch (assign_type) {
4441 if (expr->is_postfix()) {
4443 { EffectContext context(this);
4444 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4446 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4451 if (!context()->IsEffect()) {
4452 context()->PlugTOS();
4456 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4458 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4459 context()->Plug(rax);
4462 case NAMED_PROPERTY: {
4463 __ Move(rcx, prop->key()->AsLiteral()->value());
4465 CallStoreIC(expr->CountStoreFeedbackId());
4466 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4467 if (expr->is_postfix()) {
4468 if (!context()->IsEffect()) {
4469 context()->PlugTOS();
4472 context()->Plug(rax);
4476 case KEYED_PROPERTY: {
4479 Handle<Code> ic = strict_mode() == SLOPPY
4480 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4481 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4482 CallIC(ic, expr->CountStoreFeedbackId());
4483 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4484 if (expr->is_postfix()) {
4485 if (!context()->IsEffect()) {
4486 context()->PlugTOS();
4489 context()->Plug(rax);
4497 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4498 VariableProxy* proxy = expr->AsVariableProxy();
4499 ASSERT(!context()->IsEffect());
4500 ASSERT(!context()->IsTest());
4502 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4503 Comment cmnt(masm_, "[ Global variable");
4504 __ Move(rcx, proxy->name());
4509 PrepareForBailout(expr, TOS_REG);
4510 context()->Plug(rax);
4511 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4512 Comment cmnt(masm_, "[ Lookup slot");
4517 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4521 __ Push(proxy->name());
4522 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4523 PrepareForBailout(expr, TOS_REG);
4526 context()->Plug(rax);
4529 VisitInDuplicateContext(expr);
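// typeof x == "..." comparisons: the operand is evaluated for its typeof
// value and rax is tested directly against the type named by the literal
// string, so the typeof result string is never materialized.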
4534 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4535 Expression* sub_expr,
4536 Handle<String> check) {
4537 Label materialize_true, materialize_false;
4538 Label* if_true = NULL;
4539 Label* if_false = NULL;
4540 Label* fall_through = NULL;
4541 context()->PrepareTest(&materialize_true, &materialize_false,
4542 &if_true, &if_false, &fall_through);
4544 { AccumulatorValueContext context(this);
4545 VisitForTypeofValue(sub_expr);
4547 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4549 if (check->Equals(isolate()->heap()->number_string())) {
4550 __ JumpIfSmi(rax, if_true);
4552 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4553 Split(equal, if_true, if_false, fall_through);
4554 } else if (check->Equals(isolate()->heap()->string_string())) {
4555 __ JumpIfSmi(rax, if_false);
4561 Split(zero, if_true, if_false, fall_through);
4562 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4563 __ JumpIfSmi(rax, if_false);
4565 Split(equal, if_true, if_false, fall_through);
4566 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4567 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4569 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4570 Split(equal, if_true, if_false, fall_through);
4571 } else if (FLAG_harmony_typeof &&
4572 check->Equals(isolate()->heap()->null_string())) {
4573 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4574 Split(equal, if_true, if_false, fall_through);
4575 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4576 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4578 __ JumpIfSmi(rax, if_false);
4583 Split(not_zero, if_true, if_false, fall_through);
4584 } else if (check->Equals(isolate()->heap()->function_string())) {
4585 __ JumpIfSmi(rax, if_false);
4590 Split(equal, if_true, if_false, fall_through);
4591 } else if (check->Equals(isolate()->heap()->object_string())) {
4592 __ JumpIfSmi(rax, if_false);
4593 if (!FLAG_harmony_typeof) {
4594 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4604 Split(zero, if_true, if_false, fall_through);
4606 if (if_false != fall_through) __ jmp(if_false);
4608 context()->Plug(if_true, if_false);
4612 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4613 Comment cmnt(masm_, "[ CompareOperation");
4614 SetSourcePosition(expr->position());
4618 if (TryLiteralCompare(expr)) return;
4622 Label materialize_true, materialize_false;
4623 Label* if_true = NULL;
4624 Label* if_false = NULL;
4625 Label* fall_through = NULL;
4626 context()->PrepareTest(&materialize_true, &materialize_false,
4627 &if_true, &if_false, &fall_through);
4630 VisitForStackValue(expr->left());
4633 VisitForStackValue(expr->right());
4635 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4636 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4637 Split(equal, if_true, if_false, fall_through);
4640 case Token::INSTANCEOF: {
4641 VisitForStackValue(expr->right());
4644 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4647 Split(zero, if_true, if_false, fall_through);
4652 VisitForAccumulatorValue(expr->right());
4656 bool inline_smi_code = ShouldInlineSmiCase(op);
4657 JumpPatchSite patch_site(masm_);
4658 if (inline_smi_code) {
4662 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4664 Split(cc, if_true, if_false, NULL);
4665 __ bind(&slow_case);
4669 SetSourcePosition(expr->position());
4671 CallIC(ic, expr->CompareOperationFeedbackId());
4672 patch_site.EmitPatchInfo();
4674 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4676 Split(cc, if_true, if_false, fall_through);
4682 context()->Plug(if_true, if_false);
4686 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4687 Expression* sub_expr,
4689 Label materialize_true, materialize_false;
4690 Label* if_true = NULL;
4691 Label* if_false = NULL;
4692 Label* fall_through = NULL;
4693 context()->PrepareTest(&materialize_true, &materialize_false,
4694 &if_true, &if_false, &fall_through);
4696 VisitForAccumulatorValue(sub_expr);
4697 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4698 if (expr->op() == Token::EQ_STRICT) {
4700 Heap::kNullValueRootIndex :
4701 Heap::kUndefinedValueRootIndex;
4702 __ CompareRoot(rax, nil_value);
4703 Split(equal, if_true, if_false, fall_through);
4706 CallIC(ic, expr->CompareOperationFeedbackId());
4708 Split(not_zero, if_true, if_false, fall_through);
4710 context()->Plug(if_true, if_false);
4714 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4716 context()->Plug(rax);
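// On x64 the full code generator keeps the current result in rax and the
// current context in rsi, so the two accessors below presumably just
// return those registers.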
4720 Register FullCodeGenerator::result_register() {
4725 Register FullCodeGenerator::context_register() {
4730 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4732 __ movp(Operand(rbp, frame_offset), value);
4736 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4741 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4743 if (declaration_scope->is_global_scope() ||
4744 declaration_scope->is_module_scope()) {
4750 } else if (declaration_scope->is_eval_scope()) {
4756 ASSERT(declaration_scope->is_function_scope());
4766 void FullCodeGenerator::EnterFinallyBlock() {
4767 ASSERT(!result_register().is(rdx));
4768 ASSERT(!result_register().is(rcx));
4770 __ PopReturnAddressTo(rdx);
4771 __ Move(rcx, masm_->CodeObject());
4773 __ Integer32ToSmi(rdx, rdx);
4777 __ Push(result_register());
4780 ExternalReference pending_message_obj =
4781 ExternalReference::address_of_pending_message_obj(isolate());
4782 __ Load(rdx, pending_message_obj);
4785 ExternalReference has_pending_message =
4786 ExternalReference::address_of_has_pending_message(isolate());
4787 __ Load(rdx, has_pending_message);
4788 __ Integer32ToSmi(rdx, rdx);
4791 ExternalReference pending_message_script =
4792 ExternalReference::address_of_pending_message_script(isolate());
4793 __ Load(rdx, pending_message_script);
4798 void FullCodeGenerator::ExitFinallyBlock() {
4799 ASSERT(!result_register().is(rdx));
4800 ASSERT(!result_register().is(rcx));
4803 ExternalReference pending_message_script =
4804 ExternalReference::address_of_pending_message_script(isolate());
4805 __ Store(pending_message_script, rdx);
4808 __ SmiToInteger32(rdx, rdx);
4809 ExternalReference has_pending_message =
4810 ExternalReference::address_of_has_pending_message(isolate());
4811 __ Store(has_pending_message, rdx);
4814 ExternalReference pending_message_obj =
4815 ExternalReference::address_of_pending_message_obj(isolate());
4816 __ Store(pending_message_obj, rdx);
4819 __ Pop(result_register());
4823 __ SmiToInteger32(rdx, rdx);
4824 __ Move(rcx, masm_->CodeObject());
4832 #define __ ACCESS_MASM(masm())
4836 int* context_length) {
4843 __ Drop(*stack_depth);
4844 if (*context_length > 0) {
4850 __ call(finally_entry_);
4853 *context_length = 0;
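// Back-edge patching: the constants below describe the code sequence at a
// loop back edge. In the unpatched state it is a "jns <offset>" guarding a
// call to the InterruptCheck builtin; PatchAt rewrites the jns into a
// two-byte nop (0x66 0x90) and retargets the call at the on-stack
// replacement (or OsrAfterStackCheck) entry, and GetBackEdgeState inspects
// those bytes to tell which state a back-edge site is in.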
4861 static const byte kJnsInstruction = 0x79;
4862 static const byte kJnsOffset = 0x1d;
4863 static const byte kNopByteOne = 0x66;
4864 static const byte kNopByteTwo = 0x90;
4866 static const byte kCallInstruction = 0xe8;
4872 BackEdgeState target_state,
4873 Code* replacement_code) {
4875 Address jns_instr_address = call_target_address - 3;
4876 Address jns_offset_address = call_target_address - 2;
4878 switch (target_state) {
4884 *jns_instr_address = kJnsInstruction;
4885 *jns_offset_address = kJnsOffset;
4894 *jns_instr_address = kNopByteOne;
4895 *jns_offset_address = kNopByteTwo;
4901 replacement_code->entry());
4902 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4903 unoptimized_code, call_target_address, replacement_code);
4909 Code* unoptimized_code,
4912 Address jns_instr_address = call_target_address - 3;
4913 ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4915 if (*jns_instr_address == kJnsInstruction) {
4916 ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4917 ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4923 ASSERT_EQ(kNopByteOne, *jns_instr_address);
4924 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4927 unoptimized_code) ==
4928 isolate->builtins()->OnStackReplacement()->entry()) {
4932 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4941 #endif // V8_TARGET_ARCH_X64