#if defined(V8_TARGET_ARCH_IA32)

#define __ ACCESS_MASM(masm_)

  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

    ASSERT(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    EmitJump(carry, target, distance);

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      __ test(eax, Immediate(delta_to_patch_site));

  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ j(cc, target, distance);

  MacroAssembler* masm_;
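// Usage sketch (illustrative, mirroring the switch-statement code later in
// this file): a JumpPatchSite brackets the inlined smi check of a comparison
// so the IC can later locate and patch it; the `test eax, <delta>` emitted by
// EmitPatchInfo() encodes the distance back to the patch site.
//
//   JumpPatchSite patch_site(masm_);
//   patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
//   ...  // fast smi-only comparison
//   __ bind(&slow_case);
//   CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
//   patch_site.EmitPatchInfo();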
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  if (!info->is_classic_mode() || info->is_native()) {
    __ j(zero, &ok, Label::kNear);
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ JumpIfSmi(ecx, &ok);
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));

  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;

    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      if (var->IsContextSlot()) {
        __ mov(eax, Operand(ebp, parameter_offset));
        __ mov(Operand(esi, context_offset), eax);
        __ RecordWriteContextSlot(esi,

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
    int num_parameters = info->scope()->num_parameters();
    if (!is_classic_mode()) {
    } else if (function()->has_duplicate_parameters()) {
    ArgumentsAccessStub stub(type);

    __ CallRuntime(Runtime::kTraceEnter, 0);

  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");

  { Comment cmnt(masm_, "[ Declarations");
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      VariableDeclaration* function = scope()->function();
      ASSERT(function->proxy()->var()->mode() == CONST ||
      VisitVariableDeclaration(function);
    VisitDeclarations(scope()->declarations());

  { Comment cmnt(masm_, "[ Stack check");
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);

  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
void FullCodeGenerator::ClearAccumulator() {


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  if (FLAG_count_based_interrupts) {
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
          Max(1, distance / kBackEdgeDistanceUnit));
    EmitProfilingCounterDecrement(weight);
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
  RecordStackCheck(stmt->OsrEntryId());
  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
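// Note: with FLAG_weighted_back_edges the counter decrement is scaled by the
// back-edge distance (Max(1, distance / kBackEdgeDistanceUnit)), so a large
// loop body consumes the interrupt budget faster per iteration than a tight
// loop with a short back edge.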
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
    __ bind(&return_label_);
    __ CallRuntime(Runtime::kTraceExit, 1);
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else if (FLAG_weighted_back_edges) {
          Max(1, distance / kBackEdgeDistanceUnit));
    EmitProfilingCounterDecrement(weight);
    __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
    EmitProfilingCounterReset();

    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
    SetSourcePosition(function()->end_position() - 1);
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
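// The four expression contexts above determine where a visited expression
// leaves its value: EffectContext discards it, AccumulatorValueContext leaves
// it in eax (the result register), StackValueContext pushes it onto the
// stack, and TestContext converts it to control flow via DoTest.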
void FullCodeGenerator::AccumulatorValueContext::Plug(


void FullCodeGenerator::StackValueContext::Plug(


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
    __ SafeSet(result_register(), Immediate(lit));
    __ Set(result_register(), Immediate(lit));


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    __ SafePush(Immediate(lit));
    __ push(Immediate(lit));


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsSmi()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
    if (true_label_ != fall_through_) __ jmp(true_label_);
    __ mov(result_register(), lit);
    codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  __ Move(result_register(), reg);


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);


void FullCodeGenerator::EffectContext::Plug(bool flag) const {


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  if (true_label_ != fall_through_) __ jmp(true_label_);
  if (false_label_ != fall_through_) __ jmp(false_label_);


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ test(result_register(), result_register());
  Split(not_zero, if_true, if_false, fall_through);


void FullCodeGenerator::Split(Condition cc,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
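// Split() avoids emitting a jump for whichever target equals the
// fall-through label: that edge is reached by simply falling through, so
// only the remaining target needs an explicit branch.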
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  if (var->IsParameter()) {
  return Operand(ebp, offset);


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    __ LoadContext(scratch, context_chain_length);
  return StackOperand(var);


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  __ mov(dest, location);


void FullCodeGenerator::SetVar(Variable* var,
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);
  if (var->IsContextSlot()) {


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, "Declaration in with context.");
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, "Declaration in catch context.");


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(StackOperand(variable),
             Immediate(isolate()->factory()->the_hole_value()));

      Comment cmnt(masm_, "[ VariableDeclaration");
      EmitDebugCheckDeclarationContext(variable);
             Immediate(isolate()->factory()->the_hole_value()));

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      __ push(Immediate(isolate()->factory()->the_hole_value()));
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
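// The switch above dispatches on Variable::location(): global declarations
// are collected into globals_ for a single kDeclareGlobals call later, stack
// and context slots are hole-initialized directly, and dynamically
// looked-up slots fall back to the kDeclareContextSlot runtime function.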
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());

      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ RecordWriteContextSlot(esi,

      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(Immediate(variable->name()));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());

      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      Visit(declaration->module());


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  __ push(Immediate(pairs));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  VisitForStackValue(stmt->tag());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    VisitForAccumulatorValue(clause->label());

    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
      __ jmp(clause->body_target());
      __ bind(&slow_case);

    SetSourcePosition(clause->position());
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ jmp(clause->body_target());
    __ bind(&next_test);

  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
    __ jmp(default_clause->body_target());

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
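// Switch statements are compiled in two passes: the first loop emits the
// label comparisons (with an inlined smi-equality fast path guarded by a
// JumpPatchSite), the second loop binds each clause's body target and emits
// its statements, so fall-through between cases works naturally.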
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ cmp(eax, isolate()->factory()->null_value());
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ bind(&done_convert);

  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);
  __ jmp(&use_cache, Label::kNear);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
         isolate()->factory()->meta_map());

  Label no_descriptors;
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(eax, ecx);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));

  __ bind(&fixed_array);
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(ebx, cell);
  __ mov(ecx, Operand(esp, 0 * kPointerSize));

  __ bind(&non_proxy);
  __ mov(eax, Operand(esp, 0 * kPointerSize));
  __ cmp(eax, Operand(esp, 1 * kPointerSize));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edx, Operand(esp, 3 * kPointerSize));
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ j(equal, &update_each, Label::kNear);
  __ j(zero, &update_each);
  __ j(equal, loop_statement.continue_label());

  __ bind(&update_each);
  __ mov(result_register(), ebx);
  { EffectContext context(this);
    EmitAssignment(stmt->each());

  Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  EmitStackCheck(stmt, &loop);

  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  decrement_loop_depth();
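// During the loop the for-in state lives in five stack slots, read back
// above as Operand(esp, 0..4 * kPointerSize) and popped as a block
// (add esp, 5 * kPointerSize) at the exit. Judging from the reads, the
// layout from the stack top is: current index, enumeration length, the
// array of keys, the cached map, and the enumerable object itself.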
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ push(Immediate(info));
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                          ? isolate()->factory()->true_value()
                          : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  context()->Plug(eax);


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
  Register context = esi;
  Register temp = edx;
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  if (s != NULL && s->is_eval_scope()) {
    if (!context.is(temp)) {
      __ mov(temp, context);
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);

  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == LET) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      if (local->mode() == CONST) {
        __ mov(eax, isolate()->factory()->undefined_value());
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
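// Binding-hole semantics, as implemented above: reading a not-yet-initialized
// legacy const yields undefined, while reading an uninitialized let (or
// harmony const) binding throws a ReferenceError via the runtime.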
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();
  switch (var->location()) {
      Comment cmnt(masm_, "Global variable");
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);

      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
          skip_init_check = false;
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();

        if (!skip_init_check) {
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          __ mov(eax, isolate()->factory()->undefined_value());
          context()->Plug(eax);
      context()->Plug(var);

      Comment cmnt(masm_, "Lookup variable");
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(eax);


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ bind(&materialized);

  Label allocated, runtime_allocate;
  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ bind(&allocated);
  if ((size % (2 * kPointerSize)) != 0) {
  context()->Plug(eax);


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
    VisitForStackValue(expression);


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ push(Immediate(constant_properties));
  int flags = expr->fast_elements()
  flags |= expr->has_function()
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
    FastCloneShallowObjectStub stub(properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            VisitForEffect(value);
        __ push(Operand(esp, 0));
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
        accessor_table.lookup(key)->second->getter = value;
        accessor_table.lookup(key)->second->setter = value;

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    __ push(Operand(esp, 0));
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);

  if (expr->has_function()) {
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
    context()->PlugTOS();
    context()->Plug(eax);
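// Getter/setter pairs are buffered in accessor_table rather than defined as
// they are encountered, so a property that has both a getter and a setter is
// installed with a single kDefineOrRedefineAccessorProperty call.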
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  bool has_constant_fast_elements =
  Handle<FixedArrayBase> constant_elements_values(

  __ push(Immediate(constant_elements));
  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    FastCloneShallowArrayStub stub(
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (subexpr->AsLiteral() != NULL ||
    if (!result_saved) {
      result_saved = true;

    VisitForAccumulatorValue(subexpr);
    __ RecordWriteField(ebx, offset, result_register(), ecx,
                        EMIT_REMEMBERED_SET,
      StoreArrayLiteralElementStub stub;
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

    context()->PlugTOS();
    context()->Plug(eax);


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  switch (assign_type) {
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));
        __ mov(ecx, Operand(esp, 0));
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);

    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);
    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
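// Compound assignments (e.g. x += y) reuse the machinery above: load the
// target as the left operand, evaluate the RHS into the accumulator, apply
// the binary op (inline smi fast path or BinaryOpStub), then fall into the
// same store code used by plain assignments.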
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  ASSERT(!key->handle()->IsSmi());
  __ mov(ecx, Immediate(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Expression* right) {
  Label smi_case, done, stub_call;
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);
      __ cmp(eax, 0xc0000000);
      __ bind(&result_ok);
      __ test(eax, Immediate(0xc0000000));
      __ bind(&result_ok);
    case Token::BIT_AND:
    case Token::BIT_XOR:
  context()->Plug(eax);
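// The 0xc0000000 checks above test whether a 32-bit shift result still fits
// in a 31-bit smi: a result with either of the top two bits set cannot be
// smi-tagged without losing information, so such values are redirected to
// the stub call instead.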
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);

  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
    assign_type = (prop->key()->IsPropertyName())

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);

    case NAMED_PROPERTY: {
      VisitForAccumulatorValue(prop->obj());
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();

    case KEYED_PROPERTY: {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();

  context()->Plug(eax);


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
  if (var->IsUnallocated()) {
    __ mov(ecx, var->name());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    ASSERT(!var->IsParameter());
    if (var->IsStackLocal()) {
      __ mov(edx, StackOperand(var));
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ mov(StackOperand(var), eax);
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    if (var->IsLookupSlot()) {
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      if (generate_debug_code_ && op == Token::INIT_LET) {
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, "Let binding re-initialization.");
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      ASSERT(var->IsLookupSlot());
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
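// Every pointer store into a context slot above is paired with
// RecordWriteContextSlot, which emits the write barrier the garbage
// collector relies on to track cross-generation and incremental-marking
// pointer updates.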
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();

  SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->handle());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    __ mov(edx, result_register());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ mov(ecx, result_register());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id) {
  __ call(code, rmode, ast_id);


void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    __ Set(ecx, Immediate(name));
  SetSourcePosition(expr->position());
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->Plug(eax);


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
  VisitForAccumulatorValue(key);

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
  SetSourcePosition(expr->position());
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, eax);


  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
  SetSourcePosition(expr->position());

  Handle<Object> uninitialized =
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);

  CallFunctionStub stub(arg_count, flags);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());

  RecordJSReturnSite(expr);
  context()->DropAndPlug(1, eax);


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
    __ push(Immediate(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);


void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ push(Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    SetSourcePosition(expr->position());
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    RecordJSReturnSite(expr);
    context()->DropAndPlug(1, eax);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    { PreservePositionScope scope(masm()->positions_recorder());
      __ push(context_register());
      __ push(Immediate(proxy->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
    if (done.is_linked()) {
      __ jmp(&call, Label::kNear);
      __ push(Immediate(isolate()->factory()->the_hole_value()));
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
      EmitKeyedCallWithIC(expr, property->key());
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);

  ASSERT(expr->return_is_recorded_);
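// VisitCall dispatches on the callee's shape: possibly-eval calls resolve
// the target with kResolvePossiblyDirectEval first, unallocated (global)
// variables go through a call IC, lookup-slot variables are loaded via
// kLoadContextSlot, property callees use named or keyed call ICs, and
// everything else is a plain function call.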
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  VisitForStackValue(expr->expression());

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));

  SetSourcePosition(expr->position());
  __ Set(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  Handle<Object> uninitialized =
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);

  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
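// The %_Is* intrinsics below all follow the shape established here:
// PrepareTest() materializes the true/false targets for the current context,
// the predicate code sets the flags, and Split() turns the condition into
// jumps (or fall-through) before context()->Plug() delivers the result.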
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ LoadInstanceDescriptors(ebx, ebx);
  __ JumpIfSmi(ecx, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ bind(&check_frame_marker);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));
  context()->Plug(eax);


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  context()->Plug(eax);


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));
  __ JumpIfSmi(eax, &null);
  __ mov(eax, isolate()->factory()->function_class_symbol());

  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_symbol());
  __ mov(eax, isolate()->factory()->null_value());

  context()->Plug(eax);


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallRuntime(Runtime::kLog, 2);

  __ mov(eax, isolate()->factory()->undefined_value());
  context()->Plug(eax);


void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);

  __ bind(&heapnumber_allocated);
  __ PrepareCallCFunction(1, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));
           Immediate(0x41300000));
  context()->Plug(eax);
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  context()->Plug(eax);


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  context()->Plug(eax);


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));
  __ JumpIfSmi(eax, &done, Label::kNear);
  context()->Plug(eax);


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  __ JumpIfSmi(object, &not_date_object);
  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ mov(scratch, Operand::StaticVariable(stamp));
                         kPointerSize * index->value()));
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  context()->Plug(result);


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
    __ CallRuntime(Runtime::kMath_pow, 2);
  context()->Plug(eax);


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ JumpIfSmi(ebx, &done, Label::kNear);
  context()->Plug(eax);


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  context()->Plug(eax);


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(ebx);
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  Label need_conversion;
  Label index_out_of_range;
  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,

  __ bind(&index_out_of_range);
  __ Set(result, Immediate(isolate()->factory()->nan_value()));

  __ bind(&need_conversion);
  __ Set(result, Immediate(isolate()->factory()->undefined_value()));

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  Label need_conversion;
  Label index_out_of_range;
                                  &index_out_of_range,

  __ bind(&index_out_of_range);
  __ Set(result, Immediate(isolate()->factory()->empty_string()));

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  context()->Plug(eax);


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  context()->Plug(eax);


void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(eax);


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(eax);


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(eax);


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  context()->Plug(eax);


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
3326 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3327 ZoneList<Expression*>* args = expr->arguments();
3328 ASSERT(args->length() >= 2);
3330 int arg_count = args->length() - 2;
3331 for (
int i = 0; i < arg_count + 1; ++i) {
3332 VisitForStackValue(args->at(i));
3334 VisitForAccumulatorValue(args->last());
3336 Label runtime, done;
3338 __ JumpIfSmi(
eax, &runtime);
3343 __ mov(
edi, result_register());
3344 ParameterCount count(arg_count);
3352 __ CallRuntime(Runtime::kCall, args->length());
3355 context()->Plug(
eax);
3359 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3361 RegExpConstructResultStub stub;
3362 ZoneList<Expression*>* args = expr->arguments();
3363 ASSERT(args->length() == 3);
3364 VisitForStackValue(args->at(0));
3365 VisitForStackValue(args->at(1));
3366 VisitForStackValue(args->at(2));
3368 context()->Plug(
eax);


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache, FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache,
         ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache, FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp holds the cache finger offset as a smi; a hit at the finger returns
  // the cached value stored right after the key.
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);
  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
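
// Cache layout assumed by the fast path above (sketch): a
// JSFunctionResultCache stores interleaved (key, value) pairs in a
// FixedArray plus a "finger" marking the most recent hit, so the probe is
// one compare at the finger before falling back to Runtime::kGetFromCache.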


void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(tmp, right);
  __ JumpIfSmi(tmp, &fail);
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}
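
// Equivalence as tested above (sketch): identical objects pass, two heap
// objects pass only if the left is a JSRegExp, both share a map, and their
// kDataOffset slots (compiled pattern and flags) are equal; smis and
// everything else fail.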


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
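
// Background for the two helpers above (sketch): when a string has been
// used as an array index, the parsed index is cached in the string's hash
// field; kContainsCachedArrayIndexMask tests for its presence and
// IndexFromHash re-extracts the integer without reparsing the string.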


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array).
  VisitForAccumulatorValue(args->at(0));

  Register array = eax;
  Register elements = no_reg;  // Will be eax.
  Register index = edx;
  Register string_length = ecx;
  Register string = esi;
  Register scratch = ebx;
  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));

  // Check that the array is a JSArray with fast elements.
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing the array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqAsciiString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times (array_length - 1)) to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  // Convert string_length from smi to integer.
  __ shr(string_length, 1);
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));

  // Dispatch on the length of the separator.
  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);

  // Empty separator case.
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Get string = array[index], copy its bytes, and advance.
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);

  // One-char separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index], copy its bytes, and advance.
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);

  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index], copy its bytes, and advance.
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);

  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore the context register.
  __ add(esp, Immediate(3 * kPointerSize));
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
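
// Fast-path shape (illustrative): this inlines Array.prototype.join for
// flat ASCII inputs, with one loop per separator kind:
//   ["a", "b"].join("")    // loop_1: empty separator
//   ["a", "b"].join(",")   // loop_2: one-char separator
//   ["a", "b"].join(", ")  // loop_3: longer separator
// On any bailout the result slot holds undefined, which the JS caller
// treats as "use the generic join".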


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}
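
// Dispatch summary (sketch, assumes --allow-natives-syntax):
//   %_IsSmi(x)          // leading '_': expanded inline above
//   %NumberToString(x)  // plain '%': call into the C++ runtime
// is_jsruntime() covers natives implemented in JS, reached via a call IC
// on the builtins object.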


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ push(Immediate(Smi::FromInt(strict_mode_flag)));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit
        // the subexpression.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the accumulator register eax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->UnaryOperationFeedbackId());
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack.  If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub.  Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
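
// Worked example (sketch): for a postfix increment in a value context,
//   var old = a[i]++;
// the code above reserves a stack slot for the old value, loads a[i],
// runs ToNumberStub if the operand is not a smi, stashes the old value in
// the reserved slot, adds 1 inline (JumpPatchSite lets the IC patch the
// smi check later), stores through the keyed store IC, and finally plugs
// the saved old value as the expression's result.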


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(edx, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
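
// Specialization target (sketch): literal comparisons such as
//   typeof x === "number"    // smi check + heap-number map compare
//   typeof x === "function"  // JS_FUNCTION_TYPE or a function proxy
// never materialize the typeof string; each branch tests the value
// directly and feeds a condition into Split().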


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = equal;
          break;
        case Token::LT:
          cc = less;
          break;
        case Token::GT:
          cc = greater;
          break;
        case Token::LTE:
          cc = less_equal;
          break;
        case Token::GTE:
          cc = greater_equal;
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
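
// Inline smi fast path used above (sketch): for e.g. `a < b`, both
// operands are or-ed into ecx and checked with a patchable smi test, so
// two smis compare with a single cmp; JumpPatchSite records the site so
// the compare IC can patch the check once non-smi operands show up.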


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Handle<Object> nil_value = nil == kNullValue ?
      isolate()->factory()->null_value() :
      isolate()->factory()->undefined_value();
  __ cmp(eax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Object> other_nil_value = nil == kNullValue ?
        isolate()->factory()->undefined_value() :
        isolate()->factory()->null_value();
    __ j(equal, if_true);
    __ cmp(eax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // It can be an undetectable object.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(edx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
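
// Semantics implemented above (sketch):
//   x === null  // strict: only the literal nil value matches
//   x == null   // sloppy: null, undefined, and undetectable objects
//               // (e.g. document.all) all compare equal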


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure: pass a smi sentinel and let the runtime look up the
    // empty function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval.  Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}
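
// Why the return address is "cooked" (sketch): EnterFinallyBlock saves a
// smi-encoded delta from the code object's start instead of a raw return
// address, so a moving GC can relocate the code object while the finally
// body runs; ExitFinallyBlock adds the (possibly new) code object base
// back before jumping.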

#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32