30 #if defined(V8_TARGET_ARCH_IA32)
45 #define __ ACCESS_MASM(masm_)
// Records the location of a smi-check jump so the inline cache machinery can
// patch it later.  NOTE(review): this excerpt elides interior lines of the
// class; comments describe only the code visible here.
50 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
52 info_emitted_ =
false;
// Destructor invariant: patch info was emitted iff the site was bound.
57 ASSERT(patch_site_.is_bound() == info_emitted_);
// Patchable jump taken when |reg| is not a smi.
60 void EmitJumpIfNotSmi(Register reg,
62 Label::Distance distance = Label::kFar) {
// Patchable jump taken when |reg| is a smi (jumps on the carry condition
// set up by the elided test sequence).
67 void EmitJumpIfSmi(Register reg,
69 Label::Distance distance = Label::kFar) {
71 EmitJump(
carry, target, distance);
// Emits a dummy test instruction whose immediate encodes the code-size delta
// back to the patch site, so the patcher can find the jump to rewrite.
74 void EmitPatchInfo() {
75 if (patch_site_.is_bound()) {
76 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
78 __ test(
eax, Immediate(delta_to_patch_site));
// Binds the patch site label and emits the single patchable jump on |cc|.
89 void EmitJump(
Condition cc, Label* target, Label::Distance distance) {
90 ASSERT(!patch_site_.is_bound() && !info_emitted_);
92 __ bind(&patch_site_);
93 __ j(cc, target, distance);
96 MacroAssembler* masm_;
// Generates the full (unoptimized) code for the function being compiled:
// prologue, receiver patching for non-classic mode, local/context/arguments
// allocation, declarations, stack check, body, and the implicit return.
// NOTE(review): interior lines are elided in this excerpt; comments cover
// only the visible code.
117 void FullCodeGenerator::Generate() {
118 CompilationInfo* info = info_;
120 isolate()->factory()->NewFixedArray(
function()->handler_count(),
TENURED);
121 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
123 SetFunctionPosition(
function());
124 Comment cmnt(masm_,
"[ function compiled by full code generator");
// Debug aid: break at function entry when --stop-at names this function.
127 if (strlen(FLAG_stop_at) > 0 &&
128 info->function()->name()->IsEqualTo(
CStrVector(FLAG_stop_at))) {
// In strict/native code, replace an undefined-or-null receiver on the stack
// with undefined (classic mode would have substituted the global object).
137 if (!info->is_classic_mode() || info->is_native()) {
140 __ j(
zero, &ok, Label::kNear);
142 int receiver_offset = (info->scope()->num_parameters() + 1) *
kPointerSize;
143 __ mov(
ecx, Operand(
esp, receiver_offset));
144 __ JumpIfSmi(
ecx, &ok);
147 __ mov(Operand(
esp, receiver_offset),
148 Immediate(isolate()->factory()->undefined_value()));
// Frame is constructed manually below rather than by FrameScope.
155 FrameScope frame_scope(masm_, StackFrame::MANUAL);
// Fill stack-allocated locals with undefined.
162 { Comment cmnt(masm_,
"[ Allocate locals");
163 int locals_count = info->scope()->num_stack_slots();
164 if (locals_count == 1) {
165 __ push(Immediate(isolate()->factory()->undefined_value()));
166 }
else if (locals_count > 1) {
167 __ mov(
eax, Immediate(isolate()->factory()->undefined_value()));
168 for (
int i = 0; i < locals_count; i++) {
174 bool function_in_register =
true;
// Allocate a local (heap) context if the function needs one.
178 if (heap_slots > 0) {
179 Comment cmnt(masm_,
"[ Allocate local context");
183 FastNewContextStub stub(heap_slots);
186 __ CallRuntime(Runtime::kNewFunctionContext, 1);
188 function_in_register =
false;
// Copy context-allocated parameters from the stack into the new context,
// with a write barrier for each stored slot.
194 int num_parameters = info->scope()->num_parameters();
195 for (
int i = 0; i < num_parameters; i++) {
197 if (var->IsContextSlot()) {
201 __ mov(
eax, Operand(
ebp, parameter_offset));
204 __ mov(Operand(
esi, context_offset),
eax);
206 __ RecordWriteContextSlot(
esi,
// Materialize the arguments object if the function uses it.
215 Variable* arguments = scope()->
arguments();
216 if (arguments !=
NULL) {
218 Comment cmnt(masm_,
"[ Allocate arguments object");
219 if (function_in_register) {
225 int num_parameters = info->scope()->num_parameters();
236 if (!is_classic_mode()) {
238 }
else if (
function()->has_duplicate_parameters()) {
243 ArgumentsAccessStub stub(type);
250 __ CallRuntime(Runtime::kTraceEnter, 0);
// Illegal redeclarations are visited instead of normal declarations.
255 if (scope()->HasIllegalRedeclaration()) {
256 Comment cmnt(masm_,
"[ Declarations");
261 { Comment cmnt(masm_,
"[ Declarations");
// A named function expression declares itself; visit that declaration first.
264 if (scope()->is_function_scope() && scope()->
function() !=
NULL) {
265 VariableDeclaration*
function = scope()->
function();
266 ASSERT(function->proxy()->var()->mode() ==
CONST ||
269 VisitVariableDeclaration(
function);
271 VisitDeclarations(scope()->declarations());
// Entry stack check against the isolate's stack limit.
274 { Comment cmnt(masm_,
"[ Stack check");
277 ExternalReference stack_limit =
278 ExternalReference::address_of_stack_limit(isolate());
279 __ cmp(
esp, Operand::StaticVariable(stack_limit));
286 { Comment cmnt(masm_,
"[ Body");
287 ASSERT(loop_depth() == 0);
288 VisitStatements(
function()->body());
289 ASSERT(loop_depth() == 0);
// Implicit return of undefined when control falls off the end of the body.
295 { Comment cmnt(masm_,
"[ return <undefined>;");
296 __ mov(
eax, isolate()->factory()->undefined_value());
297 EmitReturnSequence();
// Clears the accumulator (body elided in this excerpt; presumably zeroes
// eax — confirm against the full source).
302 void FullCodeGenerator::ClearAccumulator() {
// Decrements the profiling counter cell by |delta|; loads the cell into ebx
// first (the decrement itself is elided in this excerpt).
307 void FullCodeGenerator::EmitProfilingCounterDecrement(
int delta) {
308 __ mov(
ebx, Immediate(profiling_counter_));
// Resets the profiling counter to the interrupt budget, adjusting the value
// for self-optimization and debugger-active cases before storing it.
314 void FullCodeGenerator::EmitProfilingCounterReset() {
315 int reset_value = FLAG_interrupt_budget;
316 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
// Debugger disables self-optimization; a different reset value is used
// (elided here).
320 if (isolate()->IsDebuggerActive()) {
324 __ mov(
ebx, Immediate(profiling_counter_));
// Tuning constants for weighted back-edge (loop) interrupt checks.
330 static const int kMaxBackEdgeWeight = 127;
331 static const int kBackEdgeDistanceDivisor = 100;
// Emits the per-back-edge stack/interrupt check for a loop statement.  With
// count-based interrupts the profiling counter is decremented by a weight
// derived from the back-edge distance; otherwise the stack limit is compared
// directly.  Records an OSR entry point for the loop.
334 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
335 Label* back_edge_target) {
336 Comment cmnt(masm_,
"[ Stack check");
339 if (FLAG_count_based_interrupts) {
341 if (FLAG_weighted_back_edges) {
342 ASSERT(back_edge_target->is_bound());
// Longer back edges get a larger weight, clamped to kMaxBackEdgeWeight.
344 weight =
Min(kMaxBackEdgeWeight,
345 Max(1, distance / kBackEdgeDistanceDivisor));
347 EmitProfilingCounterDecrement(weight);
355 ExternalReference stack_limit =
356 ExternalReference::address_of_stack_limit(isolate());
357 __ cmp(
esp, Operand::StaticVariable(stack_limit));
// Mark this point as a valid on-stack-replacement entry.
366 RecordStackCheck(stmt->OsrEntryId());
375 if (FLAG_count_based_interrupts) {
376 EmitProfilingCounterReset();
384 PrepareForBailoutForId(stmt->OsrEntryId(),
NO_REGISTERS);
// Emits the common return sequence.  The first return binds return_label_;
// later returns jump to it.  Optionally decrements the profiling counter
// (weighted by function size) and may trigger self-optimization before
// tearing down the frame and returning past the arguments.
388 void FullCodeGenerator::EmitReturnSequence() {
389 Comment cmnt(masm_,
"[ Return sequence");
390 if (return_label_.is_bound()) {
391 __ jmp(&return_label_);
394 __ bind(&return_label_);
397 __ CallRuntime(Runtime::kTraceExit, 1);
399 if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
402 if (info_->ShouldSelfOptimize()) {
403 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
404 }
else if (FLAG_weighted_back_edges) {
406 weight =
Min(kMaxBackEdgeWeight,
407 Max(1, distance / kBackEdgeDistanceDivisor));
409 EmitProfilingCounterDecrement(weight);
413 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
415 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
421 EmitProfilingCounterReset();
// The exit sequence size is checked (debug) against an expected constant.
426 Label check_exit_codesize;
427 masm_->
bind(&check_exit_codesize);
429 SetSourcePosition(
function()->end_position() - 1);
// Return and pop receiver + parameters from the caller's stack.
436 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
437 __ Ret(arguments_bytes,
ecx);
438 #ifdef ENABLE_DEBUGGER_SUPPORT
// Effect context: a variable's value is not needed, so nothing is emitted.
448 void FullCodeGenerator::EffectContext::Plug(Variable* var)
const {
449 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
// Accumulator context: load the variable into the result register (eax).
453 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var)
const {
454 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
455 codegen()->GetVar(result_register(), var);
// Stack context: push the variable's value onto the stack (push elided).
459 void FullCodeGenerator::StackValueContext::Plug(Variable* var)
const {
460 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
461 MemOperand operand = codegen()->VarOperand(var, result_register());
// Test context: load the variable and branch on its boolean value.
467 void FullCodeGenerator::TestContext::Plug(Variable* var)
const {
469 codegen()->GetVar(result_register(), var);
// No normalization needed before the split for a plain variable load.
470 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
471 codegen()->DoTest(
this);
// Overload of Plug (parameter list and body elided in this excerpt).
480 void FullCodeGenerator::AccumulatorValueContext::Plug(
// Overload of Plug (parameter list and body elided in this excerpt).
486 void FullCodeGenerator::StackValueContext::Plug(
// Effect context: a literal's value is not needed; emit nothing.
497 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit)
const {
// Accumulator context: move the literal into the result register, using the
// Safe* variant (which avoids embedding raw smis) on one path — the guard
// condition is elided in this excerpt.
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502 Handle<Object> lit)
const {
504 __ SafeSet(result_register(), Immediate(lit));
506 __ Set(result_register(), Immediate(lit));
// Stack context: push the literal, Safe* variant on one (elided) path.
511 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit)
const {
513 __ SafePush(Immediate(lit));
515 __ push(Immediate(lit));
// Test context for a literal: the truthiness of most literals is known at
// compile time, so jump straight to the true/false label; only the generic
// fallback at the bottom materializes the value and calls DoTest.
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit)
const {
521 codegen()->PrepareForBailoutBeforeSplit(condition(),
525 ASSERT(!lit->IsUndetectableObject());
526 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527 if (false_label_ != fall_through_)
__ jmp(false_label_);
528 }
else if (lit->IsTrue() || lit->IsJSObject()) {
529 if (true_label_ != fall_through_)
__ jmp(true_label_);
530 }
// Strings: empty string is falsy, non-empty truthy (length test elided).
else if (lit->IsString()) {
532 if (false_label_ != fall_through_)
__ jmp(false_label_);
534 if (true_label_ != fall_through_)
__ jmp(true_label_);
536 }
// Smis: zero is falsy, non-zero truthy (value test elided).
else if (lit->IsSmi()) {
538 if (false_label_ != fall_through_)
__ jmp(false_label_);
540 if (true_label_ != fall_through_)
__ jmp(true_label_);
// Fallback: evaluate truthiness at runtime.
544 __ mov(result_register(), lit);
545 codegen()->DoTest(
this);
// Effect context: drop |count| stack entries; the register value is unused.
550 void FullCodeGenerator::EffectContext::DropAndPlug(
int count,
551 Register reg)
const {
// Accumulator context: drop |count| entries and leave |reg| in eax.
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
559 Register reg)
const {
562 __ Move(result_register(), reg);
// Stack context: collapse |count| stack entries into one holding |reg|.
566 void FullCodeGenerator::StackValueContext::DropAndPlug(
int count,
567 Register reg)
const {
569 if (count > 1)
__ Drop(count - 1);
// Overwrite the remaining (top) slot with the value.
570 __ mov(Operand(
esp, 0), reg);
// Test context: drop |count| entries, then branch on |reg|'s boolean value.
574 void FullCodeGenerator::TestContext::DropAndPlug(
int count,
575 Register reg)
const {
579 __ Move(result_register(), reg);
580 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
581 codegen()->DoTest(
this);
// Effect context: both materialization labels must coincide; just bind them.
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586 Label* materialize_false)
const {
587 ASSERT(materialize_true == materialize_false);
588 __ bind(materialize_true);
// Accumulator context: materialize true/false as heap booleans in eax.
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593 Label* materialize_true,
594 Label* materialize_false)
const {
596 __ bind(materialize_true);
597 __ mov(result_register(), isolate()->factory()->true_value());
598 __ jmp(&done, Label::kNear);
599 __ bind(materialize_false);
600 __ mov(result_register(), isolate()->factory()->false_value());
// Stack context: materialize true/false as heap booleans on the stack.
605 void FullCodeGenerator::StackValueContext::Plug(
606 Label* materialize_true,
607 Label* materialize_false)
const {
609 __ bind(materialize_true);
610 __ push(Immediate(isolate()->factory()->true_value()));
611 __ jmp(&done, Label::kNear);
612 __ bind(materialize_false);
613 __ push(Immediate(isolate()->factory()->false_value()));
// Test context: nothing to materialize — the labels must already be the
// test's own true/false targets.
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619 Label* materialize_false)
const {
620 ASSERT(materialize_true == true_label_);
621 ASSERT(materialize_false == false_label_);
// Effect context: a boolean constant has no effect; emit nothing.
625 void FullCodeGenerator::EffectContext::Plug(
bool flag)
const {
// Accumulator context: load the corresponding heap boolean into eax.
629 void FullCodeGenerator::AccumulatorValueContext::Plug(
bool flag)
const {
630 Handle<Object> value = flag
631 ? isolate()->factory()->true_value()
632 : isolate()->factory()->false_value();
633 __ mov(result_register(), value);
// Stack context: push the corresponding heap boolean.
637 void FullCodeGenerator::StackValueContext::Plug(
bool flag)
const {
638 Handle<Object> value = flag
639 ? isolate()->factory()->true_value()
640 : isolate()->factory()->false_value();
641 __ push(Immediate(value));
// Test context: the outcome is known statically — jump straight to the
// true or false label (unless it is the fall-through).
645 void FullCodeGenerator::TestContext::Plug(
bool flag)
const {
646 codegen()->PrepareForBailoutBeforeSplit(condition(),
651 if (true_label_ != fall_through_)
__ jmp(true_label_);
653 if (false_label_ != fall_through_)
__ jmp(false_label_);
// Converts the value in the result register to a boolean via ToBooleanStub
// and splits control flow on the (non-zero => true) outcome.
658 void FullCodeGenerator::DoTest(Expression* condition,
661 Label* fall_through) {
662 ToBooleanStub stub(result_register());
663 __ push(result_register());
664 __ CallStub(&stub, condition->test_id());
// The stub leaves a zero/non-zero flag value in the result register.
665 __ test(result_register(), result_register());
667 Split(
not_zero, if_true, if_false, fall_through);
// (Signature start elided — presumably FullCodeGenerator::Split.)  Emits the
// minimal jumps for a conditional split, taking advantage of whichever
// target is the fall-through.
674 Label* fall_through) {
675 if (if_false == fall_through) {
677 }
else if (if_true == fall_through) {
// Returns the ebp-relative operand of a stack-allocated variable.
// Parameters live above the frame (receiver + saved fp/ra), hence the
// positive offset adjustment.
686 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
687 ASSERT(var->IsStackAllocated());
691 if (var->IsParameter()) {
692 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
696 return Operand(
ebp, offset);
// Returns an operand for |var|: for a context slot, walks the context chain
// into |scratch| first; otherwise delegates to StackOperand.
700 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
702 if (var->IsContextSlot()) {
704 __ LoadContext(scratch, context_chain_length);
707 return StackOperand(var);
// Loads |var| into |dest| (dest also serves as the scratch for VarOperand).
712 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
715 __ mov(dest, location);
// Stores |src| into |var|, using |scratch0|/|scratch1| for addressing and the
// write barrier.  The scratch registers must be distinct from |src| and from
// each other.  Context-slot stores need a write barrier (elided here).
719 void FullCodeGenerator::SetVar(Variable* var,
723 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
724 ASSERT(!scratch0.is(src));
725 ASSERT(!scratch0.is(scratch1));
726 ASSERT(!scratch1.is(src));
727 MemOperand location = VarOperand(var, scratch0);
728 __ mov(location, src);
731 if (var->IsContextSlot()) {
// Records a bailout point just before a test split.  Only relevant in test
// context for optimizable code.  When |should_normalize| the value is
// compared against true so deopt sees a canonical boolean.
739 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
740 bool should_normalize,
746 if (!context()->IsTest() || !info_->IsOptimizable())
return;
// Normalization code is skipped during normal execution.
749 if (should_normalize)
__ jmp(&skip, Label::kNear);
750 PrepareForBailout(expr,
TOS_REG);
751 if (should_normalize) {
752 __ cmp(
eax, isolate()->factory()->true_value());
// Debug-only sanity check that a declaration targets the current function
// context — i.e. we are not inside a with- or catch-context.
759 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
762 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
763 if (FLAG_debug_code) {
766 __ cmp(
ebx, isolate()->factory()->with_context_map());
767 __ Check(
not_equal,
"Declaration in with context.");
768 __ cmp(
ebx, isolate()->factory()->catch_context_map());
769 __ Check(
not_equal,
"Declaration in catch context.");
// Emits code (or records globals) for a variable declaration, dispatching on
// where the variable lives: global (record name + initial value pair),
// stack/context slot (store the hole for hole-initialized bindings), or
// lookup slot (declare dynamically via the runtime).
774 void FullCodeGenerator::VisitVariableDeclaration(
775 VariableDeclaration* declaration) {
779 VariableProxy* proxy = declaration->proxy();
781 Variable* variable = proxy->var();
783 switch (variable->location()) {
// Globals are accumulated as name/value pairs for DeclareGlobals.
785 globals_->
Add(variable->name(),
zone());
786 globals_->
Add(variable->binding_needs_init()
787 ? isolate()->factory()->the_hole_value()
788 : isolate()->factory()->undefined_value(),
zone());
794 Comment cmnt(masm_,
"[ VariableDeclaration");
795 __ mov(StackOperand(variable),
796 Immediate(isolate()->factory()->the_hole_value()));
802 Comment cmnt(masm_,
"[ VariableDeclaration");
803 EmitDebugCheckDeclarationContext(variable);
805 Immediate(isolate()->factory()->the_hole_value()));
// Lookup slot: declare through the runtime with name + mode + value.
812 Comment cmnt(masm_,
"[ VariableDeclaration");
814 __ push(Immediate(variable->name()));
826 __ push(Immediate(isolate()->factory()->the_hole_value()));
830 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
// Emits code (or records globals) for a function declaration.  Globals get
// the compiled SharedFunctionInfo recorded; local/context slots evaluate the
// function expression and store it; lookup slots declare via the runtime.
837 void FullCodeGenerator::VisitFunctionDeclaration(
838 FunctionDeclaration* declaration) {
839 VariableProxy* proxy = declaration->proxy();
840 Variable* variable = proxy->var();
841 switch (variable->location()) {
843 globals_->
Add(variable->name(),
zone());
844 Handle<SharedFunctionInfo>
function =
// Compilation can stack-overflow; propagate as a stack overflow.
847 if (
function.is_null())
return SetStackOverflow();
848 globals_->
Add(
function,
zone());
854 Comment cmnt(masm_,
"[ FunctionDeclaration");
855 VisitForAccumulatorValue(declaration->fun());
856 __ mov(StackOperand(variable), result_register());
861 Comment cmnt(masm_,
"[ FunctionDeclaration");
862 EmitDebugCheckDeclarationContext(variable);
863 VisitForAccumulatorValue(declaration->fun());
// Context stores need a write barrier.
866 __ RecordWriteContextSlot(
esi,
878 Comment cmnt(masm_,
"[ FunctionDeclaration");
880 __ push(Immediate(variable->name()));
882 VisitForStackValue(declaration->fun());
883 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
// Emits code for a module declaration: records the module instance for
// globals, or checks the declaration context for context slots, then visits
// the module body.
890 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
891 VariableProxy* proxy = declaration->proxy();
892 Variable* variable = proxy->var();
893 Handle<JSModule> instance = declaration->module()->interface()->Instance();
894 ASSERT(!instance.is_null());
896 switch (variable->location()) {
898 Comment cmnt(masm_,
"[ ModuleDeclaration");
899 globals_->
Add(variable->name(),
zone());
900 globals_->
Add(instance,
zone());
901 Visit(declaration->module());
906 Comment cmnt(masm_,
"[ ModuleDeclaration");
907 EmitDebugCheckDeclarationContext(variable);
909 Visit(declaration->module());
// Emits code for an import declaration (only the context-slot check is
// visible in this excerpt).
921 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
922 VariableProxy* proxy = declaration->proxy();
923 Variable* variable = proxy->var();
924 switch (variable->location()) {
930 Comment cmnt(masm_,
"[ ImportDeclaration");
931 EmitDebugCheckDeclarationContext(variable);
// Export declarations generate no code here (body elided in this excerpt).
944 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
// Declares all accumulated global name/value pairs in one runtime call.
949 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
952 __ push(Immediate(pairs));
954 __ CallRuntime(Runtime::kDeclareGlobals, 3);
// Compiles a switch statement: evaluates the tag onto the stack, emits a
// strict-equality comparison (with an inline smi fast path and a patchable
// CompareIC) per non-default clause, falls back to the default clause, then
// emits the clause bodies in order.
959 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
960 Comment cmnt(masm_,
"[ SwitchStatement");
961 Breakable nested_statement(
this, stmt);
962 SetStatementPosition(stmt);
965 VisitForStackValue(stmt->tag());
968 ZoneList<CaseClause*>* clauses = stmt->cases();
969 CaseClause* default_clause =
NULL;
// First pass: emit the comparison chain for each labeled clause.
973 for (
int i = 0; i < clauses->length(); i++) {
974 CaseClause* clause = clauses->at(i);
975 clause->body_target()->Unuse();
// The default clause is compiled last, after all comparisons.
978 if (clause->is_default()) {
979 default_clause = clause;
983 Comment cmnt(masm_,
"[ Case comparison");
988 VisitForAccumulatorValue(clause->label());
992 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
993 JumpPatchSite patch_site(masm_);
994 if (inline_smi_code) {
998 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
1003 __ jmp(clause->body_target());
1004 __ bind(&slow_case);
// Generic comparison via a patchable CompareIC.
1008 SetSourcePosition(clause->position());
1010 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1011 patch_site.EmitPatchInfo();
1015 __ jmp(clause->body_target());
1020 __ bind(&next_test);
// No clause matched: go to default, or break out of the switch.
1022 if (default_clause ==
NULL) {
1023 __ jmp(nested_statement.break_label());
1025 __ jmp(default_clause->body_target());
// Second pass: emit all clause bodies.
1029 for (
int i = 0; i < clauses->length(); i++) {
1030 Comment cmnt(masm_,
"[ Case body");
1031 CaseClause* clause = clauses->at(i);
1032 __ bind(clause->body_target());
1033 PrepareForBailoutForId(clause->EntryId(),
NO_REGISTERS);
1034 VisitStatements(clause->statements());
1037 __ bind(nested_statement.break_label());
// Compiles a for-in loop: checks the enumerable for undefined/null, converts
// it to an object, obtains an enum cache (or a fixed array of names via the
// runtime), then iterates index/length pairs kept on the stack, assigning
// each key to the loop variable and running the body.  NOTE(review): many
// interior lines are elided; comments describe only visible code.
1042 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1043 Comment cmnt(masm_,
"[ ForInStatement");
1044 SetStatementPosition(stmt);
1047 ForIn loop_statement(
this, stmt);
1048 increment_loop_depth();
// Skip the loop entirely for undefined or null enumerables.
1053 VisitForAccumulatorValue(stmt->enumerable());
1054 __ cmp(
eax, isolate()->factory()->undefined_value());
1056 __ cmp(
eax, isolate()->factory()->null_value());
1059 PrepareForBailoutForId(stmt->PrepareId(),
TOS_REG);
// Convert a non-object (e.g. smi/string) enumerable to an object.
1062 Label convert, done_convert;
1063 __ JumpIfSmi(
eax, &convert, Label::kNear);
1069 __ bind(&done_convert);
1073 Label call_runtime, use_cache, fixed_array;
1082 __ CheckEnumCache(&call_runtime);
1085 __ jmp(&use_cache, Label::kNear);
// Slow path: ask the runtime for the property names.
1088 __ bind(&call_runtime);
1090 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1092 isolate()->factory()->meta_map());
1097 __ bind(&use_cache);
1098 __ LoadInstanceDescriptors(
eax, ecx);
// No enum cache: iterate a plain FixedArray of keys; record type feedback.
1112 __ bind(&fixed_array);
1114 Handle<JSGlobalPropertyCell> cell =
1115 isolate()->factory()->NewJSGlobalPropertyCell(
1118 RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1119 __ LoadHeapObject(
ebx, cell);
1124 __ mov(ecx, Operand(
esp, 0 * kPointerSize));
1129 __ bind(&non_proxy);
// Loop head: compare the current index against the length (both on stack).
1139 __ mov(
eax, Operand(
esp, 0 * kPointerSize));
1140 __ cmp(
eax, Operand(
esp, 1 * kPointerSize));
1144 __ mov(
ebx, Operand(
esp, 2 * kPointerSize));
1149 __ mov(
edx, Operand(
esp, 3 * kPointerSize));
1154 __ mov(ecx, Operand(
esp, 4 * kPointerSize));
1156 __ j(
equal, &update_each, Label::kNear);
1162 __ j(
zero, &update_each);
// Property was deleted/shadowed during iteration: skip it.
1171 __ j(
equal, loop_statement.continue_label());
1176 __ bind(&update_each);
1177 __ mov(result_register(),
ebx);
// Assign the current key to the loop variable, then run the body.
1179 { EffectContext context(
this);
1180 EmitAssignment(stmt->each());
1184 Visit(stmt->body());
1188 __ bind(loop_statement.continue_label());
1191 EmitStackCheck(stmt, &loop);
// Loop exit: pop the five loop-state slots.
1195 __ bind(loop_statement.break_label());
1196 __ add(
esp, Immediate(5 * kPointerSize));
1201 decrement_loop_depth();
// Builds a closure for |info|.  Uses the FastNewClosureStub when the shared
// info has no literals and we are in a function scope (and not forcing
// optimization); otherwise calls Runtime::kNewClosure with a pretenure flag.
1205 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1213 if (!FLAG_always_opt &&
1214 !FLAG_prepare_always_opt &&
1216 scope()->is_function_scope() &&
1217 info->num_literals() == 0) {
1218 FastNewClosureStub stub(info->language_mode());
1219 __ push(Immediate(info));
1223 __ push(Immediate(info));
1224 __ push(Immediate(pretenure
1225 ? isolate()->factory()->true_value()
1226 : isolate()->factory()->false_value()));
1227 __ CallRuntime(Runtime::kNewClosure, 3);
1229 context()->Plug(
eax);
// A variable reference: simply load the variable into the current context.
1233 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1234 Comment cmnt(masm_,
"[ VariableProxy");
1235 EmitVariableLoad(expr);
// Loads a global-scoped variable while checking that no intervening scope
// has a context extension object (which would shadow the binding).  Walks
// the static scope chain, then the runtime context chain for eval scopes,
// finally performing a contextual LoadIC.
1239 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1242 Register context =
esi;
1243 Register temp =
edx;
1247 if (s->num_heap_slots() > 0) {
// A scope calling non-strict eval may have acquired an extension at runtime.
1248 if (s->calls_non_strict_eval()) {
1262 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope())
break;
1263 s = s->outer_scope();
// Eval scopes require a runtime walk up to the global context.
1266 if (s !=
NULL && s->is_eval_scope()) {
1270 if (!context.is(temp)) {
1271 __ mov(temp, context);
1276 Immediate(isolate()->factory()->global_context_map()));
1277 __ j(
equal, &fast, Label::kNear);
// All extension checks passed: load the global through a LoadIC.
1290 __ mov(ecx, var->name());
1291 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1293 ? RelocInfo::CODE_TARGET
1294 : RelocInfo::CODE_TARGET_CONTEXT;
// Returns the operand of a context-slot variable, checking each intervening
// scope for a context extension (jumping to the slow path, elided here, if
// one may exist).
1299 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1301 ASSERT(var->IsContextSlot());
1302 Register context =
esi;
1303 Register temp =
ebx;
1305 for (Scope* s = scope(); s != var->scope(); s = s->
outer_scope()) {
1306 if (s->num_heap_slots() > 0) {
1307 if (s->calls_non_strict_eval()) {
// Fast path for loading a dynamically-scoped variable: either a global load
// with extension checks, or a local load of the variable it would not shadow
// — with a hole check for const/let bindings (const yields undefined when
// uninitialized; let throws a ReferenceError).
1329 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1339 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1342 Variable* local = var->local_if_not_shadowed();
1343 __ mov(
eax, ContextSlotOperandCheckExtensions(local, slow));
1344 if (local->mode() ==
CONST ||
1346 local->mode() ==
LET) {
1347 __ cmp(
eax, isolate()->factory()->the_hole_value());
1349 if (local->mode() ==
CONST) {
1350 __ mov(
eax, isolate()->factory()->undefined_value());
1352 __ push(Immediate(var->name()));
1353 __ CallRuntime(Runtime::kThrowReferenceError, 1);
// Loads a variable into the current expression context, dispatching on its
// location: global (contextual LoadIC), stack/context slot (direct load,
// with hole checks for possibly-uninitialized let/const bindings), or
// lookup slot (runtime call).
1361 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1363 SetSourcePosition(proxy->position());
1364 Variable* var = proxy->var();
1368 switch (var->location()) {
1370 Comment cmnt(masm_,
"Global variable");
1374 __ mov(ecx, var->name());
1375 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1376 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1377 context()->Plug(
eax);
1384 Comment cmnt(masm_, var->IsContextSlot()
1385 ?
"Context variable"
1386 :
"Stack variable");
1387 if (var->binding_needs_init()) {
// The hole check can be skipped when the binding is provably initialized
// before this use (textual position check below).
1411 bool skip_init_check;
1413 skip_init_check =
false;
1416 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1417 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1418 skip_init_check = var->mode() !=
CONST &&
1419 var->initializer_position() < proxy->position();
1422 if (!skip_init_check) {
1426 __ cmp(
eax, isolate()->factory()->the_hole_value());
// Uninitialized let/const-harmony throws; plain const yields undefined.
1431 __ push(Immediate(var->name()));
1432 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1436 __ mov(
eax, isolate()->factory()->undefined_value());
1439 context()->Plug(
eax);
1443 context()->Plug(var);
1453 Comment cmnt(masm_,
"Lookup variable");
1455 __ push(Immediate(var->name()));
1456 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1458 context()->Plug(
eax);
// Compiles a regexp literal: materializes the JSRegExp via the runtime on
// first use (cached in the literals array), then shallow-copies it so each
// evaluation yields a fresh object.
1465 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1466 Comment cmnt(masm_,
"[ RegExpLiteral");
1475 int literal_offset =
// Undefined in the literal slot means not yet materialized.
1478 __ cmp(
ebx, isolate()->factory()->undefined_value());
1485 __ push(Immediate(expr->pattern()));
1486 __ push(Immediate(expr->flags()));
1487 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1490 __ bind(&materialized);
// Copy the regexp object: inline allocation with a runtime fallback.
1492 Label allocated, runtime_allocate;
1496 __ bind(&runtime_allocate);
1499 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1502 __ bind(&allocated);
1511 if ((size % (2 * kPointerSize)) != 0) {
1515 context()->Plug(
eax);
// Pushes an accessor function (getter/setter) onto the stack, or null when
// the accessor is absent.
1519 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1520 if (expression ==
NULL) {
1521 __ push(Immediate(isolate()->factory()->null_value()));
1523 VisitForStackValue(expression);
// Compiles an object literal: clones the boilerplate (stub for shallow
// literals, runtime for deep or large ones), then stores each computed
// property — named constants via StoreIC, computed keys via kSetProperty,
// and accessor pairs batched through kDefineOrRedefineAccessorProperty.
1528 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1529 Comment cmnt(masm_,
"[ ObjectLiteral");
1530 Handle<FixedArray> constant_properties = expr->constant_properties();
1534 __ push(Immediate(constant_properties));
1535 int flags = expr->fast_elements()
1538 flags |= expr->has_function()
1542 int properties_count = constant_properties->length() / 2;
// Deep literals must go through the full runtime clone.
1543 if (expr->depth() > 1) {
1544 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1547 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1549 FastCloneShallowObjectStub stub(properties_count);
// The literal object stays in eax until a store forces it onto the stack.
1555 bool result_saved =
false;
1560 expr->CalculateEmitStore(
zone());
// Getter/setter pairs for the same key are collected and emitted together.
1562 AccessorTable accessor_table(isolate()->
zone());
1563 for (
int i = 0; i < expr->properties()->length(); i++) {
1564 ObjectLiteral::Property*
property = expr->properties()->at(i);
1565 if (property->IsCompileTimeValue())
continue;
1567 Literal* key =
property->key();
1568 Expression* value =
property->value();
1569 if (!result_saved) {
1571 result_saved =
true;
1573 switch (property->kind()) {
// Symbol-named data property: store through a (strictness-aware) StoreIC.
1580 if (key->handle()->IsSymbol()) {
1581 if (property->emit_store()) {
1582 VisitForAccumulatorValue(value);
1583 __ mov(ecx, Immediate(key->handle()));
1585 Handle<Code> ic = is_classic_mode()
1586 ? isolate()->builtins()->StoreIC_Initialize()
1587 : isolate()->builtins()->StoreIC_Initialize_Strict();
1588 CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1591 VisitForEffect(value);
// Generic path: duplicate the object, push key/value, call kSetProperty.
1597 __ push(Operand(
esp, 0));
1598 VisitForStackValue(key);
1599 VisitForStackValue(value);
1600 if (property->emit_store()) {
1602 __ CallRuntime(Runtime::kSetProperty, 4);
1608 accessor_table.lookup(key)->second->getter = value;
1611 accessor_table.lookup(key)->second->setter = value;
// Define collected accessor pairs in one runtime call each.
1618 for (AccessorTable::Iterator it = accessor_table.begin();
1619 it != accessor_table.end();
1621 __ push(Operand(
esp, 0));
1622 VisitForStackValue(it->first);
1623 EmitAccessor(it->second->getter);
1624 EmitAccessor(it->second->setter);
1626 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1629 if (expr->has_function()) {
1631 __ push(Operand(
esp, 0));
1632 __ CallRuntime(Runtime::kToFastProperties, 1);
1636 context()->PlugTOS();
1638 context()->Plug(
eax);
// Compiles an array literal: clones the boilerplate (COW-array stub, deep
// runtime clone, or shallow stub depending on elements and depth), then
// evaluates and stores each non-constant subexpression, with a write
// barrier or StoreArrayLiteralElementStub as appropriate.
1643 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1644 Comment cmnt(masm_,
"[ ArrayLiteral");
1646 ZoneList<Expression*>* subexprs = expr->values();
1647 int length = subexprs->length();
1648 Handle<FixedArray> constant_elements = expr->constant_elements();
1649 ASSERT_EQ(2, constant_elements->length());
1652 bool has_constant_fast_elements =
1654 Handle<FixedArrayBase> constant_elements_values(
1660 __ push(Immediate(constant_elements));
1661 Heap* heap = isolate()->heap();
// Copy-on-write elements: clone with the COW stub and count it.
1662 if (has_constant_fast_elements &&
1663 constant_elements_values->map() == heap->fixed_cow_array_map()) {
1666 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1667 FastCloneShallowArrayStub stub(
1671 }
else if (expr->depth() > 1) {
1672 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1674 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1677 FLAG_smi_only_arrays);
1683 FastCloneShallowArrayStub stub(mode, length);
1687 bool result_saved =
false;
// Store each non-constant element into the cloned array.
1691 for (
int i = 0; i < length; i++) {
1692 Expression* subexpr = subexprs->at(i);
// Literals and other compile-time values are already in the boilerplate.
1695 if (subexpr->AsLiteral() !=
NULL ||
1700 if (!result_saved) {
1702 result_saved =
true;
1704 VisitForAccumulatorValue(subexpr);
1715 __ RecordWriteField(
ebx, offset, result_register(), ecx,
1717 EMIT_REMEMBERED_SET,
1725 StoreArrayLiteralElementStub stub;
1729 PrepareForBailoutForId(expr->GetIdForElement(i),
NO_REGISTERS);
1733 context()->PlugTOS();
1735 context()->Plug(
eax);
// Compiles an assignment expression.  Classifies the target (variable,
// named property, keyed property), evaluates receiver/key as needed, for
// compound assignments loads the old value and applies the binary op (with
// an inline smi fast path), then dispatches to the matching store emitter.
1740 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1741 Comment cmnt(masm_,
"[ Assignment");
// An invalid LHS still evaluates its subexpressions (for their effects).
1744 if (!expr->target()->IsValidLeftHandSide()) {
1745 VisitForEffect(expr->target());
1751 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1752 LhsKind assign_type = VARIABLE;
1753 Property*
property = expr->target()->AsProperty();
1754 if (property !=
NULL) {
1755 assign_type = (
property->key()->IsPropertyName())
// Evaluate the receiver (and key) ahead of the value.
1761 switch (assign_type) {
1765 case NAMED_PROPERTY:
1766 if (expr->is_compound()) {
1768 VisitForStackValue(property->obj());
1771 VisitForStackValue(property->obj());
1774 case KEYED_PROPERTY: {
1775 if (expr->is_compound()) {
1776 VisitForStackValue(property->obj());
1777 VisitForStackValue(property->key());
// Compound keyed loads want receiver in edx and key in ecx.
1778 __ mov(
edx, Operand(
esp, kPointerSize));
1779 __ mov(ecx, Operand(
esp, 0));
1781 VisitForStackValue(property->obj());
1782 VisitForStackValue(property->key());
// Compound assignment: load the current value, then apply the op.
1790 if (expr->is_compound()) {
1791 AccumulatorValueContext result_context(
this);
1792 { AccumulatorValueContext left_operand_context(
this);
1793 switch (assign_type) {
1795 EmitVariableLoad(expr->target()->AsVariableProxy());
1796 PrepareForBailout(expr->target(),
TOS_REG);
1798 case NAMED_PROPERTY:
1799 EmitNamedPropertyLoad(property);
1800 PrepareForBailoutForId(expr->CompoundLoadId(),
TOS_REG);
1802 case KEYED_PROPERTY:
1803 EmitKeyedPropertyLoad(property);
1804 PrepareForBailoutForId(expr->CompoundLoadId(),
TOS_REG);
1811 VisitForAccumulatorValue(expr->value());
1813 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1816 SetSourcePosition(expr->position() + 1);
1817 if (ShouldInlineSmiCase(op)) {
1818 EmitInlineSmiBinaryOp(expr->binary_operation(),
1824 EmitBinaryOp(expr->binary_operation(), op, mode);
1828 PrepareForBailout(expr->binary_operation(),
TOS_REG);
1830 VisitForAccumulatorValue(expr->value());
1834 SetSourcePosition(expr->position());
// Store the result into the target.
1837 switch (assign_type) {
1839 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1841 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
1842 context()->Plug(
eax);
1844 case NAMED_PROPERTY:
1845 EmitNamedPropertyAssignment(expr);
1847 case KEYED_PROPERTY:
1848 EmitKeyedPropertyAssignment(expr);
// Loads a named property through a LoadIC: key name in ecx, receiver set up
// by the caller.
1854 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1855 SetSourcePosition(prop->position());
1856 Literal* key = prop->key()->AsLiteral();
1857 ASSERT(!key->handle()->IsSmi());
1858 __ mov(ecx, Immediate(key->handle()));
1859 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1860 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
// Loads a keyed property through a KeyedLoadIC (receiver/key set up by the
// caller).
1864 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1865 SetSourcePosition(prop->position());
1866 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1867 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
// Emits a binary operation with an inline smi fast path: a patchable smi
// check routes smi operands to inline arithmetic, everything else to the
// generic BinaryOpStub.  Shift results are overflow-checked against the smi
// range (the 0xc0000000 tests below).
1871 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1875 Expression* right) {
1878 Label smi_case, done, stub_call;
1882 JumpPatchSite patch_site(masm_);
1883 patch_site.EmitJumpIfSmi(
eax, &smi_case, Label::kNear);
1885 __ bind(&stub_call);
1887 BinaryOpStub stub(op, mode);
1888 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1889 patch_site.EmitPatchInfo();
1890 __ jmp(&done, Label::kNear);
// Shift result must fit in a smi: top two bits may not both vary.
1909 __ cmp(
eax, 0xc0000000);
1913 __ bind(&result_ok);
1922 __ test(
eax, Immediate(0xc0000000));
1926 __ bind(&result_ok);
1952 case Token::BIT_AND:
1955 case Token::BIT_XOR:
1963 context()->Plug(
eax);
// Emits a generic binary operation through BinaryOpStub; the unbound patch
// site just records that no inline smi code was generated.
1967 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1971 BinaryOpStub stub(op, mode);
1972 JumpPatchSite patch_site(masm_);
1973 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1974 patch_site.EmitPatchInfo();
1975 context()->Plug(
eax);
// Stores the accumulator into an arbitrary assignment target (used e.g. by
// for-in): variable, named property (StoreIC) or keyed property
// (KeyedStoreIC), choosing the strict IC outside classic mode.
1979 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1982 if (!expr->IsValidLeftHandSide()) {
1983 VisitForEffect(expr);
1989 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1990 LhsKind assign_type = VARIABLE;
1991 Property* prop = expr->AsProperty();
1993 assign_type = (prop->key()->IsPropertyName())
1998 switch (assign_type) {
2000 Variable* var = expr->AsVariableProxy()->var();
2001 EffectContext context(
this);
2002 EmitVariableAssignment(var, Token::ASSIGN);
2005 case NAMED_PROPERTY: {
2007 VisitForAccumulatorValue(prop->obj());
2010 __ mov(ecx, prop->key()->AsLiteral()->handle());
2011 Handle<Code> ic = is_classic_mode()
2012 ? isolate()->builtins()->StoreIC_Initialize()
2013 : isolate()->builtins()->StoreIC_Initialize_Strict();
2017 case KEYED_PROPERTY: {
2019 VisitForStackValue(prop->obj());
2020 VisitForAccumulatorValue(prop->key());
2024 Handle<Code> ic = is_classic_mode()
2025 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2026 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2031 context()->Plug(
eax);
// Stores the accumulator (eax) into |var| according to |op| and the
// variable's mode/location: contextual StoreIC for unallocated globals;
// hole-guarded initialization for const; hole-check-then-store (or runtime
// call) for let; plain store with write barrier for other mutable bindings.
// Const re-assignment outside INIT paths is silently ignored.
2035 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2037 if (var->IsUnallocated()) {
2039 __ mov(ecx, var->name());
2041 Handle<Code> ic = is_classic_mode()
2042 ? isolate()->builtins()->StoreIC_Initialize()
2043 : isolate()->builtins()->StoreIC_Initialize_Strict();
2044 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2046 }
// Const initialization: only overwrite the slot if it still holds the hole.
else if (op == Token::INIT_CONST) {
2048 ASSERT(!var->IsParameter());
2049 if (var->IsStackLocal()) {
2051 __ mov(
edx, StackOperand(var));
2052 __ cmp(
edx, isolate()->factory()->the_hole_value());
2054 __ mov(StackOperand(var),
eax);
2057 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2065 __ push(Immediate(var->name()));
2066 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2069 }
// Non-initializing store to a let binding: throw if still uninitialized.
else if (var->mode() ==
LET && op != Token::INIT_LET) {
2071 if (var->IsLookupSlot()) {
2074 __ push(Immediate(var->name()));
2076 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2078 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2081 __ mov(
edx, location);
2082 __ cmp(
edx, isolate()->factory()->the_hole_value());
2084 __ push(Immediate(var->name()));
2085 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2087 __ mov(location,
eax);
2088 if (var->IsContextSlot()) {
2091 __ RecordWriteContextSlot(ecx, offset,
edx,
ebx, kDontSaveFPRegs);
2095 }
// Ordinary mutable binding (or harmony const initialization).
else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2098 if (var->IsStackAllocated() || var->IsContextSlot()) {
// Debug check: let initialization must find the hole in the slot.
2100 if (FLAG_debug_code && op == Token::INIT_LET) {
2102 __ mov(
edx, location);
2103 __ cmp(
edx, isolate()->factory()->the_hole_value());
2104 __ Check(
equal,
"Let binding re-initialization.");
2107 __ mov(location,
eax);
2108 if (var->IsContextSlot()) {
2111 __ RecordWriteContextSlot(ecx, offset,
edx,
ebx, kDontSaveFPRegs);
2114 ASSERT(var->IsLookupSlot());
2117 __ push(Immediate(var->name()));
2119 __ CallRuntime(Runtime::kStoreContextSlot, 4);
// Emits a named-property store (obj.key = value) via a StoreIC. Around an
// object-literal initialization block the receiver is temporarily switched
// to slow (dictionary) properties and back to fast afterwards.
2126 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2131 Property* prop = expr->target()->AsProperty();
2138 if (expr->starts_initialization_block()) {
2139 __ push(result_register());
// Receiver is one slot below the pushed value.
2140 __ push(Operand(
esp, kPointerSize));
2141 __ CallRuntime(Runtime::kToSlowProperties, 1);
2142 __ pop(result_register());
2146 SetSourcePosition(expr->position());
// StoreIC expects the property name in ecx.
2147 __ mov(ecx, prop->key()->AsLiteral()->handle());
2148 if (expr->ends_initialization_block()) {
2153 Handle<Code> ic = is_classic_mode()
2154 ? isolate()->builtins()->StoreIC_Initialize()
2155 : isolate()->builtins()->StoreIC_Initialize_Strict();
2156 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2159 if (expr->ends_initialization_block()) {
2161 __ push(Operand(
esp, kPointerSize)));
2162 __ CallRuntime(Runtime::kToFastProperties, 1);
2166 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
2167 context()->Plug(
eax);
// Emits a keyed-property store (obj[key] = value) via a KeyedStoreIC,
// with the same slow/fast property dance around initialization blocks as
// EmitNamedPropertyAssignment.
2171 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2180 if (expr->starts_initialization_block()) {
2181 __ push(result_register());
// Receiver is two slots down: below the pushed value and the key.
2183 __ push(Operand(
esp, 2 * kPointerSize));
2184 __ CallRuntime(Runtime::kToSlowProperties, 1);
2185 __ pop(result_register());
2189 if (expr->ends_initialization_block()) {
2195 SetSourcePosition(expr->position());
2196 Handle<Code> ic = is_classic_mode()
2197 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2198 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2199 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2202 if (expr->ends_initialization_block()) {
2206 __ CallRuntime(Runtime::kToFastProperties, 1);
2210 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
2211 context()->Plug(
eax);
// Emits a property load. Named properties (obj.key) load the receiver into
// edx and use a LoadIC; keyed properties (obj[key]) push the receiver,
// evaluate the key into ecx, and use a KeyedLoadIC. Result in eax.
2215 void FullCodeGenerator::VisitProperty(Property* expr) {
2216 Comment cmnt(masm_,
"[ Property");
2217 Expression* key = expr->key();
2219 if (key->IsPropertyName()) {
2220 VisitForAccumulatorValue(expr->obj());
2221 __ mov(
edx, result_register());
2222 EmitNamedPropertyLoad(expr);
2223 context()->Plug(
eax);
2225 VisitForStackValue(expr->obj());
2226 VisitForAccumulatorValue(expr->key());
2228 __ mov(ecx, result_register());
2229 EmitKeyedPropertyLoad(expr);
2230 context()->Plug(
eax);
// Central helper for IC calls: emits a call to the given code object with
// the given reloc mode and the AST id for deoptimization bookkeeping.
2235 void FullCodeGenerator::CallIC(Handle<Code> code,
2236 RelocInfo::Mode rmode,
2239 __ call(code, rmode, ast_id);
// Emits a named call through a CallIC: pushes the arguments, puts the
// callee name in ecx, and calls the call-initialize stub. Drops nothing
// here; the result is plugged from eax.
2245 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2246 Handle<Object>
name,
2247 RelocInfo::Mode mode) {
2249 ZoneList<Expression*>* args = expr->arguments();
2250 int arg_count = args->length();
// Argument evaluation must not record positions; the call position is
// recorded just before the IC call below.
2251 { PreservePositionScope scope(masm()->positions_recorder());
2252 for (
int i = 0; i < arg_count; i++) {
2253 VisitForStackValue(args->at(i));
2255 __ Set(ecx, Immediate(name));
2258 SetSourcePosition(expr->position());
2260 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2261 CallIC(ic, mode, expr->id());
2262 RecordJSReturnSite(expr);
2265 context()->Plug(
eax);
// Emits a keyed call (obj[key](...)) through a KeyedCallIC: evaluates the
// key, pushes the arguments, reloads the key into ecx from the stack, and
// calls the keyed-call-initialize stub. Drops the receiver afterwards.
2269 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2272 VisitForAccumulatorValue(key);
2281 ZoneList<Expression*>* args = expr->arguments();
2282 int arg_count = args->length();
2283 { PreservePositionScope scope(masm()->positions_recorder());
2284 for (
int i = 0; i < arg_count; i++) {
2285 VisitForStackValue(args->at(i));
2289 SetSourcePosition(expr->position());
2291 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
// Key lives (arg_count + 1) slots up, just above the arguments.
2292 __ mov(ecx, Operand(
esp, (arg_count + 1) * kPointerSize));
2293 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2294 RecordJSReturnSite(expr);
2297 context()->DropAndPlug(1,
eax);
// NOTE(review): the function header is elided from this extract; by the
// surrounding original line numbers this looks like the body of
// FullCodeGenerator::EmitCallWithStub — TODO confirm against full source.
// Pushes the arguments, records type feedback in a fresh property cell,
// loads the function from the stack into edi, and calls CallFunctionStub.
2303 ZoneList<Expression*>* args = expr->arguments();
2304 int arg_count = args->length();
2305 { PreservePositionScope scope(masm()->positions_recorder());
2306 for (
int i = 0; i < arg_count; i++) {
2307 VisitForStackValue(args->at(i));
2311 SetSourcePosition(expr->position());
// A new feedback cell starts in the uninitialized state.
2316 Handle<Object> uninitialized =
2318 Handle<JSGlobalPropertyCell> cell =
2319 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2320 RecordTypeFeedbackCell(expr->id(), cell);
2324 CallFunctionStub stub(arg_count, flags);
// The function sits just above the pushed arguments.
2325 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2326 __ CallStub(&stub, expr->id());
2328 RecordJSReturnSite(expr);
2331 context()->DropAndPlug(1,
eax);
// Pushes the arguments for and calls the runtime function that resolves a
// possibly-direct call to eval: the function to call (or undefined when
// there are no args), plus context-dependent values elided in this extract.
2335 void FullCodeGenerator::EmitResolvePossiblyDirectEval(
int arg_count) {
2337 if (arg_count > 0) {
2338 __ push(Operand(
esp, arg_count * kPointerSize));
2340 __ push(Immediate(isolate()->factory()->undefined_value()));
// The receiver is found in the caller frame, above the saved fp/ret slots
// and the parameters.
2344 __ push(Operand(
ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2352 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
// Emits a call expression, dispatching on the callee: possible direct eval,
// global variable (CallIC), lookup-slot variable (runtime load then call),
// property access (named or keyed call IC), or an arbitrary expression.
// NOTE(review): many interior lines are elided in this extract.
2356 void FullCodeGenerator::VisitCall(Call* expr) {
// Debug-build flag asserting that every path records a JS return site.
2360 expr->return_is_recorded_ =
false;
2363 Comment cmnt(masm_,
"[ Call");
2364 Expression* callee = expr->expression();
2365 VariableProxy* proxy = callee->AsVariableProxy();
2366 Property*
property = callee->AsProperty();
// Case 1: possibly-direct call to eval.
2368 if (proxy !=
NULL && proxy->var()->is_possibly_eval()) {
2372 ZoneList<Expression*>* args = expr->arguments();
2373 int arg_count = args->length();
2374 { PreservePositionScope pos_scope(masm()->positions_recorder());
2375 VisitForStackValue(callee);
// Reserved slot for the receiver.
2377 __ push(Immediate(isolate()->factory()->undefined_value()));
2379 for (
int i = 0; i < arg_count; i++) {
2380 VisitForStackValue(args->at(i));
// Push a copy of the function and resolve eval; the resolved function
// and receiver come back in eax/edx and overwrite the originals.
2385 __ push(Operand(
esp, (arg_count + 1) * kPointerSize));
2386 EmitResolvePossiblyDirectEval(arg_count);
2390 __ mov(Operand(
esp, (arg_count + 0) * kPointerSize),
edx);
2391 __ mov(Operand(
esp, (arg_count + 1) * kPointerSize),
eax);
2394 SetSourcePosition(expr->position());
2396 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2398 RecordJSReturnSite(expr);
2401 context()->DropAndPlug(1,
eax);
2403 }
// Case 2: call to a global variable — named CallIC in the global context.
else if (proxy !=
NULL && proxy->var()->IsUnallocated()) {
2406 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2408 }
// Case 3: call through a dynamic lookup slot — resolve via the runtime.
else if (proxy !=
NULL && proxy->var()->IsLookupSlot()) {
2411 { PreservePositionScope scope(masm()->positions_recorder());
2419 __ push(context_register());
2420 __ push(Immediate(proxy->name()));
// Returns the function in eax and the receiver in edx.
2421 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2427 if (done.is_linked()) {
2429 __ jmp(&call, Label::kNear);
2435 __ push(Immediate(isolate()->factory()->the_hole_value()));
2444 }
// Case 4: call on a property — named or keyed call IC.
else if (property !=
NULL) {
2445 { PreservePositionScope scope(masm()->positions_recorder());
2446 VisitForStackValue(property->obj());
2448 if (property->key()->IsPropertyName()) {
2449 EmitCallWithIC(expr,
2450 property->key()->AsLiteral()->handle(),
2451 RelocInfo::CODE_TARGET);
2453 EmitKeyedCallWithIC(expr, property->key());
// Case 5: arbitrary callee expression.
2458 { PreservePositionScope scope(masm()->positions_recorder());
2459 VisitForStackValue(callee);
2470 ASSERT(expr->return_is_recorded_);
// Emits a 'new' expression: pushes the constructor and arguments, loads the
// argument count into eax and the constructor into edi, records type
// feedback in a fresh cell, and calls CallConstructStub.
2475 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2476 Comment cmnt(masm_,
"[ CallNew");
2484 VisitForStackValue(expr->expression());
2487 ZoneList<Expression*>* args = expr->arguments();
2488 int arg_count = args->length();
2489 for (
int i = 0; i < arg_count; i++) {
2490 VisitForStackValue(args->at(i));
2495 SetSourcePosition(expr->position());
// SafeSet avoids embedding a raw smi-lookalike immediate in the code.
2498 __ SafeSet(
eax, Immediate(arg_count));
2499 __ mov(
edi, Operand(
esp, arg_count * kPointerSize));
2505 Handle<Object> uninitialized =
2507 Handle<JSGlobalPropertyCell> cell =
2508 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2509 RecordTypeFeedbackCell(expr->id(), cell);
2515 CallConstructStub stub(flags);
2516 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2517 PrepareForBailoutForId(expr->ReturnId(),
TOS_REG);
2518 context()->Plug(
eax);
// --- Inline-runtime predicates (%_IsSmi and friends) ---------------------
// Each of these evaluates its single argument into eax, performs a test,
// and splits control to if_true/if_false via the test context. The actual
// test instructions are largely elided in this extract; the Split()
// condition shows the flag used.
2522 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2523 ZoneList<Expression*>* args = expr->arguments();
2524 ASSERT(args->length() == 1);
2526 VisitForAccumulatorValue(args->at(0));
2528 Label materialize_true, materialize_false;
2529 Label* if_true =
NULL;
2530 Label* if_false =
NULL;
2531 Label* fall_through =
NULL;
2532 context()->PrepareTest(&materialize_true, &materialize_false,
2533 &if_true, &if_false, &fall_through);
2535 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
// A smi has a clear tag bit, so the (elided) tag test sets ZF for smis.
2537 Split(
zero, if_true, if_false, fall_through);
2539 context()->Plug(if_true, if_false);
// %_IsNonNegativeSmi: same shape, testing tag and sign bits together.
2543 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2544 ZoneList<Expression*>* args = expr->arguments();
2545 ASSERT(args->length() == 1);
2547 VisitForAccumulatorValue(args->at(0));
2549 Label materialize_true, materialize_false;
2550 Label* if_true =
NULL;
2551 Label* if_false =
NULL;
2552 Label* fall_through =
NULL;
2553 context()->PrepareTest(&materialize_true, &materialize_false,
2554 &if_true, &if_false, &fall_through);
2556 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2558 Split(
zero, if_true, if_false, fall_through);
2560 context()->Plug(if_true, if_false);
// %_IsObject: non-smi, not null, and (elided) instance-type range check —
// Split(below_equal) on the instance-type comparison.
2564 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2565 ZoneList<Expression*>* args = expr->arguments();
2566 ASSERT(args->length() == 1);
2568 VisitForAccumulatorValue(args->at(0));
2570 Label materialize_true, materialize_false;
2571 Label* if_true =
NULL;
2572 Label* if_false =
NULL;
2573 Label* fall_through =
NULL;
2574 context()->PrepareTest(&materialize_true, &materialize_false,
2575 &if_true, &if_false, &fall_through);
2577 __ JumpIfSmi(
eax, if_false);
2578 __ cmp(
eax, isolate()->factory()->null_value());
2589 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2590 Split(
below_equal, if_true, if_false, fall_through);
2592 context()->Plug(if_true, if_false);
// %_IsSpecObject: non-smi with instance type >= first JS receiver type.
2596 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2597 ZoneList<Expression*>* args = expr->arguments();
2598 ASSERT(args->length() == 1);
2600 VisitForAccumulatorValue(args->at(0));
2602 Label materialize_true, materialize_false;
2603 Label* if_true =
NULL;
2604 Label* if_false =
NULL;
2605 Label* fall_through =
NULL;
2606 context()->PrepareTest(&materialize_true, &materialize_false,
2607 &if_true, &if_false, &fall_through);
2609 __ JumpIfSmi(
eax, if_false);
2611 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2612 Split(
above_equal, if_true, if_false, fall_through);
2614 context()->Plug(if_true, if_false);
// %_IsUndetectableObject: non-smi whose map has the undetectable bit set
// (elided test) — Split(not_zero) on that bit.
2618 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2619 ZoneList<Expression*>* args = expr->arguments();
2620 ASSERT(args->length() == 1);
2622 VisitForAccumulatorValue(args->at(0));
2624 Label materialize_true, materialize_false;
2625 Label* if_true =
NULL;
2626 Label* if_false =
NULL;
2627 Label* fall_through =
NULL;
2628 context()->PrepareTest(&materialize_true, &materialize_false,
2629 &if_true, &if_false, &fall_through);
2631 __ JumpIfSmi(
eax, if_false);
2635 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2636 Split(
not_zero, if_true, if_false, fall_through);
2638 context()->Plug(if_true, if_false);
// Checks whether a string wrapper's prototype chain makes the default
// valueOf safe to use. Walks the instance descriptors (loaded into ebx)
// looking for a 'valueOf' property; most of the loop body is elided in
// this extract, so details are unverifiable here.
2642 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2643 CallRuntime* expr) {
2644 ZoneList<Expression*>* args = expr->arguments();
2645 ASSERT(args->length() == 1);
2647 VisitForAccumulatorValue(args->at(0));
2649 Label materialize_true, materialize_false;
2650 Label* if_true =
NULL;
2651 Label* if_false =
NULL;
2652 Label* fall_through =
NULL;
2653 context()->PrepareTest(&materialize_true, &materialize_false,
2654 &if_true, &if_false, &fall_through);
2656 if (FLAG_debug_code)
__ AbortIfSmi(
eax);
2674 __ LoadInstanceDescriptors(
ebx,
ebx);
// Advance the descriptor cursor by one slot per iteration.
2695 __ add(
ebx, Immediate(kPointerSize));
2706 __ JumpIfSmi(ecx, if_false);
2721 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2722 context()->Plug(if_true, if_false);
// --- More inline-runtime type predicates --------------------------------
// %_IsFunction: non-smi with (elided) instance-type comparison, equal.
2726 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2727 ZoneList<Expression*>* args = expr->arguments();
2728 ASSERT(args->length() == 1);
2730 VisitForAccumulatorValue(args->at(0));
2732 Label materialize_true, materialize_false;
2733 Label* if_true =
NULL;
2734 Label* if_false =
NULL;
2735 Label* fall_through =
NULL;
2736 context()->PrepareTest(&materialize_true, &materialize_false,
2737 &if_true, &if_false, &fall_through);
2739 __ JumpIfSmi(
eax, if_false);
2741 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2742 Split(
equal, if_true, if_false, fall_through);
2744 context()->Plug(if_true, if_false);
// %_IsArray: same shape, comparing against the JSArray instance type.
2748 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2749 ZoneList<Expression*>* args = expr->arguments();
2750 ASSERT(args->length() == 1);
2752 VisitForAccumulatorValue(args->at(0));
2754 Label materialize_true, materialize_false;
2755 Label* if_true =
NULL;
2756 Label* if_false =
NULL;
2757 Label* fall_through =
NULL;
2758 context()->PrepareTest(&materialize_true, &materialize_false,
2759 &if_true, &if_false, &fall_through);
2761 __ JumpIfSmi(
eax, if_false);
2763 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2764 Split(
equal, if_true, if_false, fall_through);
2766 context()->Plug(if_true, if_false);
// %_IsRegExp: same shape, comparing against the JSRegExp instance type.
2770 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2771 ZoneList<Expression*>* args = expr->arguments();
2772 ASSERT(args->length() == 1);
2774 VisitForAccumulatorValue(args->at(0));
2776 Label materialize_true, materialize_false;
2777 Label* if_true =
NULL;
2778 Label* if_false =
NULL;
2779 Label* fall_through =
NULL;
2780 context()->PrepareTest(&materialize_true, &materialize_false,
2781 &if_true, &if_false, &fall_through);
2783 __ JumpIfSmi(
eax, if_false);
2785 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2786 Split(
equal, if_true, if_false, fall_through);
2788 context()->Plug(if_true, if_false);
// %_IsConstructCall: no arguments; inspects the caller frame's marker
// (frame walk elided) to decide whether we are in a construct call.
2793 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2794 ASSERT(expr->arguments()->length() == 0);
2796 Label materialize_true, materialize_false;
2797 Label* if_true =
NULL;
2798 Label* if_false =
NULL;
2799 Label* fall_through =
NULL;
2800 context()->PrepareTest(&materialize_true, &materialize_false,
2801 &if_true, &if_false, &fall_through);
2807 Label check_frame_marker;
2814 __ bind(&check_frame_marker);
2817 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2818 Split(
equal, if_true, if_false, fall_through);
2820 context()->Plug(if_true, if_false);
// %_ObjectEquals: pointer-identity comparison of two values (first pushed,
// second in the accumulator); the cmp itself is elided in this extract.
2824 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2825 ZoneList<Expression*>* args = expr->arguments();
2826 ASSERT(args->length() == 2);
2829 VisitForStackValue(args->at(0));
2830 VisitForAccumulatorValue(args->at(1));
2832 Label materialize_true, materialize_false;
2833 Label* if_true =
NULL;
2834 Label* if_false =
NULL;
2835 Label* fall_through =
NULL;
2836 context()->PrepareTest(&materialize_true, &materialize_false,
2837 &if_true, &if_false, &fall_through);
2841 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2842 Split(
equal, if_true, if_false, fall_through);
2844 context()->Plug(if_true, if_false);
// %_Arguments(index): loads the indexed argument from the caller's frame
// (lookup code elided); result plugged from eax.
2848 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2849 ZoneList<Expression*>* args = expr->arguments();
2850 ASSERT(args->length() == 1);
2854 VisitForAccumulatorValue(args->at(0));
2859 context()->Plug(
eax);
// %_ArgumentsLength: produces the caller's argument count as a smi in eax
// (frame inspection elided).
2863 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2864 ASSERT(expr->arguments()->length() == 0);
2881 if (FLAG_debug_code)
__ AbortIfNotSmi(
eax);
2882 context()->Plug(
eax);
// %_ClassOf: computes the class name of a value. Smis and null map to the
// null value; objects with a function constructor answer "Function";
// objects without a function constructor answer "Object". The instance-type
// dispatch between these labels is elided in this extract.
2886 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2887 ZoneList<Expression*>* args = expr->arguments();
2888 ASSERT(args->length() == 1);
2889 Label done, null,
function, non_function_constructor;
2891 VisitForAccumulatorValue(args->at(0));
2894 __ JumpIfSmi(
eax, &null);
2928 __ mov(
eax, isolate()->factory()->function_class_symbol());
2932 __ bind(&non_function_constructor);
2933 __ mov(
eax, isolate()->factory()->Object_symbol());
2938 __ mov(
eax, isolate()->factory()->null_value());
2943 context()->Plug(
eax);
// %_Log: forwards its 2nd and 3rd arguments to Runtime::kLog and yields
// undefined (the first argument is presumably a flag handled elsewhere —
// the guard around the call is elided in this extract).
2947 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2955 ZoneList<Expression*>* args = expr->arguments();
2958 VisitForStackValue(args->at(1));
2959 VisitForStackValue(args->at(2));
2960 __ CallRuntime(Runtime::kLog, 2);
2963 __ mov(
eax, isolate()->factory()->undefined_value());
2964 context()->Plug(
eax);
// %_RandomHeapNumber: allocates a heap number (runtime fallback on
// allocation failure), then calls the C random_uint32 function and converts
// the 32-bit result into a double in [0,1) using SSE2 bit tricks (the
// 0x49800000 / 0x41300000 constants are float/double exponent patterns).
2968 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2969 ASSERT(expr->arguments()->length() == 0);
2971 Label slow_allocate_heapnumber;
2972 Label heapnumber_allocated;
2974 __ AllocateHeapNumber(
edi,
ebx, ecx, &slow_allocate_heapnumber);
2975 __ jmp(&heapnumber_allocated);
2977 __ bind(&slow_allocate_heapnumber);
2979 __ CallRuntime(Runtime::kNumberAlloc, 0);
2982 __ bind(&heapnumber_allocated);
2984 __ PrepareCallCFunction(1,
ebx);
2988 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
// SSE2 path for the int->double conversion.
2995 CpuFeatures::Scope fscope(
SSE2);
2996 __ mov(
ebx, Immediate(0x49800000));
3006 Immediate(0x41300000));
3015 context()->Plug(
eax);
// %_SubString: pushes (string, from, to) and calls SubStringStub
// (stub declaration/call partially elided). Result in eax.
3019 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3022 ZoneList<Expression*>* args = expr->arguments();
3023 ASSERT(args->length() == 3);
3024 VisitForStackValue(args->at(0));
3025 VisitForStackValue(args->at(1));
3026 VisitForStackValue(args->at(2));
3028 context()->Plug(
eax);
// %_RegExpExec: pushes (regexp, string, index, lastMatchInfo) and calls
// RegExpExecStub. Result in eax.
3032 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3034 RegExpExecStub stub;
3035 ZoneList<Expression*>* args = expr->arguments();
3036 ASSERT(args->length() == 4);
3037 VisitForStackValue(args->at(0));
3038 VisitForStackValue(args->at(1));
3039 VisitForStackValue(args->at(2));
3040 VisitForStackValue(args->at(3));
3042 context()->Plug(
eax);
// %_ValueOf: for a JSValue wrapper, loads the wrapped primitive; smis and
// non-wrappers pass through unchanged (wrapper check elided).
3046 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3047 ZoneList<Expression*>* args = expr->arguments();
3048 ASSERT(args->length() == 1);
3050 VisitForAccumulatorValue(args->at(0));
3054 __ JumpIfSmi(
eax, &done, Label::kNear);
3061 context()->Plug(
eax);
// %_DateField(date, index): reads a cached field from a JSDate. Index 0
// (the time value) is read directly; other indices are valid only while the
// date cache stamp matches, otherwise the C getter function is called.
3065 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3066 ZoneList<Expression*>* args = expr->arguments();
3067 ASSERT(args->length() == 2);
// The field index must be a literal smi.
3069 Smi* index =
Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3071 VisitForAccumulatorValue(args->at(0));
3073 Label runtime, done;
3074 Register
object =
eax;
3075 Register result =
eax;
3076 Register scratch =
ecx;
3079 __ AbortIfSmi(
object);
3081 __ Assert(
equal,
"Trying to get date field from non-date.");
3084 if (index->value() == 0) {
// Cached fields are only valid while the global date-cache stamp is
// unchanged; compare against it before using the cache.
3088 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3089 __ mov(scratch, Operand::StaticVariable(stamp));
3093 kPointerSize * index->value()));
// Slow path: call the C date-field getter with (object, index).
3097 __ PrepareCallCFunction(2, scratch);
3098 __ mov(Operand(
esp, 0),
object);
3099 __ mov(Operand(
esp, 1 * kPointerSize), Immediate(index));
3100 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3103 context()->Plug(result);
// %_Math_pow: pushes base and exponent and calls the runtime pow (a stub
// fast path may exist in the elided lines). Result in eax.
3107 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3109 ZoneList<Expression*>* args = expr->arguments();
3110 ASSERT(args->length() == 2);
3111 VisitForStackValue(args->at(0));
3112 VisitForStackValue(args->at(1));
3118 __ CallRuntime(Runtime::kMath_pow, 2);
3120 context()->Plug(
eax);
// %_SetValueOf(wrapper, value): stores value into a JSValue wrapper; smis
// in ebx (the wrapper) skip the store. The store + write barrier are
// elided in this extract. Result (the value) in eax.
3124 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3125 ZoneList<Expression*>* args = expr->arguments();
3126 ASSERT(args->length() == 2);
3128 VisitForStackValue(args->at(0));
3129 VisitForAccumulatorValue(args->at(1));
3134 __ JumpIfSmi(
ebx, &done, Label::kNear);
3149 context()->Plug(
eax);
// %_NumberToString: pushes the number and calls NumberToStringStub.
3153 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3154 ZoneList<Expression*>* args = expr->arguments();
3158 VisitForStackValue(args->at(0));
3160 NumberToStringStub stub;
3162 context()->Plug(
eax);
// %_StringCharFromCode: converts a char code (in eax) to a one-character
// string (in ebx) using StringCharFromCodeGenerator; slow path handled by
// the generator itself with a no-op runtime helper.
3166 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3167 ZoneList<Expression*>* args = expr->arguments();
3168 ASSERT(args->length() == 1);
3170 VisitForAccumulatorValue(args->at(0));
3173 StringCharFromCodeGenerator generator(
eax,
ebx);
3174 generator.GenerateFast(masm_);
3177 NopRuntimeCallHelper call_helper;
3178 generator.GenerateSlow(masm_, call_helper);
// Note: the result register here is ebx, not the usual eax.
3181 context()->Plug(
ebx);
// %_StringCharCodeAt(string, index): yields the char code at index, or NaN
// when the index is out of range, or undefined when the index needs
// conversion (non-smi). Delegates to StringCharCodeAtGenerator.
3185 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3186 ZoneList<Expression*>* args = expr->arguments();
3187 ASSERT(args->length() == 2);
3189 VisitForStackValue(args->at(0));
3190 VisitForAccumulatorValue(args->at(1));
3192 Register
object =
ebx;
3193 Register index =
eax;
3194 Register result =
edx;
3198 Label need_conversion;
3199 Label index_out_of_range;
3201 StringCharCodeAtGenerator generator(
object,
3206 &index_out_of_range,
3208 generator.GenerateFast(masm_);
// Out-of-range index: answer NaN (String.charCodeAt semantics).
3211 __ bind(&index_out_of_range);
3214 __ Set(result, Immediate(isolate()->factory()->nan_value()));
// Non-smi index: answer undefined and let the caller handle conversion.
3217 __ bind(&need_conversion);
3220 __ Set(result, Immediate(isolate()->factory()->undefined_value()));
3223 NopRuntimeCallHelper call_helper;
3224 generator.GenerateSlow(masm_, call_helper);
3227 context()->Plug(result);
// %_StringCharAt(string, index): like above but yields a one-character
// string; out-of-range answers the empty string.
3231 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3232 ZoneList<Expression*>* args = expr->arguments();
3233 ASSERT(args->length() == 2);
3235 VisitForStackValue(args->at(0));
3236 VisitForAccumulatorValue(args->at(1));
3238 Register
object =
ebx;
3239 Register index =
eax;
3240 Register scratch =
edx;
3241 Register result =
eax;
3245 Label need_conversion;
3246 Label index_out_of_range;
3248 StringCharAtGenerator generator(
object,
3254 &index_out_of_range,
3256 generator.GenerateFast(masm_);
3259 __ bind(&index_out_of_range);
3262 __ Set(result, Immediate(isolate()->factory()->empty_string()));
3265 __ bind(&need_conversion);
3271 NopRuntimeCallHelper call_helper;
3272 generator.GenerateSlow(masm_, call_helper);
3275 context()->Plug(result);
// %_StringAdd: pushes both strings and calls StringAddStub (stub call
// elided). Result in eax.
3279 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3280 ZoneList<Expression*>* args = expr->arguments();
3283 VisitForStackValue(args->at(0));
3284 VisitForStackValue(args->at(1));
3288 context()->Plug(
eax);
// %_StringCompare: pushes both strings and calls StringCompareStub.
3292 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3293 ZoneList<Expression*>* args = expr->arguments();
3296 VisitForStackValue(args->at(0));
3297 VisitForStackValue(args->at(1));
3299 StringCompareStub stub;
3301 context()->Plug(
eax);
// --- Transcendental math helpers ----------------------------------------
// Each pushes its single argument and invokes the corresponding
// transcendental-cache stub (stub construction/call elided), except sqrt
// which goes straight to the runtime. Result in eax.
3305 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3309 ZoneList<Expression*>* args = expr->arguments();
3310 ASSERT(args->length() == 1);
3311 VisitForStackValue(args->at(0));
3313 context()->Plug(
eax);
3317 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3321 ZoneList<Expression*>* args = expr->arguments();
3322 ASSERT(args->length() == 1);
3323 VisitForStackValue(args->at(0));
3325 context()->Plug(
eax);
3329 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3333 ZoneList<Expression*>* args = expr->arguments();
3334 ASSERT(args->length() == 1);
3335 VisitForStackValue(args->at(0));
3337 context()->Plug(
eax);
3341 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3345 ZoneList<Expression*>* args = expr->arguments();
3346 ASSERT(args->length() == 1);
3347 VisitForStackValue(args->at(0));
3349 context()->Plug(
eax);
3353 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3355 ZoneList<Expression*>* args = expr->arguments();
3356 ASSERT(args->length() == 1);
3357 VisitForStackValue(args->at(0));
3358 __ CallRuntime(Runtime::kMath_sqrt, 1);
3359 context()->Plug(
eax);
// %_CallFunction(receiver, arg1..argN, function): pushes receiver and
// arguments, evaluates the function last, then invokes it directly (fast
// path) or via Runtime::kCall (slow path; dispatch elided).
3363 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3364 ZoneList<Expression*>* args = expr->arguments();
3365 ASSERT(args->length() >= 2);
// Everything except the trailing function argument goes on the stack.
3367 int arg_count = args->length() - 2;
3368 for (
int i = 0; i < arg_count + 1; ++i) {
3369 VisitForStackValue(args->at(i));
3371 VisitForAccumulatorValue(args->last());
3379 __ mov(
edi, result_register());
3380 ParameterCount count(arg_count);
3388 __ CallRuntime(Runtime::kCall, args->length());
3391 context()->Plug(
eax);
// %_RegExpConstructResult: pushes (length, index, input) and calls
// RegExpConstructResultStub (stub call elided). Result in eax.
3395 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3397 RegExpConstructResultStub stub;
3398 ZoneList<Expression*>* args = expr->arguments();
3399 ASSERT(args->length() == 3);
3400 VisitForStackValue(args->at(0));
3401 VisitForStackValue(args->at(1));
3402 VisitForStackValue(args->at(2));
3404 context()->Plug(
eax);
// %_GetFromCache(cache_id, key): probes the JS-function result cache for
// the key; on miss falls back to Runtime::kGetFromCache. A compile-time
// unknown cache id aborts (debug) and yields undefined.
3408 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3409 ZoneList<Expression*>* args = expr->arguments();
// The cache id must be a literal smi.
3413 int cache_id =
Smi::cast(*(args->at(0)->AsLiteral()->handle()))->
value();
3415 Handle<FixedArray> jsfunction_result_caches(
3416 isolate()->global_context()->jsfunction_result_caches());
3417 if (jsfunction_result_caches->length() <= cache_id) {
3418 __ Abort(
"Attempt to use undefined cache.");
3419 __ mov(
eax, isolate()->factory()->undefined_value());
3420 context()->Plug(
eax);
3424 VisitForAccumulatorValue(args->at(1));
3427 Register cache =
ebx;
3436 Label done, not_found;
3446 __ bind(&not_found);
// Slow path: look up (or compute) the entry via the runtime.
3450 __ CallRuntime(Runtime::kGetFromCache, 2);
3453 context()->Plug(
eax);
// %_IsRegExpEquivalent(left, right): true if both are the same regexp
// object, or regexps with equal source and flags (interior checks elided).
// Fails fast if either operand is a smi (tag bits ANDed together).
3457 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3458 ZoneList<Expression*>* args = expr->arguments();
3461 Register right =
eax;
3462 Register left =
ebx;
3465 VisitForStackValue(args->at(0));
3466 VisitForAccumulatorValue(args->at(1));
3469 Label done, fail, ok;
// Identical objects are trivially equivalent.
3470 __ cmp(left, right);
// If either value is a smi, the AND has a clear tag bit -> fail.
3474 __ and_(tmp, right);
3475 __ JumpIfSmi(tmp, &fail);
3485 __ mov(
eax, Immediate(isolate()->factory()->false_value()));
3488 __ mov(
eax, Immediate(isolate()->factory()->true_value()));
3491 context()->Plug(
eax);
// %_HasCachedArrayIndex: tests the cached-array-index bit in a string's
// hash field (the test itself is elided); argument must be a string.
3495 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3496 ZoneList<Expression*>* args = expr->arguments();
3497 ASSERT(args->length() == 1);
3499 VisitForAccumulatorValue(args->at(0));
3501 if (FLAG_debug_code) {
3502 __ AbortIfNotString(
eax);
3505 Label materialize_true, materialize_false;
3506 Label* if_true =
NULL;
3507 Label* if_false =
NULL;
3508 Label* fall_through =
NULL;
3509 context()->PrepareTest(&materialize_true, &materialize_false,
3510 &if_true, &if_false, &fall_through);
3514 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3515 Split(
zero, if_true, if_false, fall_through);
3517 context()->Plug(if_true, if_false);
// %_GetCachedArrayIndex: extracts the cached array index from the string's
// hash field (extraction elided). Result in eax.
3521 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3522 ZoneList<Expression*>* args = expr->arguments();
3523 ASSERT(args->length() == 1);
3524 VisitForAccumulatorValue(args->at(0));
3526 if (FLAG_debug_code) {
3527 __ AbortIfNotString(
eax);
3533 context()->Plug(
eax);
// %_FastAsciiArrayJoin(array, separator): fast path for Array.prototype.join
// on ASCII-only arrays. Computes the total length, allocates one ASCII
// result string, then copies elements with one of three loops chosen by the
// separator: empty (loop_1), single character (loop_2), or longer
// (loop_3). Bails out (result undefined) on any non-fast case.
// NOTE(review): many interior lines are elided in this extract; register
// roles are documented by the explicit assignments below.
3537 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3538 Label bailout, done, one_char_separator, long_separator,
3539 non_trivial_array, not_size_one_array, loop,
3540 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3542 ZoneList<Expression*>* args = expr->arguments();
3543 ASSERT(args->length() == 2);
// Separator is pushed; array ends up in the accumulator.
3545 VisitForStackValue(args->at(1));
3547 VisitForAccumulatorValue(args->at(0));
3549 Register array =
eax;
3550 Register elements =
no_reg;
3552 Register index =
edx;
3554 Register string_length =
ecx;
3556 Register
string =
esi;
3558 Register scratch =
ebx;
3560 Register array_length =
edi;
3561 Register result_pos =
no_reg;
// Separator, result, and array_length are kept in stack slots because
// ia32 has too few registers for all the loop state.
3564 Operand separator_operand = Operand(
esp, 2 * kPointerSize);
3565 Operand result_operand = Operand(
esp, 1 * kPointerSize);
3566 Operand array_length_operand = Operand(
esp, 0);
3567 __ sub(
esp, Immediate(2 * kPointerSize));
3570 __ JumpIfSmi(array, &bailout);
3575 __ CheckFastElements(scratch, &bailout);
3579 __ SmiUntag(array_length);
// Empty array joins to the empty string.
3581 __ mov(result_operand, isolate()->factory()->empty_string());
3585 __ bind(&non_trivial_array);
3586 __ mov(array_length_operand, array_length);
// First pass: sum the string lengths and validate every element is a
// flat ASCII string.
3597 __ Set(index, Immediate(0));
3598 __ Set(string_length, Immediate(0));
3602 if (FLAG_debug_code) {
3603 __ cmp(index, array_length);
3604 __ Assert(
less,
"No empty arrays here in EmitFastAsciiArrayJoin");
3611 __ JumpIfSmi(
string, &bailout);
3614 __ and_(scratch, Immediate(
3618 __ add(string_length,
3621 __ add(index, Immediate(1));
3622 __ cmp(index, array_length);
// A one-element array joins to its single element.
3626 __ cmp(array_length, 1);
3629 __ mov(result_operand, scratch);
3632 __ bind(&not_size_one_array);
// array_length is no longer needed as a register; reuse it as the write
// cursor into the result string.
3635 result_pos = array_length;
// Validate the separator and fold its length into the total:
// total += separator_length * (array_length - 1).
3643 __ mov(
string, separator_operand);
3644 __ JumpIfSmi(
string, &bailout);
3647 __ and_(scratch, Immediate(
3654 __ mov(scratch, separator_operand);
3656 __ sub(string_length, scratch);
3657 __ imul(scratch, array_length_operand);
3659 __ add(string_length, scratch);
// Lengths were accumulated as smis; untag before allocating.
3662 __ shr(string_length, 1);
3666 __ AllocateAsciiString(result_pos, string_length, scratch,
3667 index,
string, &bailout);
3668 __ mov(result_operand, result_pos);
// Dispatch on separator length: 0, 1, or longer.
3672 __ mov(
string, separator_operand);
3675 __ j(
equal, &one_char_separator);
// Loop 1: empty separator — copy elements back to back.
3680 __ mov(index, Immediate(0));
3681 __ jmp(&loop_1_condition);
3694 __ mov(string_length,
3696 __ shr(string_length, 1);
3699 __ CopyBytes(
string, result_pos, string_length, scratch);
3700 __ add(index, Immediate(1));
3701 __ bind(&loop_1_condition);
3702 __ cmp(index, array_length_operand);
// Loop 2: single-character separator — cache the char byte in the
// separator stack slot and write it before each element after the first.
3709 __ bind(&one_char_separator);
3712 __ mov_b(separator_operand, scratch);
3714 __ Set(index, Immediate(0));
3717 __ jmp(&loop_2_entry);
3726 __ mov_b(scratch, separator_operand);
3727 __ mov_b(Operand(result_pos, 0), scratch);
3730 __ bind(&loop_2_entry);
3735 __ mov(string_length,
3737 __ shr(string_length, 1);
3740 __ CopyBytes(
string, result_pos, string_length, scratch);
3741 __ add(index, Immediate(1));
3743 __ cmp(index, array_length_operand);
// Loop 3: general separator — copy the separator string, then the element,
// each iteration after the first.
3749 __ bind(&long_separator);
3751 __ Set(index, Immediate(0));
3754 __ jmp(&loop_3_entry);
3763 __ mov(
string, separator_operand);
3764 __ mov(string_length,
3766 __ shr(string_length, 1);
3769 __ CopyBytes(
string, result_pos, string_length, scratch);
3771 __ bind(&loop_3_entry);
3776 __ mov(string_length,
3778 __ shr(string_length, 1);
3781 __ CopyBytes(
string, result_pos, string_length, scratch);
3782 __ add(index, Immediate(1));
3784 __ cmp(index, array_length_operand);
// Bailout: produce undefined so the caller falls back to the generic join.
3790 __ mov(result_operand, isolate()->factory()->undefined_value());
3792 __ mov(
eax, result_operand);
// Pop the two scratch slots plus the pushed separator.
3794 __ add(
esp, Immediate(3 * kPointerSize));
3797 context()->Plug(
eax);
// Emits a runtime call. Names starting with '_' are the inline runtime
// functions handled by the Emit* helpers above; otherwise the arguments are
// pushed and either the JS runtime function (via a CallIC with the name in
// ecx) or the C++ runtime function is invoked.
3801 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3802 Handle<String> name = expr->name();
3803 if (name->length() > 0 && name->Get(0) ==
'_') {
3804 Comment cmnt(masm_,
"[ InlineRuntimeCall");
3805 EmitInlineRuntimeCall(expr);
3809 Comment cmnt(masm_,
"[ CallRuntime");
3810 ZoneList<Expression*>* args = expr->arguments();
// JS runtime functions need the receiver/builtin set up first (elided).
3812 if (expr->is_jsruntime()) {
3819 int arg_count = args->length();
3820 for (
int i = 0; i < arg_count; i++) {
3821 VisitForStackValue(args->at(i));
3824 if (expr->is_jsruntime()) {
// Call the JS builtin through a CallIC, name in ecx.
3826 __ Set(ecx, Immediate(expr->name()));
3827 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3829 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3830 CallIC(ic, mode, expr->id());
3835 __ CallRuntime(expr->function(), arg_count);
3837 context()->Plug(
eax);
3841 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3842 switch (expr->op()) {
3843 case Token::DELETE: {
3844 Comment cmnt(masm_,
"[ UnaryOperation (DELETE)");
3845 Property*
property = expr->expression()->AsProperty();
3846 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3848 if (property !=
NULL) {
3849 VisitForStackValue(property->obj());
3850 VisitForStackValue(property->key());
3855 context()->Plug(
eax);
3856 }
else if (proxy !=
NULL) {
3857 Variable* var = proxy->var();
3861 if (var->IsUnallocated()) {
3863 __ push(Immediate(var->name()));
3866 context()->Plug(
eax);
3867 }
else if (var->IsStackAllocated() || var->IsContextSlot()) {
3871 context()->Plug(var->is_this());
3875 __ push(context_register());
3876 __ push(Immediate(var->name()));
3877 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3878 context()->Plug(
eax);
3883 VisitForEffect(expr->expression());
3884 context()->Plug(
true);
3890 Comment cmnt(masm_,
"[ UnaryOperation (VOID)");
3891 VisitForEffect(expr->expression());
3892 context()->Plug(isolate()->factory()->undefined_value());
3897 Comment cmnt(masm_,
"[ UnaryOperation (NOT)");
3898 if (context()->IsEffect()) {
3901 VisitForEffect(expr->expression());
3902 }
else if (context()->IsTest()) {
3905 VisitForControl(expr->expression(),
3906 test->false_label(),
3908 test->fall_through());
3909 context()->Plug(test->true_label(), test->false_label());
3915 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3916 Label materialize_true, materialize_false, done;
3917 VisitForControl(expr->expression(),
3921 __ bind(&materialize_true);
3922 PrepareForBailoutForId(expr->MaterializeTrueId(),
NO_REGISTERS);
3923 if (context()->IsAccumulatorValue()) {
3924 __ mov(
eax, isolate()->factory()->true_value());
3926 __ Push(isolate()->factory()->true_value());
3928 __ jmp(&done, Label::kNear);
3929 __ bind(&materialize_false);
3930 PrepareForBailoutForId(expr->MaterializeFalseId(),
NO_REGISTERS);
3931 if (context()->IsAccumulatorValue()) {
3932 __ mov(
eax, isolate()->factory()->false_value());
3934 __ Push(isolate()->factory()->false_value());
3941 case Token::TYPEOF: {
3942 Comment cmnt(masm_,
"[ UnaryOperation (TYPEOF)");
3943 { StackValueContext context(
this);
3944 VisitForTypeofValue(expr->expression());
3946 __ CallRuntime(Runtime::kTypeof, 1);
3947 context()->Plug(
eax);
3952 Comment cmt(masm_,
"[ UnaryOperation (ADD)");
3953 VisitForAccumulatorValue(expr->expression());
3954 Label no_conversion;
3955 __ JumpIfSmi(result_register(), &no_conversion);
3956 ToNumberStub convert_stub;
3957 __ CallStub(&convert_stub);
3958 __ bind(&no_conversion);
3959 context()->Plug(result_register());
3964 EmitUnaryOperation(expr,
"[ UnaryOperation (SUB)");
3967 case Token::BIT_NOT:
3968 EmitUnaryOperation(expr,
"[ UnaryOperation (BIT_NOT)");
3977 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3979 Comment cmt(masm_, comment);
3980 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3983 UnaryOpStub stub(expr->op(), overwrite);
3986 VisitForAccumulatorValue(expr->expression());
3987 SetSourcePosition(expr->position());
3988 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3989 context()->Plug(
eax);
3993 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3994 Comment cmnt(masm_,
"[ CountOperation");
3995 SetSourcePosition(expr->position());
3999 if (!expr->expression()->IsValidLeftHandSide()) {
4000 VisitForEffect(expr->expression());
4006 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4007 LhsKind assign_type = VARIABLE;
4008 Property* prop = expr->expression()->AsProperty();
4013 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4017 if (assign_type == VARIABLE) {
4018 ASSERT(expr->expression()->AsVariableProxy()->var() !=
NULL);
4019 AccumulatorValueContext context(
this);
4020 EmitVariableLoad(expr->expression()->AsVariableProxy());
4023 if (expr->is_postfix() && !context()->IsEffect()) {
4026 if (assign_type == NAMED_PROPERTY) {
4028 VisitForAccumulatorValue(prop->obj());
4031 EmitNamedPropertyLoad(prop);
4033 VisitForStackValue(prop->obj());
4034 VisitForStackValue(prop->key());
4035 __ mov(
edx, Operand(
esp, kPointerSize));
4036 __ mov(ecx, Operand(
esp, 0));
4037 EmitKeyedPropertyLoad(prop);
4043 if (assign_type == VARIABLE) {
4044 PrepareForBailout(expr->expression(),
TOS_REG);
4046 PrepareForBailoutForId(expr->CountId(),
TOS_REG);
4050 Label no_conversion;
4051 if (ShouldInlineSmiCase(expr->op())) {
4052 __ JumpIfSmi(
eax, &no_conversion, Label::kNear);
4054 ToNumberStub convert_stub;
4055 __ CallStub(&convert_stub);
4056 __ bind(&no_conversion);
4059 if (expr->is_postfix()) {
4060 if (!context()->IsEffect()) {
4064 switch (assign_type) {
4068 case NAMED_PROPERTY:
4069 __ mov(Operand(
esp, kPointerSize),
eax);
4071 case KEYED_PROPERTY:
4072 __ mov(Operand(
esp, 2 * kPointerSize),
eax);
4079 Label done, stub_call;
4080 JumpPatchSite patch_site(masm_);
4082 if (ShouldInlineSmiCase(expr->op())) {
4083 if (expr->op() == Token::INC) {
4091 patch_site.EmitJumpIfSmi(
eax, &done, Label::kNear);
4093 __ bind(&stub_call);
4095 if (expr->op() == Token::INC) {
4103 SetSourcePosition(expr->position());
4109 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4110 patch_site.EmitPatchInfo();
4114 switch (assign_type) {
4116 if (expr->is_postfix()) {
4118 { EffectContext context(
this);
4119 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4121 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4126 if (!context()->IsEffect()) {
4127 context()->PlugTOS();
4131 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4133 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4134 context()->Plug(
eax);
4137 case NAMED_PROPERTY: {
4138 __ mov(ecx, prop->key()->AsLiteral()->handle());
4140 Handle<Code> ic = is_classic_mode()
4141 ? isolate()->builtins()->StoreIC_Initialize()
4142 : isolate()->builtins()->StoreIC_Initialize_Strict();
4143 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4144 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4145 if (expr->is_postfix()) {
4146 if (!context()->IsEffect()) {
4147 context()->PlugTOS();
4150 context()->Plug(
eax);
4154 case KEYED_PROPERTY: {
4157 Handle<Code> ic = is_classic_mode()
4158 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4159 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4160 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4161 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
4162 if (expr->is_postfix()) {
4164 if (!context()->IsEffect()) {
4165 context()->PlugTOS();
4168 context()->Plug(
eax);
4176 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4177 VariableProxy* proxy = expr->AsVariableProxy();
4178 ASSERT(!context()->IsEffect());
4179 ASSERT(!context()->IsTest());
4181 if (proxy !=
NULL && proxy->var()->IsUnallocated()) {
4182 Comment cmnt(masm_,
"Global variable");
4184 __ mov(ecx, Immediate(proxy->name()));
4185 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4189 PrepareForBailout(expr,
TOS_REG);
4190 context()->Plug(
eax);
4191 }
else if (proxy !=
NULL && proxy->var()->IsLookupSlot()) {
4196 EmitDynamicLookupFastCase(proxy->var(),
INSIDE_TYPEOF, &slow, &done);
4200 __ push(Immediate(proxy->name()));
4201 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4202 PrepareForBailout(expr,
TOS_REG);
4205 context()->Plug(
eax);
4208 VisitInDuplicateContext(expr);
4213 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4214 Expression* sub_expr,
4215 Handle<String>
check) {
4216 Label materialize_true, materialize_false;
4217 Label* if_true =
NULL;
4218 Label* if_false =
NULL;
4219 Label* fall_through =
NULL;
4220 context()->PrepareTest(&materialize_true, &materialize_false,
4221 &if_true, &if_false, &fall_through);
4223 { AccumulatorValueContext context(
this);
4224 VisitForTypeofValue(sub_expr);
4226 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4228 if (check->Equals(isolate()->heap()->number_symbol())) {
4229 __ JumpIfSmi(
eax, if_true);
4231 isolate()->factory()->heap_number_map());
4232 Split(
equal, if_true, if_false, fall_through);
4233 }
else if (check->Equals(isolate()->heap()->string_symbol())) {
4234 __ JumpIfSmi(
eax, if_false);
4240 Split(
zero, if_true, if_false, fall_through);
4241 }
else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4242 __ cmp(
eax, isolate()->factory()->true_value());
4244 __ cmp(
eax, isolate()->factory()->false_value());
4245 Split(
equal, if_true, if_false, fall_through);
4246 }
else if (FLAG_harmony_typeof &&
4247 check->Equals(isolate()->heap()->null_symbol())) {
4248 __ cmp(
eax, isolate()->factory()->null_value());
4249 Split(
equal, if_true, if_false, fall_through);
4250 }
else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4251 __ cmp(
eax, isolate()->factory()->undefined_value());
4253 __ JumpIfSmi(
eax, if_false);
4258 Split(
not_zero, if_true, if_false, fall_through);
4259 }
else if (check->Equals(isolate()->heap()->function_symbol())) {
4260 __ JumpIfSmi(
eax, if_false);
4265 Split(
equal, if_true, if_false, fall_through);
4266 }
else if (check->Equals(isolate()->heap()->object_symbol())) {
4267 __ JumpIfSmi(
eax, if_false);
4268 if (!FLAG_harmony_typeof) {
4269 __ cmp(
eax, isolate()->factory()->null_value());
4279 Split(
zero, if_true, if_false, fall_through);
4281 if (if_false != fall_through)
__ jmp(if_false);
4283 context()->Plug(if_true, if_false);
4287 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4288 Comment cmnt(masm_,
"[ CompareOperation");
4289 SetSourcePosition(expr->position());
4293 if (TryLiteralCompare(expr))
return;
4297 Label materialize_true, materialize_false;
4298 Label* if_true =
NULL;
4299 Label* if_false =
NULL;
4300 Label* fall_through =
NULL;
4301 context()->PrepareTest(&materialize_true, &materialize_false,
4302 &if_true, &if_false, &fall_through);
4305 VisitForStackValue(expr->left());
4308 VisitForStackValue(expr->right());
4310 PrepareForBailoutBeforeSplit(expr,
false,
NULL,
NULL);
4311 __ cmp(
eax, isolate()->factory()->true_value());
4312 Split(
equal, if_true, if_false, fall_through);
4315 case Token::INSTANCEOF: {
4316 VisitForStackValue(expr->right());
4319 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4322 Split(
zero, if_true, if_false, fall_through);
4327 VisitForAccumulatorValue(expr->right());
4330 case Token::EQ_STRICT:
4347 case Token::INSTANCEOF:
4353 bool inline_smi_code = ShouldInlineSmiCase(op);
4354 JumpPatchSite patch_site(masm_);
4355 if (inline_smi_code) {
4359 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4361 Split(cc, if_true, if_false,
NULL);
4362 __ bind(&slow_case);
4366 SetSourcePosition(expr->position());
4368 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4369 patch_site.EmitPatchInfo();
4371 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4373 Split(cc, if_true, if_false, fall_through);
4379 context()->Plug(if_true, if_false);
4383 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4384 Expression* sub_expr,
4386 Label materialize_true, materialize_false;
4387 Label* if_true =
NULL;
4388 Label* if_false =
NULL;
4389 Label* fall_through =
NULL;
4390 context()->PrepareTest(&materialize_true, &materialize_false,
4391 &if_true, &if_false, &fall_through);
4393 VisitForAccumulatorValue(sub_expr);
4394 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
4395 Handle<Object> nil_value = nil ==
kNullValue ?
4396 isolate()->factory()->null_value() :
4397 isolate()->factory()->undefined_value();
4398 __ cmp(
eax, nil_value);
4399 if (expr->op() == Token::EQ_STRICT) {
4400 Split(
equal, if_true, if_false, fall_through);
4402 Handle<Object> other_nil_value = nil ==
kNullValue ?
4403 isolate()->factory()->undefined_value() :
4404 isolate()->factory()->null_value();
4406 __ cmp(
eax, other_nil_value);
4408 __ JumpIfSmi(
eax, if_false);
4413 Split(
not_zero, if_true, if_false, fall_through);
4415 context()->Plug(if_true, if_false);
4419 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4421 context()->Plug(
eax);
4425 Register FullCodeGenerator::result_register() {
4430 Register FullCodeGenerator::context_register() {
4435 void FullCodeGenerator::StoreToFrameField(
int frame_offset, Register value) {
4437 __ mov(Operand(
ebp, frame_offset), value);
4441 void FullCodeGenerator::LoadContextField(Register dst,
int context_index) {
4446 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4448 if (declaration_scope->is_global_scope() ||
4449 declaration_scope->is_module_scope()) {
4455 }
else if (declaration_scope->is_eval_scope()) {
4461 ASSERT(declaration_scope->is_function_scope());
4470 void FullCodeGenerator::EnterFinallyBlock() {
4481 __ push(result_register());
4484 ExternalReference pending_message_obj =
4485 ExternalReference::address_of_pending_message_obj(isolate());
4486 __ mov(
edx, Operand::StaticVariable(pending_message_obj));
4489 ExternalReference has_pending_message =
4490 ExternalReference::address_of_has_pending_message(isolate());
4491 __ mov(
edx, Operand::StaticVariable(has_pending_message));
4494 ExternalReference pending_message_script =
4495 ExternalReference::address_of_pending_message_script(isolate());
4496 __ mov(
edx, Operand::StaticVariable(pending_message_script));
4501 void FullCodeGenerator::ExitFinallyBlock() {
4505 ExternalReference pending_message_script =
4506 ExternalReference::address_of_pending_message_script(isolate());
4507 __ mov(Operand::StaticVariable(pending_message_script),
edx);
4510 ExternalReference has_pending_message =
4511 ExternalReference::address_of_has_pending_message(isolate());
4512 __ mov(Operand::StaticVariable(has_pending_message),
edx);
4515 ExternalReference pending_message_obj =
4516 ExternalReference::address_of_pending_message_obj(isolate());
4517 __ mov(Operand::StaticVariable(pending_message_obj),
edx);
4520 __ pop(result_register());
4532 #define __ ACCESS_MASM(masm())
4536 int* context_length) {
4543 __ Drop(*stack_depth);
4544 if (*context_length > 0) {
4550 __ call(finally_entry_);
4553 *context_length = 0;
4562 #endif // V8_TARGET_ARCH_IA32
static const int kCallerFPOffset
static const int kLengthOffset
static const int kBitFieldOffset
Scope * DeclarationScope()
const intptr_t kSmiTagMask
VariableDeclaration * function() const
static int SlotOffset(int index)
static const int kFingerOffset
static const int kBuiltinsOffset
static String * cast(Object *obj)
static const int kDeclarationsId
static Smi * FromInt(int value)
bool IsFastObjectElementsKind(ElementsKind kind)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
static const int kDataOffset
static const int kGlobalReceiverOffset
int SizeOfCodeGeneratedSince(Label *label)
Scope * outer_scope() const
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static bool IsSupported(CpuFeature f)
static const int kCallerSPOffset
#define ASSERT(condition)
static const int kInObjectFieldCount
const char * comment() const
static bool IsCompileTimeValue(Expression *expression)
#define POINTER_SIZE_ALIGN(value)
const uint32_t kStringRepresentationMask
static const int kMaximumSlots
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
static const int kGlobalContextOffset
Variable * parameter(int index) const
MemOperand ContextOperand(Register context, int index)
static const int kFunctionEntryId
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
static const int kHashFieldOffset
static const int kLiteralsOffset
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
static const int kValueOffset
Variable * arguments() const
static const int kForInSlowCaseMarker
static const int kJSReturnSequenceLength
static const int kForInFastCaseMarker
static const int kStringWrapperSafeForDefaultValueOf
Operand FieldOperand(Register object, int offset)
static const int kContextOffset
static const int kCacheStampOffset
static TestContext * cast(AstContext *context)
static const int kPropertiesOffset
static const int kHeaderSize
static const int kElementsOffset
static const int kContainsCachedArrayIndexMask
const uint32_t kStringTag
friend class NestedStatement
Vector< const char > CStrVector(const char *data)
static int OffsetOfElementAt(int index)
static const int kLengthOffset
static const int kMaxLoopNestingMarker
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
static const int kEnumerationIndexOffset
static const int kMapOffset
static const int kValueOffset
static const int kEnumCacheBridgeCacheOffset
const uint32_t kIsNotStringMask
void VisitIllegalRedeclaration(AstVisitor *visitor)
static const int kLengthOffset
static const int kContextOffset
static const int kFunctionOffset
Handle< Object > CodeObject()
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset flag
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
static const int kConstructorOffset
#define ASSERT_NE(v1, v2)
static Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
static const int kIsUndetectable
static bool ShouldGenerateLog(Expression *type)
static const int kMaximumClonedProperties
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const int kPrototypeOffset
static const int kValueOffset
const uint32_t kAsciiStringTag
static const int kMarkerOffset
static const int kSharedFunctionInfoOffset
static FixedArrayBase * cast(Object *object)
static const int kLocal0Offset
static const int kMaxValue
static const int kBitField2Offset
static Handle< Code > GetUninitialized(Token::Value op)
void check(i::Vector< const char > string)
static const int kExponentOffset
static const int kMaximumClonedLength
static const int kValueOffset
static const int kFirstIndex
const uint32_t kStringEncodingMask
static const int kInstanceTypeOffset
static const int kMantissaOffset