30 #if V8_TARGET_ARCH_IA32
45 #define __ ACCESS_MASM(masm_)
50 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
52 info_emitted_ =
false;
57 ASSERT(patch_site_.is_bound() == info_emitted_);
60 void EmitJumpIfNotSmi(Register reg,
62 Label::Distance distance = Label::kFar) {
67 void EmitJumpIfSmi(Register reg,
69 Label::Distance distance = Label::kFar) {
71 EmitJump(
carry, target, distance);
74 void EmitPatchInfo() {
75 if (patch_site_.is_bound()) {
76 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77 ASSERT(is_int8(delta_to_patch_site));
78 __ test(
eax, Immediate(delta_to_patch_site));
89 void EmitJump(
Condition cc, Label* target, Label::Distance distance) {
90 ASSERT(!patch_site_.is_bound() && !info_emitted_);
92 __ bind(&patch_site_);
93 __ j(cc, target, distance);
96 MacroAssembler* masm_;
104 static void EmitStackCheck(MacroAssembler* masm_,
106 Register scratch =
esp) {
108 Isolate* isolate = masm_->isolate();
109 ExternalReference stack_limit =
110 ExternalReference::address_of_stack_limit(isolate);
111 ASSERT(scratch.is(
esp) == (pointers == 0));
113 __ mov(scratch,
esp);
116 __ cmp(scratch, Operand::StaticVariable(stack_limit));
118 __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
136 void FullCodeGenerator::Generate() {
137 CompilationInfo*
info = info_;
139 isolate()->factory()->NewFixedArray(
function()->handler_count(),
TENURED);
141 InitializeFeedbackVector();
143 profiling_counter_ = isolate()->factory()->NewCell(
144 Handle<Smi>(
Smi::FromInt(FLAG_interrupt_budget), isolate()));
145 SetFunctionPosition(
function());
146 Comment cmnt(masm_,
"[ function compiled by full code generator");
151 if (strlen(FLAG_stop_at) > 0 &&
152 info->function()->name()->IsUtf8EqualTo(
CStrVector(FLAG_stop_at))) {
160 if (info->strict_mode() ==
SLOPPY && !info->is_native()) {
163 int receiver_offset = (info->scope()->num_parameters() + 1) *
kPointerSize;
164 __ mov(
ecx, Operand(
esp, receiver_offset));
166 __ cmp(
ecx, isolate()->factory()->undefined_value());
172 __ mov(Operand(
esp, receiver_offset),
ecx);
180 FrameScope frame_scope(masm_, StackFrame::MANUAL);
182 info->set_prologue_offset(masm_->pc_offset());
184 info->AddNoFrameRange(0, masm_->pc_offset());
186 { Comment cmnt(masm_,
"[ Allocate locals");
187 int locals_count = info->scope()->num_stack_slots();
189 ASSERT(!info->function()->is_generator() || locals_count == 0);
190 if (locals_count == 1) {
191 __ push(Immediate(isolate()->factory()->undefined_value()));
192 }
else if (locals_count > 1) {
193 if (locals_count >= 128) {
194 EmitStackCheck(masm_, locals_count,
ecx);
196 __ mov(
eax, Immediate(isolate()->factory()->undefined_value()));
197 const int kMaxPushes = 32;
198 if (locals_count >= kMaxPushes) {
199 int loop_iterations = locals_count / kMaxPushes;
200 __ mov(
ecx, loop_iterations);
202 __ bind(&loop_header);
204 for (
int i = 0; i < kMaxPushes; i++) {
210 int remaining = locals_count % kMaxPushes;
212 for (
int i = 0; i < remaining; i++) {
218 bool function_in_register =
true;
222 if (heap_slots > 0) {
223 Comment cmnt(masm_,
"[ Allocate context");
225 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
227 __ Push(info->scope()->GetScopeInfo());
228 __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
229 }
else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
230 FastNewContextStub stub(heap_slots);
234 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
236 function_in_register =
false;
243 int num_parameters = info->scope()->num_parameters();
244 for (
int i = 0; i < num_parameters; i++) {
246 if (var->IsContextSlot()) {
250 __ mov(
eax, Operand(
ebp, parameter_offset));
253 __ mov(Operand(
esi, context_offset),
eax);
255 __ RecordWriteContextSlot(
esi,
264 Variable* arguments = scope()->
arguments();
265 if (arguments !=
NULL) {
267 Comment cmnt(masm_,
"[ Allocate arguments object");
268 if (function_in_register) {
274 int num_parameters = info->scope()->num_parameters();
285 if (strict_mode() ==
STRICT) {
287 }
else if (
function()->has_duplicate_parameters()) {
292 ArgumentsAccessStub stub(type);
299 __ CallRuntime(Runtime::kTraceEnter, 0);
304 if (scope()->HasIllegalRedeclaration()) {
305 Comment cmnt(masm_,
"[ Declarations");
310 { Comment cmnt(masm_,
"[ Declarations");
313 if (scope()->is_function_scope() && scope()->
function() !=
NULL) {
314 VariableDeclaration*
function = scope()->
function();
315 ASSERT(function->proxy()->var()->mode() ==
CONST ||
318 VisitVariableDeclaration(
function);
320 VisitDeclarations(scope()->declarations());
323 { Comment cmnt(masm_,
"[ Stack check");
325 EmitStackCheck(masm_);
328 { Comment cmnt(masm_,
"[ Body");
329 ASSERT(loop_depth() == 0);
330 VisitStatements(
function()->body());
331 ASSERT(loop_depth() == 0);
337 { Comment cmnt(masm_,
"[ return <undefined>;");
338 __ mov(
eax, isolate()->factory()->undefined_value());
339 EmitReturnSequence();
344 void FullCodeGenerator::ClearAccumulator() {
349 void FullCodeGenerator::EmitProfilingCounterDecrement(
int delta) {
350 __ mov(
ebx, Immediate(profiling_counter_));
356 void FullCodeGenerator::EmitProfilingCounterReset() {
357 int reset_value = FLAG_interrupt_budget;
358 __ mov(
ebx, Immediate(profiling_counter_));
364 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
365 Label* back_edge_target) {
366 Comment cmnt(masm_,
"[ Back edge bookkeeping");
369 ASSERT(back_edge_target->is_bound());
370 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
372 Max(1, distance / kCodeSizeMultiplier));
373 EmitProfilingCounterDecrement(weight);
375 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
380 RecordBackEdge(stmt->OsrEntryId());
382 EmitProfilingCounterReset();
389 PrepareForBailoutForId(stmt->OsrEntryId(),
NO_REGISTERS);
393 void FullCodeGenerator::EmitReturnSequence() {
394 Comment cmnt(masm_,
"[ Return sequence");
395 if (return_label_.is_bound()) {
396 __ jmp(&return_label_);
399 __ bind(&return_label_);
402 __ CallRuntime(Runtime::kTraceExit, 1);
407 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
409 int distance = masm_->pc_offset();
411 Max(1, distance / kCodeSizeMultiplier));
413 EmitProfilingCounterDecrement(weight);
417 __ call(isolate()->builtins()->InterruptCheck(),
418 RelocInfo::CODE_TARGET);
420 EmitProfilingCounterReset();
424 Label check_exit_codesize;
425 masm_->bind(&check_exit_codesize);
427 SetSourcePosition(
function()->end_position() - 1);
432 int no_frame_start = masm_->pc_offset();
436 __ Ret(arguments_bytes,
ecx);
437 #ifdef ENABLE_DEBUGGER_SUPPORT
441 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
448 void FullCodeGenerator::EffectContext::Plug(Variable* var)
const {
449 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
453 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var)
const {
454 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
455 codegen()->GetVar(result_register(), var);
459 void FullCodeGenerator::StackValueContext::Plug(Variable* var)
const {
460 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
461 MemOperand operand = codegen()->VarOperand(var, result_register());
467 void FullCodeGenerator::TestContext::Plug(Variable* var)
const {
469 codegen()->GetVar(result_register(), var);
470 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
471 codegen()->DoTest(
this);
480 void FullCodeGenerator::AccumulatorValueContext::Plug(
486 void FullCodeGenerator::StackValueContext::Plug(
497 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit)
const {
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502 Handle<Object> lit)
const {
504 __ SafeMove(result_register(), Immediate(lit));
506 __ Move(result_register(), Immediate(lit));
511 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit)
const {
513 __ SafePush(Immediate(lit));
515 __ push(Immediate(lit));
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit)
const {
521 codegen()->PrepareForBailoutBeforeSplit(condition(),
525 ASSERT(!lit->IsUndetectableObject());
526 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527 if (false_label_ != fall_through_)
__ jmp(false_label_);
528 }
else if (lit->IsTrue() || lit->IsJSObject()) {
529 if (true_label_ != fall_through_)
__ jmp(true_label_);
530 }
else if (lit->IsString()) {
532 if (false_label_ != fall_through_)
__ jmp(false_label_);
534 if (true_label_ != fall_through_)
__ jmp(true_label_);
536 }
else if (lit->IsSmi()) {
538 if (false_label_ != fall_through_)
__ jmp(false_label_);
540 if (true_label_ != fall_through_)
__ jmp(true_label_);
544 __ mov(result_register(), lit);
545 codegen()->DoTest(
this);
550 void FullCodeGenerator::EffectContext::DropAndPlug(
int count,
551 Register reg)
const {
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
559 Register reg)
const {
562 __ Move(result_register(), reg);
566 void FullCodeGenerator::StackValueContext::DropAndPlug(
int count,
567 Register reg)
const {
569 if (count > 1)
__ Drop(count - 1);
570 __ mov(Operand(
esp, 0), reg);
574 void FullCodeGenerator::TestContext::DropAndPlug(
int count,
575 Register reg)
const {
579 __ Move(result_register(), reg);
580 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
581 codegen()->DoTest(
this);
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586 Label* materialize_false)
const {
587 ASSERT(materialize_true == materialize_false);
588 __ bind(materialize_true);
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593 Label* materialize_true,
594 Label* materialize_false)
const {
596 __ bind(materialize_true);
597 __ mov(result_register(), isolate()->factory()->true_value());
598 __ jmp(&done, Label::kNear);
599 __ bind(materialize_false);
600 __ mov(result_register(), isolate()->factory()->false_value());
605 void FullCodeGenerator::StackValueContext::Plug(
606 Label* materialize_true,
607 Label* materialize_false)
const {
609 __ bind(materialize_true);
610 __ push(Immediate(isolate()->factory()->true_value()));
611 __ jmp(&done, Label::kNear);
612 __ bind(materialize_false);
613 __ push(Immediate(isolate()->factory()->false_value()));
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619 Label* materialize_false)
const {
620 ASSERT(materialize_true == true_label_);
621 ASSERT(materialize_false == false_label_);
625 void FullCodeGenerator::EffectContext::Plug(
bool flag)
const {
629 void FullCodeGenerator::AccumulatorValueContext::Plug(
bool flag)
const {
630 Handle<Object> value = flag
631 ? isolate()->factory()->true_value()
632 : isolate()->factory()->false_value();
633 __ mov(result_register(), value);
637 void FullCodeGenerator::StackValueContext::Plug(
bool flag)
const {
638 Handle<Object> value = flag
639 ? isolate()->factory()->true_value()
640 : isolate()->factory()->false_value();
641 __ push(Immediate(value));
645 void FullCodeGenerator::TestContext::Plug(
bool flag)
const {
646 codegen()->PrepareForBailoutBeforeSplit(condition(),
651 if (true_label_ != fall_through_)
__ jmp(true_label_);
653 if (false_label_ != fall_through_)
__ jmp(false_label_);
658 void FullCodeGenerator::DoTest(Expression* condition,
661 Label* fall_through) {
663 CallIC(ic, condition->test_id());
664 __ test(result_register(), result_register());
666 Split(
not_zero, if_true, if_false, fall_through);
673 Label* fall_through) {
674 if (if_false == fall_through) {
676 }
else if (if_true == fall_through) {
685 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
686 ASSERT(var->IsStackAllocated());
690 if (var->IsParameter()) {
695 return Operand(
ebp, offset);
699 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
700 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
701 if (var->IsContextSlot()) {
703 __ LoadContext(scratch, context_chain_length);
706 return StackOperand(var);
711 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
712 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
714 __ mov(dest, location);
718 void FullCodeGenerator::SetVar(Variable* var,
722 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
723 ASSERT(!scratch0.is(src));
724 ASSERT(!scratch0.is(scratch1));
725 ASSERT(!scratch1.is(src));
726 MemOperand location = VarOperand(var, scratch0);
727 __ mov(location, src);
730 if (var->IsContextSlot()) {
738 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
739 bool should_normalize,
748 if (should_normalize)
__ jmp(&skip, Label::kNear);
749 PrepareForBailout(expr,
TOS_REG);
750 if (should_normalize) {
751 __ cmp(
eax, isolate()->factory()->true_value());
758 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
760 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
761 if (generate_debug_code_) {
764 __ cmp(
ebx, isolate()->factory()->with_context_map());
766 __ cmp(
ebx, isolate()->factory()->catch_context_map());
772 void FullCodeGenerator::VisitVariableDeclaration(
773 VariableDeclaration* declaration) {
777 VariableProxy* proxy = declaration->proxy();
779 Variable* variable = proxy->var();
781 switch (variable->location()) {
783 globals_->Add(variable->name(), zone());
784 globals_->Add(variable->binding_needs_init()
785 ? isolate()->factory()->the_hole_value()
786 : isolate()->factory()->undefined_value(), zone());
792 Comment cmnt(masm_,
"[ VariableDeclaration");
793 __ mov(StackOperand(variable),
794 Immediate(isolate()->factory()->the_hole_value()));
800 Comment cmnt(masm_,
"[ VariableDeclaration");
801 EmitDebugCheckDeclarationContext(variable);
803 Immediate(isolate()->factory()->the_hole_value()));
810 Comment cmnt(masm_,
"[ VariableDeclaration");
812 __ push(Immediate(variable->name()));
823 __ push(Immediate(isolate()->factory()->the_hole_value()));
827 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
834 void FullCodeGenerator::VisitFunctionDeclaration(
835 FunctionDeclaration* declaration) {
836 VariableProxy* proxy = declaration->proxy();
837 Variable* variable = proxy->var();
838 switch (variable->location()) {
840 globals_->Add(variable->name(), zone());
841 Handle<SharedFunctionInfo>
function =
844 if (
function.is_null())
return SetStackOverflow();
845 globals_->Add(
function, zone());
851 Comment cmnt(masm_,
"[ FunctionDeclaration");
852 VisitForAccumulatorValue(declaration->fun());
853 __ mov(StackOperand(variable), result_register());
858 Comment cmnt(masm_,
"[ FunctionDeclaration");
859 EmitDebugCheckDeclarationContext(variable);
860 VisitForAccumulatorValue(declaration->fun());
863 __ RecordWriteContextSlot(
esi,
875 Comment cmnt(masm_,
"[ FunctionDeclaration");
877 __ push(Immediate(variable->name()));
879 VisitForStackValue(declaration->fun());
880 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
887 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
888 Variable* variable = declaration->proxy()->var();
890 ASSERT(variable->interface()->IsFrozen());
892 Comment cmnt(masm_,
"[ ModuleDeclaration");
893 EmitDebugCheckDeclarationContext(variable);
903 __ RecordWriteContextSlot(
esi,
910 PrepareForBailoutForId(declaration->proxy()->id(),
NO_REGISTERS);
913 Visit(declaration->module());
917 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
918 VariableProxy* proxy = declaration->proxy();
919 Variable* variable = proxy->var();
920 switch (variable->location()) {
926 Comment cmnt(masm_,
"[ ImportDeclaration");
927 EmitDebugCheckDeclarationContext(variable);
940 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray>
pairs) {
950 __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
955 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
957 __ Push(descriptions);
958 __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
963 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
964 Comment cmnt(masm_,
"[ SwitchStatement");
965 Breakable nested_statement(
this, stmt);
966 SetStatementPosition(stmt);
969 VisitForStackValue(stmt->tag());
972 ZoneList<CaseClause*>* clauses = stmt->cases();
973 CaseClause* default_clause =
NULL;
977 for (
int i = 0; i < clauses->length(); i++) {
978 CaseClause* clause = clauses->at(i);
979 clause->body_target()->Unuse();
982 if (clause->is_default()) {
983 default_clause = clause;
987 Comment cmnt(masm_,
"[ Case comparison");
992 VisitForAccumulatorValue(clause->label());
996 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
997 JumpPatchSite patch_site(masm_);
998 if (inline_smi_code) {
1002 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
1007 __ jmp(clause->body_target());
1008 __ bind(&slow_case);
1012 SetSourcePosition(clause->position());
1014 CallIC(ic, clause->CompareId());
1015 patch_site.EmitPatchInfo();
1018 __ jmp(&skip, Label::kNear);
1019 PrepareForBailout(clause,
TOS_REG);
1020 __ cmp(
eax, isolate()->factory()->true_value());
1023 __ jmp(clause->body_target());
1029 __ jmp(clause->body_target());
1034 __ bind(&next_test);
1036 if (default_clause ==
NULL) {
1037 __ jmp(nested_statement.break_label());
1039 __ jmp(default_clause->body_target());
1043 for (
int i = 0; i < clauses->length(); i++) {
1044 Comment cmnt(masm_,
"[ Case body");
1045 CaseClause* clause = clauses->at(i);
1046 __ bind(clause->body_target());
1047 PrepareForBailoutForId(clause->EntryId(),
NO_REGISTERS);
1048 VisitStatements(clause->statements());
1051 __ bind(nested_statement.break_label());
1056 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1057 Comment cmnt(masm_,
"[ ForInStatement");
1058 int slot = stmt->ForInFeedbackSlot();
1060 SetStatementPosition(stmt);
1063 ForIn loop_statement(
this, stmt);
1064 increment_loop_depth();
1068 VisitForAccumulatorValue(stmt->enumerable());
1069 __ cmp(
eax, isolate()->factory()->undefined_value());
1071 __ cmp(
eax, isolate()->factory()->null_value());
1074 PrepareForBailoutForId(stmt->PrepareId(),
TOS_REG);
1077 Label convert, done_convert;
1078 __ JumpIfSmi(
eax, &convert, Label::kNear);
1084 __ bind(&done_convert);
1088 Label call_runtime, use_cache, fixed_array;
1097 __ CheckEnumCache(&call_runtime);
1100 __ jmp(&use_cache, Label::kNear);
1103 __ bind(&call_runtime);
1105 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1107 isolate()->factory()->meta_map());
1112 Label no_descriptors;
1113 __ bind(&use_cache);
1119 __ LoadInstanceDescriptors(
eax, ecx);
1130 __ bind(&no_descriptors);
1131 __ add(
esp, Immediate(kPointerSize));
1136 __ bind(&fixed_array);
1138 Handle<Object> feedback = Handle<Object>(
1141 StoreFeedbackVectorSlot(slot, feedback);
1144 __ LoadHeapObject(
ebx, FeedbackVector());
1149 __ mov(ecx, Operand(
esp, 0 * kPointerSize));
1154 __ bind(&non_proxy);
1164 __ mov(
eax, Operand(
esp, 0 * kPointerSize));
1165 __ cmp(
eax, Operand(
esp, 1 * kPointerSize));
1169 __ mov(
ebx, Operand(
esp, 2 * kPointerSize));
1174 __ mov(
edx, Operand(
esp, 3 * kPointerSize));
1179 __ mov(ecx, Operand(
esp, 4 * kPointerSize));
1181 __ j(
equal, &update_each, Label::kNear);
1187 __ j(
zero, &update_each);
1196 __ j(
equal, loop_statement.continue_label());
1201 __ bind(&update_each);
1202 __ mov(result_register(),
ebx);
1204 { EffectContext context(
this);
1205 EmitAssignment(stmt->each());
1209 Visit(stmt->body());
1213 __ bind(loop_statement.continue_label());
1216 EmitBackEdgeBookkeeping(stmt, &loop);
1220 __ bind(loop_statement.break_label());
1221 __ add(
esp, Immediate(5 * kPointerSize));
1226 decrement_loop_depth();
1230 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1231 Comment cmnt(masm_,
"[ ForOfStatement");
1232 SetStatementPosition(stmt);
1234 Iteration loop_statement(
this, stmt);
1235 increment_loop_depth();
1238 VisitForAccumulatorValue(stmt->assign_iterator());
1241 __ CompareRoot(
eax, Heap::kUndefinedValueRootIndex);
1242 __ j(
equal, loop_statement.break_label());
1243 __ CompareRoot(
eax, Heap::kNullValueRootIndex);
1244 __ j(
equal, loop_statement.break_label());
1247 Label convert, done_convert;
1248 __ JumpIfSmi(
eax, &convert);
1254 __ bind(&done_convert);
1257 __ bind(loop_statement.continue_label());
1260 VisitForEffect(stmt->next_result());
1263 Label result_not_done;
1264 VisitForControl(stmt->result_done(),
1265 loop_statement.break_label(),
1268 __ bind(&result_not_done);
1271 VisitForEffect(stmt->assign_each());
1274 Visit(stmt->body());
1277 PrepareForBailoutForId(stmt->BackEdgeId(),
NO_REGISTERS);
1278 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1279 __ jmp(loop_statement.continue_label());
1283 __ bind(loop_statement.break_label());
1284 decrement_loop_depth();
1288 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1296 if (!FLAG_always_opt &&
1297 !FLAG_prepare_always_opt &&
1299 scope()->is_function_scope() &&
1300 info->num_literals() == 0) {
1301 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1302 __ mov(
ebx, Immediate(info));
1306 __ push(Immediate(info));
1307 __ push(Immediate(pretenure
1308 ? isolate()->factory()->true_value()
1309 : isolate()->factory()->false_value()));
1310 __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1312 context()->Plug(
eax);
1316 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1317 Comment cmnt(masm_,
"[ VariableProxy");
1318 EmitVariableLoad(expr);
1322 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1325 Register context =
esi;
1326 Register temp =
edx;
1330 if (s->num_heap_slots() > 0) {
1331 if (s->calls_sloppy_eval()) {
1345 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope())
break;
1346 s = s->outer_scope();
1349 if (s !=
NULL && s->is_eval_scope()) {
1353 if (!context.is(temp)) {
1354 __ mov(temp, context);
1359 Immediate(isolate()->factory()->native_context_map()));
1360 __ j(
equal, &fast, Label::kNear);
1373 __ mov(ecx, var->name());
1382 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1384 ASSERT(var->IsContextSlot());
1385 Register context =
esi;
1386 Register temp =
ebx;
1388 for (Scope* s = scope(); s != var->scope(); s = s->
outer_scope()) {
1389 if (s->num_heap_slots() > 0) {
1390 if (s->calls_sloppy_eval()) {
1412 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1422 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1425 Variable* local = var->local_if_not_shadowed();
1426 __ mov(
eax, ContextSlotOperandCheckExtensions(local, slow));
1427 if (local->mode() ==
LET || local->mode() ==
CONST ||
1429 __ cmp(
eax, isolate()->factory()->the_hole_value());
1432 __ mov(
eax, isolate()->factory()->undefined_value());
1434 __ push(Immediate(var->name()));
1435 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1443 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1445 SetSourcePosition(proxy->position());
1446 Variable* var = proxy->var();
1450 switch (var->location()) {
1452 Comment cmnt(masm_,
"[ Global variable");
1456 __ mov(ecx, var->name());
1458 context()->Plug(
eax);
1465 Comment cmnt(masm_, var->IsContextSlot() ?
"[ Context variable"
1466 :
"[ Stack variable");
1467 if (var->binding_needs_init()) {
1491 bool skip_init_check;
1493 skip_init_check =
false;
1496 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1497 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1499 var->initializer_position() < proxy->position();
1502 if (!skip_init_check) {
1506 __ cmp(
eax, isolate()->factory()->the_hole_value());
1508 if (var->mode() ==
LET || var->mode() ==
CONST) {
1511 __ push(Immediate(var->name()));
1512 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1516 __ mov(
eax, isolate()->factory()->undefined_value());
1519 context()->Plug(
eax);
1523 context()->Plug(var);
1528 Comment cmnt(masm_,
"[ Lookup variable");
1535 __ push(Immediate(var->name()));
1536 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1538 context()->Plug(
eax);
1545 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1546 Comment cmnt(masm_,
"[ RegExpLiteral");
1555 int literal_offset =
1558 __ cmp(
ebx, isolate()->factory()->undefined_value());
1565 __ push(Immediate(expr->pattern()));
1566 __ push(Immediate(expr->flags()));
1567 __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1570 __ bind(&materialized);
1572 Label allocated, runtime_allocate;
1576 __ bind(&runtime_allocate);
1579 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1582 __ bind(&allocated);
1591 if ((size % (2 * kPointerSize)) != 0) {
1595 context()->Plug(
eax);
1599 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1600 if (expression ==
NULL) {
1601 __ push(Immediate(isolate()->factory()->null_value()));
1603 VisitForStackValue(expression);
1608 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1609 Comment cmnt(masm_,
"[ ObjectLiteral");
1611 expr->BuildConstantProperties(isolate());
1612 Handle<FixedArray> constant_properties = expr->constant_properties();
1613 int flags = expr->fast_elements()
1614 ? ObjectLiteral::kFastElements
1615 : ObjectLiteral::kNoFlags;
1616 flags |= expr->has_function()
1617 ? ObjectLiteral::kHasFunction
1618 : ObjectLiteral::kNoFlags;
1619 int properties_count = constant_properties->length() / 2;
1621 flags != ObjectLiteral::kFastElements ||
1626 __ push(Immediate(constant_properties));
1628 __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1633 __ mov(ecx, Immediate(constant_properties));
1635 FastCloneShallowObjectStub stub(properties_count);
1641 bool result_saved =
false;
1646 expr->CalculateEmitStore(zone());
1648 AccessorTable accessor_table(zone());
1649 for (
int i = 0; i < expr->properties()->length(); i++) {
1650 ObjectLiteral::Property*
property = expr->properties()->at(i);
1651 if (property->IsCompileTimeValue())
continue;
1653 Literal* key =
property->key();
1654 Expression* value =
property->value();
1655 if (!result_saved) {
1657 result_saved =
true;
1659 switch (property->kind()) {
1662 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1665 case ObjectLiteral::Property::COMPUTED:
1666 if (key->value()->IsInternalizedString()) {
1667 if (property->emit_store()) {
1668 VisitForAccumulatorValue(value);
1669 __ mov(ecx, Immediate(key->value()));
1671 CallStoreIC(key->LiteralFeedbackId());
1674 VisitForEffect(value);
1678 __ push(Operand(
esp, 0));
1679 VisitForStackValue(key);
1680 VisitForStackValue(value);
1681 if (property->emit_store()) {
1683 __ CallRuntime(Runtime::kSetProperty, 4);
1688 case ObjectLiteral::Property::PROTOTYPE:
1689 __ push(Operand(
esp, 0));
1690 VisitForStackValue(value);
1691 if (property->emit_store()) {
1692 __ CallRuntime(Runtime::kSetPrototype, 2);
1697 case ObjectLiteral::Property::GETTER:
1698 accessor_table.lookup(key)->second->getter = value;
1700 case ObjectLiteral::Property::SETTER:
1701 accessor_table.lookup(key)->second->setter = value;
1708 for (AccessorTable::Iterator it = accessor_table.begin();
1709 it != accessor_table.end();
1711 __ push(Operand(
esp, 0));
1712 VisitForStackValue(it->first);
1713 EmitAccessor(it->second->getter);
1714 EmitAccessor(it->second->setter);
1716 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1719 if (expr->has_function()) {
1721 __ push(Operand(
esp, 0));
1722 __ CallRuntime(Runtime::kToFastProperties, 1);
1726 context()->PlugTOS();
1728 context()->Plug(
eax);
1733 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1734 Comment cmnt(masm_,
"[ ArrayLiteral");
1736 expr->BuildConstantElements(isolate());
1737 int flags = expr->depth() == 1
1738 ? ArrayLiteral::kShallowElements
1739 : ArrayLiteral::kNoFlags;
1741 ZoneList<Expression*>* subexprs = expr->values();
1742 int length = subexprs->length();
1743 Handle<FixedArray> constant_elements = expr->constant_elements();
1744 ASSERT_EQ(2, constant_elements->length());
1747 bool has_constant_fast_elements =
1749 Handle<FixedArrayBase> constant_elements_values(
1753 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1759 Heap* heap = isolate()->heap();
1760 if (has_constant_fast_elements &&
1761 constant_elements_values->map() == heap->fixed_cow_array_map()) {
1764 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1768 __ mov(ecx, Immediate(constant_elements));
1769 FastCloneShallowArrayStub stub(
1771 allocation_site_mode,
1779 __ push(Immediate(constant_elements));
1781 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1784 FLAG_smi_only_arrays);
1790 if (has_constant_fast_elements) {
1797 __ mov(ecx, Immediate(constant_elements));
1798 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1802 bool result_saved =
false;
1806 for (
int i = 0; i < length; i++) {
1807 Expression* subexpr = subexprs->at(i);
1812 if (!result_saved) {
1815 result_saved =
true;
1817 VisitForAccumulatorValue(subexpr);
1823 __ mov(
ebx, Operand(
esp, kPointerSize));
1828 __ RecordWriteField(
ebx, offset, result_register(), ecx,
1830 EMIT_REMEMBERED_SET,
1835 StoreArrayLiteralElementStub stub;
1839 PrepareForBailoutForId(expr->GetIdForElement(i),
NO_REGISTERS);
1843 __ add(
esp, Immediate(kPointerSize));
1844 context()->PlugTOS();
1846 context()->Plug(
eax);
1851 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1852 ASSERT(expr->target()->IsValidLeftHandSide());
1854 Comment cmnt(masm_,
"[ Assignment");
1858 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1859 LhsKind assign_type = VARIABLE;
1860 Property*
property = expr->target()->AsProperty();
1861 if (property !=
NULL) {
1862 assign_type = (
property->key()->IsPropertyName())
1868 switch (assign_type) {
1872 case NAMED_PROPERTY:
1873 if (expr->is_compound()) {
1875 VisitForStackValue(property->obj());
1878 VisitForStackValue(property->obj());
1881 case KEYED_PROPERTY: {
1882 if (expr->is_compound()) {
1883 VisitForStackValue(property->obj());
1884 VisitForStackValue(property->key());
1885 __ mov(
edx, Operand(
esp, kPointerSize));
1886 __ mov(ecx, Operand(
esp, 0));
1888 VisitForStackValue(property->obj());
1889 VisitForStackValue(property->key());
1897 if (expr->is_compound()) {
1898 AccumulatorValueContext result_context(
this);
1899 { AccumulatorValueContext left_operand_context(
this);
1900 switch (assign_type) {
1902 EmitVariableLoad(expr->target()->AsVariableProxy());
1903 PrepareForBailout(expr->target(),
TOS_REG);
1905 case NAMED_PROPERTY:
1906 EmitNamedPropertyLoad(property);
1907 PrepareForBailoutForId(property->LoadId(),
TOS_REG);
1909 case KEYED_PROPERTY:
1910 EmitKeyedPropertyLoad(property);
1911 PrepareForBailoutForId(property->LoadId(),
TOS_REG);
1918 VisitForAccumulatorValue(expr->value());
1920 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1923 SetSourcePosition(expr->position() + 1);
1924 if (ShouldInlineSmiCase(op)) {
1925 EmitInlineSmiBinaryOp(expr->binary_operation(),
1931 EmitBinaryOp(expr->binary_operation(), op,
mode);
1935 PrepareForBailout(expr->binary_operation(),
TOS_REG);
1937 VisitForAccumulatorValue(expr->value());
1941 SetSourcePosition(expr->position());
1944 switch (assign_type) {
1946 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1948 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
1949 context()->Plug(
eax);
1951 case NAMED_PROPERTY:
1952 EmitNamedPropertyAssignment(expr);
1954 case KEYED_PROPERTY:
1955 EmitKeyedPropertyAssignment(expr);
1961 void FullCodeGenerator::VisitYield(Yield* expr) {
1962 Comment cmnt(masm_,
"[ Yield");
1965 VisitForStackValue(expr->expression());
1967 switch (expr->yield_kind()) {
1968 case Yield::SUSPEND:
1970 EmitCreateIteratorResult(
false);
1971 __ push(result_register());
1973 case Yield::INITIAL: {
1974 Label suspend, continuation, post_runtime, resume;
1978 __ bind(&continuation);
1982 VisitForAccumulatorValue(expr->generator_object());
1994 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
1995 __ mov(context_register(),
1997 __ bind(&post_runtime);
1998 __ pop(result_register());
1999 EmitReturnSequence();
2002 context()->Plug(result_register());
2006 case Yield::FINAL: {
2007 VisitForAccumulatorValue(expr->generator_object());
2012 EmitCreateIteratorResult(
true);
2013 EmitUnwindBeforeReturn();
2014 EmitReturnSequence();
2018 case Yield::DELEGATING: {
2019 VisitForStackValue(expr->generator_object());
2025 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2026 Label l_next, l_call, l_loop;
2028 __ mov(
eax, isolate()->factory()->undefined_value());
2033 handler_table()->set(expr->index(),
Smi::FromInt(l_catch.pos()));
2034 __ mov(ecx, isolate()->factory()->throw_string());
2036 __ push(Operand(
esp, 2 * kPointerSize));
2045 __ PushTryHandler(StackHandler::CATCH, expr->index());
2049 __ bind(&l_continuation);
2051 __ bind(&l_suspend);
2052 const int generator_object_depth = kPointerSize + handler_size;
2053 __ mov(
eax, Operand(
esp, generator_object_depth));
2062 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2063 __ mov(context_register(),
2066 EmitReturnSequence();
2072 __ mov(ecx, isolate()->factory()->next_string());
2074 __ push(Operand(
esp, 2 * kPointerSize));
2079 __ mov(
edx, Operand(
esp, kPointerSize));
2080 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2083 __ mov(Operand(
esp, 2 * kPointerSize),
edi);
2094 __ mov(ecx, isolate()->factory()->done_string());
2103 __ mov(ecx, isolate()->factory()->value_string());
2105 context()->DropAndPlug(2,
eax);
2112 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2119 VisitForStackValue(generator);
2120 VisitForAccumulatorValue(value);
2124 Label wrong_state, closed_state, done;
2130 __ j(
less, &wrong_state);
2143 __ mov(ecx, isolate()->factory()->the_hole_value());
2144 Label push_argument_holes, push_frame;
2145 __ bind(&push_argument_holes);
2149 __ jmp(&push_argument_holes);
2154 __ bind(&push_frame);
2155 __ call(&resume_frame);
2157 __ bind(&resume_frame);
2172 __ cmp(
edx, Immediate(0));
2181 __ bind(&slow_resume);
2186 Label push_operand_holes, call_resume;
2187 __ bind(&push_operand_holes);
2188 __ sub(
edx, Immediate(1));
2191 __ jmp(&push_operand_holes);
2192 __ bind(&call_resume);
2194 __ push(result_register());
2196 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2198 __ Abort(kGeneratorFailedToResume);
2201 __ bind(&closed_state);
2204 __ push(Immediate(isolate()->factory()->undefined_value()));
2206 EmitCreateIteratorResult(
true);
2210 __ CallRuntime(Runtime::kHiddenThrow, 1);
2215 __ bind(&wrong_state);
2217 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2220 context()->Plug(result_register());
2224 void FullCodeGenerator::EmitCreateIteratorResult(
bool done) {
2228 Handle<Map>
map(isolate()->native_context()->generator_result_map());
2233 __ bind(&gc_required);
2235 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2236 __ mov(context_register(),
2239 __ bind(&allocated);
2242 __ mov(edx, isolate()->factory()->ToBoolean(done));
2246 isolate()->factory()->empty_fixed_array());
2248 isolate()->factory()->empty_fixed_array());
2255 ecx, edx, kDontSaveFPRegs);
2259 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2260 SetSourcePosition(prop->position());
2261 Literal* key = prop->key()->AsLiteral();
2262 ASSERT(!key->value()->IsSmi());
2263 __ mov(ecx, Immediate(key->value()));
2268 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2269 SetSourcePosition(prop->position());
2270 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2271 CallIC(ic, prop->PropertyFeedbackId());
2275 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2279 Expression* right) {
2282 Label smi_case, done, stub_call;
2286 JumpPatchSite patch_site(masm_);
2287 patch_site.EmitJumpIfSmi(
eax, &smi_case, Label::kNear);
2289 __ bind(&stub_call);
2291 BinaryOpICStub stub(op, mode);
2292 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2293 patch_site.EmitPatchInfo();
2294 __ jmp(&done, Label::kNear);
2312 __ cmp(
eax, 0xc0000000);
2316 __ bind(&result_ok);
2325 __ test(
eax, Immediate(0xc0000000));
2329 __ bind(&result_ok);
2355 case Token::BIT_AND:
2358 case Token::BIT_XOR:
2366 context()->Plug(
eax);
2370 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2374 BinaryOpICStub stub(op, mode);
2375 JumpPatchSite patch_site(masm_);
2376 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2377 patch_site.EmitPatchInfo();
2378 context()->Plug(
eax);
2382 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2383 ASSERT(expr->IsValidLeftHandSide());
2387 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2388 LhsKind assign_type = VARIABLE;
2389 Property* prop = expr->AsProperty();
2391 assign_type = (prop->key()->IsPropertyName())
2396 switch (assign_type) {
2398 Variable* var = expr->AsVariableProxy()->var();
2399 EffectContext context(
this);
2400 EmitVariableAssignment(var, Token::ASSIGN);
2403 case NAMED_PROPERTY: {
2405 VisitForAccumulatorValue(prop->obj());
2408 __ mov(ecx, prop->key()->AsLiteral()->value());
2412 case KEYED_PROPERTY: {
2414 VisitForStackValue(prop->obj());
2415 VisitForAccumulatorValue(prop->key());
2419 Handle<Code> ic = strict_mode() ==
SLOPPY
2420 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2421 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2426 context()->Plug(
eax);
2430 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2432 __ mov(location,
eax);
2433 if (var->IsContextSlot()) {
2436 __ RecordWriteContextSlot(ecx, offset, edx,
ebx, kDontSaveFPRegs);
2441 void FullCodeGenerator::EmitCallStoreContextSlot(
2445 __ push(Immediate(name));
2447 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2451 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2453 if (var->IsUnallocated()) {
2455 __ mov(ecx, var->name());
2459 }
else if (op == Token::INIT_CONST_LEGACY) {
2461 ASSERT(!var->IsParameter());
2462 if (var->IsLookupSlot()) {
2465 __ push(Immediate(var->name()));
2466 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2468 ASSERT(var->IsStackLocal() || var->IsContextSlot());
2471 __ mov(edx, location);
2472 __ cmp(edx, isolate()->factory()->the_hole_value());
2474 EmitStoreToStackLocalOrContextSlot(var, location);
2478 }
else if (var->mode() ==
LET && op != Token::INIT_LET) {
2480 if (var->IsLookupSlot()) {
2481 EmitCallStoreContextSlot(var->name(), strict_mode());
2483 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2486 __ mov(edx, location);
2487 __ cmp(edx, isolate()->factory()->the_hole_value());
2489 __ push(Immediate(var->name()));
2490 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2492 EmitStoreToStackLocalOrContextSlot(var, location);
2495 }
else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2498 if (var->IsLookupSlot()) {
2499 EmitCallStoreContextSlot(var->name(), strict_mode());
2501 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2503 if (generate_debug_code_ && op == Token::INIT_LET) {
2505 __ mov(edx, location);
2506 __ cmp(edx, isolate()->factory()->the_hole_value());
2507 __ Check(
equal, kLetBindingReInitialization);
2509 EmitStoreToStackLocalOrContextSlot(var, location);
2516 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2521 Property* prop = expr->target()->AsProperty();
2526 SetSourcePosition(expr->position());
2527 __ mov(ecx, prop->key()->AsLiteral()->value());
2529 CallStoreIC(expr->AssignmentFeedbackId());
2530 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
2531 context()->Plug(
eax);
2535 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2544 SetSourcePosition(expr->position());
2545 Handle<Code> ic = strict_mode() ==
SLOPPY
2546 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2547 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2548 CallIC(ic, expr->AssignmentFeedbackId());
2550 PrepareForBailoutForId(expr->AssignmentId(),
TOS_REG);
2551 context()->Plug(
eax);
2555 void FullCodeGenerator::VisitProperty(Property* expr) {
2556 Comment cmnt(masm_,
"[ Property");
2557 Expression* key = expr->key();
2559 if (key->IsPropertyName()) {
2560 VisitForAccumulatorValue(expr->obj());
2561 __ mov(edx, result_register());
2562 EmitNamedPropertyLoad(expr);
2563 PrepareForBailoutForId(expr->LoadId(),
TOS_REG);
2564 context()->Plug(
eax);
2566 VisitForStackValue(expr->obj());
2567 VisitForAccumulatorValue(expr->key());
2569 __ mov(ecx, result_register());
2570 EmitKeyedPropertyLoad(expr);
2571 context()->Plug(
eax);
2576 void FullCodeGenerator::CallIC(Handle<Code>
code,
2577 TypeFeedbackId ast_id) {
2579 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2586 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2587 Expression* callee = expr->expression();
2588 ZoneList<Expression*>* args = expr->arguments();
2589 int arg_count = args->length();
2593 if (callee->IsVariableProxy()) {
2594 { StackValueContext context(
this);
2595 EmitVariableLoad(callee->AsVariableProxy());
2600 __ push(Immediate(isolate()->factory()->undefined_value()));
2604 ASSERT(callee->IsProperty());
2605 __ mov(edx, Operand(
esp, 0));
2606 EmitNamedPropertyLoad(callee->AsProperty());
2607 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
TOS_REG);
2609 __ push(Operand(
esp, 0));
2610 __ mov(Operand(
esp, kPointerSize),
eax);
2615 { PreservePositionScope scope(masm()->positions_recorder());
2616 for (
int i = 0; i < arg_count; i++) {
2617 VisitForStackValue(args->at(i));
2622 SetSourcePosition(expr->position());
2623 CallFunctionStub stub(arg_count, flags);
2624 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2626 RecordJSReturnSite(expr);
2631 context()->DropAndPlug(1,
eax);
2636 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2639 VisitForAccumulatorValue(key);
2641 Expression* callee = expr->expression();
2642 ZoneList<Expression*>* args = expr->arguments();
2643 int arg_count = args->length();
2646 ASSERT(callee->IsProperty());
2647 __ mov(edx, Operand(
esp, 0));
2650 EmitKeyedPropertyLoad(callee->AsProperty());
2651 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
TOS_REG);
2654 __ push(Operand(
esp, 0));
2655 __ mov(Operand(
esp, kPointerSize),
eax);
2658 { PreservePositionScope scope(masm()->positions_recorder());
2659 for (
int i = 0; i < arg_count; i++) {
2660 VisitForStackValue(args->at(i));
2665 SetSourcePosition(expr->position());
2667 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2669 RecordJSReturnSite(expr);
2674 context()->DropAndPlug(1,
eax);
2678 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2680 ZoneList<Expression*>* args = expr->arguments();
2681 int arg_count = args->length();
2682 { PreservePositionScope scope(masm()->positions_recorder());
2683 for (
int i = 0; i < arg_count; i++) {
2684 VisitForStackValue(args->at(i));
2688 SetSourcePosition(expr->position());
2690 Handle<Object> uninitialized =
2692 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2693 __ LoadHeapObject(
ebx, FeedbackVector());
2698 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2701 RecordJSReturnSite(expr);
2704 context()->DropAndPlug(1,
eax);
2708 void FullCodeGenerator::EmitResolvePossiblyDirectEval(
int arg_count) {
2710 if (arg_count > 0) {
2711 __ push(Operand(
esp, arg_count * kPointerSize));
2713 __ push(Immediate(isolate()->factory()->undefined_value()));
2725 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2729 void FullCodeGenerator::VisitCall(Call* expr) {
2733 expr->return_is_recorded_ =
false;
2736 Comment cmnt(masm_,
"[ Call");
2737 Expression* callee = expr->expression();
2738 Call::CallType call_type = expr->GetCallType(isolate());
2740 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2744 ZoneList<Expression*>* args = expr->arguments();
2745 int arg_count = args->length();
2746 { PreservePositionScope pos_scope(masm()->positions_recorder());
2747 VisitForStackValue(callee);
2749 __ push(Immediate(isolate()->factory()->undefined_value()));
2751 for (
int i = 0; i < arg_count; i++) {
2752 VisitForStackValue(args->at(i));
2757 __ push(Operand(
esp, (arg_count + 1) * kPointerSize));
2758 EmitResolvePossiblyDirectEval(arg_count);
2762 __ mov(Operand(
esp, (arg_count + 0) * kPointerSize), edx);
2763 __ mov(Operand(
esp, (arg_count + 1) * kPointerSize),
eax);
2766 SetSourcePosition(expr->position());
2768 __ mov(
edi, Operand(
esp, (arg_count + 1) * kPointerSize));
2770 RecordJSReturnSite(expr);
2773 context()->DropAndPlug(1,
eax);
2775 }
else if (call_type == Call::GLOBAL_CALL) {
2776 EmitCallWithIC(expr);
2778 }
else if (call_type == Call::LOOKUP_SLOT_CALL) {
2780 VariableProxy* proxy = callee->AsVariableProxy();
2782 { PreservePositionScope scope(masm()->positions_recorder());
2790 __ push(context_register());
2791 __ push(Immediate(proxy->name()));
2792 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2798 if (done.is_linked()) {
2800 __ jmp(&call, Label::kNear);
2806 __ push(Immediate(isolate()->factory()->undefined_value()));
2812 EmitCallWithStub(expr);
2814 }
else if (call_type == Call::PROPERTY_CALL) {
2815 Property*
property = callee->AsProperty();
2816 { PreservePositionScope scope(masm()->positions_recorder());
2817 VisitForStackValue(property->obj());
2819 if (property->key()->IsPropertyName()) {
2820 EmitCallWithIC(expr);
2822 EmitKeyedCallWithIC(expr, property->key());
2826 ASSERT(call_type == Call::OTHER_CALL);
2828 { PreservePositionScope scope(masm()->positions_recorder());
2829 VisitForStackValue(callee);
2831 __ push(Immediate(isolate()->factory()->undefined_value()));
2833 EmitCallWithStub(expr);
2838 ASSERT(expr->return_is_recorded_);
2843 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2844 Comment cmnt(masm_,
"[ CallNew");
2852 VisitForStackValue(expr->expression());
2855 ZoneList<Expression*>* args = expr->arguments();
2856 int arg_count = args->length();
2857 for (
int i = 0; i < arg_count; i++) {
2858 VisitForStackValue(args->at(i));
2863 SetSourcePosition(expr->position());
2866 __ Move(
eax, Immediate(arg_count));
2867 __ mov(
edi, Operand(
esp, arg_count * kPointerSize));
2870 Handle<Object> uninitialized =
2872 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2873 if (FLAG_pretenuring_call_new) {
2874 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2875 isolate()->factory()->NewAllocationSite());
2876 ASSERT(expr->AllocationSiteFeedbackSlot() ==
2877 expr->CallNewFeedbackSlot() + 1);
2880 __ LoadHeapObject(
ebx, FeedbackVector());
2881 __ mov(edx, Immediate(
Smi::FromInt(expr->CallNewFeedbackSlot())));
2884 __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2885 PrepareForBailoutForId(expr->ReturnId(),
TOS_REG);
2886 context()->Plug(
eax);
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2891 ZoneList<Expression*>* args = expr->arguments();
2892 ASSERT(args->length() == 1);
2894 VisitForAccumulatorValue(args->at(0));
2896 Label materialize_true, materialize_false;
2897 Label* if_true =
NULL;
2898 Label* if_false =
NULL;
2899 Label* fall_through =
NULL;
2900 context()->PrepareTest(&materialize_true, &materialize_false,
2901 &if_true, &if_false, &fall_through);
2903 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2905 Split(
zero, if_true, if_false, fall_through);
2907 context()->Plug(if_true, if_false);
2911 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2912 ZoneList<Expression*>* args = expr->arguments();
2913 ASSERT(args->length() == 1);
2915 VisitForAccumulatorValue(args->at(0));
2917 Label materialize_true, materialize_false;
2918 Label* if_true =
NULL;
2919 Label* if_false =
NULL;
2920 Label* fall_through =
NULL;
2921 context()->PrepareTest(&materialize_true, &materialize_false,
2922 &if_true, &if_false, &fall_through);
2924 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2926 Split(
zero, if_true, if_false, fall_through);
2928 context()->Plug(if_true, if_false);
2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2933 ZoneList<Expression*>* args = expr->arguments();
2934 ASSERT(args->length() == 1);
2936 VisitForAccumulatorValue(args->at(0));
2938 Label materialize_true, materialize_false;
2939 Label* if_true =
NULL;
2940 Label* if_false =
NULL;
2941 Label* fall_through =
NULL;
2942 context()->PrepareTest(&materialize_true, &materialize_false,
2943 &if_true, &if_false, &fall_through);
2945 __ JumpIfSmi(
eax, if_false);
2946 __ cmp(
eax, isolate()->factory()->null_value());
2957 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2958 Split(
below_equal, if_true, if_false, fall_through);
2960 context()->Plug(if_true, if_false);
2964 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2965 ZoneList<Expression*>* args = expr->arguments();
2966 ASSERT(args->length() == 1);
2968 VisitForAccumulatorValue(args->at(0));
2970 Label materialize_true, materialize_false;
2971 Label* if_true =
NULL;
2972 Label* if_false =
NULL;
2973 Label* fall_through =
NULL;
2974 context()->PrepareTest(&materialize_true, &materialize_false,
2975 &if_true, &if_false, &fall_through);
2977 __ JumpIfSmi(
eax, if_false);
2979 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
2980 Split(
above_equal, if_true, if_false, fall_through);
2982 context()->Plug(if_true, if_false);
2986 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2987 ZoneList<Expression*>* args = expr->arguments();
2988 ASSERT(args->length() == 1);
2990 VisitForAccumulatorValue(args->at(0));
2992 Label materialize_true, materialize_false;
2993 Label* if_true =
NULL;
2994 Label* if_false =
NULL;
2995 Label* fall_through =
NULL;
2996 context()->PrepareTest(&materialize_true, &materialize_false,
2997 &if_true, &if_false, &fall_through);
2999 __ JumpIfSmi(
eax, if_false);
3003 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3004 Split(
not_zero, if_true, if_false, fall_through);
3006 context()->Plug(if_true, if_false);
3010 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3011 CallRuntime* expr) {
3012 ZoneList<Expression*>* args = expr->arguments();
3013 ASSERT(args->length() == 1);
3015 VisitForAccumulatorValue(args->at(0));
3017 Label materialize_true, materialize_false, skip_lookup;
3018 Label* if_true =
NULL;
3019 Label* if_false =
NULL;
3020 Label* fall_through =
NULL;
3021 context()->PrepareTest(&materialize_true, &materialize_false,
3022 &if_true, &if_false, &fall_through);
3024 __ AssertNotSmi(
eax);
3036 __ cmp(ecx, isolate()->factory()->hash_table_map());
3042 Label entry, loop, done;
3045 __ NumberOfOwnDescriptors(ecx,
ebx);
3049 __ LoadInstanceDescriptors(
ebx,
ebx);
3065 __ cmp(edx, isolate()->factory()->value_of_string());
3081 __ bind(&skip_lookup);
3086 __ JumpIfSmi(ecx, if_false);
3094 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3095 Split(
equal, if_true, if_false, fall_through);
3097 context()->Plug(if_true, if_false);
3101 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3102 ZoneList<Expression*>* args = expr->arguments();
3103 ASSERT(args->length() == 1);
3105 VisitForAccumulatorValue(args->at(0));
3107 Label materialize_true, materialize_false;
3108 Label* if_true =
NULL;
3109 Label* if_false =
NULL;
3110 Label* fall_through =
NULL;
3111 context()->PrepareTest(&materialize_true, &materialize_false,
3112 &if_true, &if_false, &fall_through);
3114 __ JumpIfSmi(
eax, if_false);
3116 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3117 Split(
equal, if_true, if_false, fall_through);
3119 context()->Plug(if_true, if_false);
3123 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3124 ZoneList<Expression*>* args = expr->arguments();
3125 ASSERT(args->length() == 1);
3127 VisitForAccumulatorValue(args->at(0));
3129 Label materialize_true, materialize_false;
3130 Label* if_true =
NULL;
3131 Label* if_false =
NULL;
3132 Label* fall_through =
NULL;
3133 context()->PrepareTest(&materialize_true, &materialize_false,
3134 &if_true, &if_false, &fall_through);
3143 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3144 Split(
equal, if_true, if_false, fall_through);
3146 context()->Plug(if_true, if_false);
3151 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3152 ZoneList<Expression*>* args = expr->arguments();
3153 ASSERT(args->length() == 1);
3155 VisitForAccumulatorValue(args->at(0));
3157 Label materialize_true, materialize_false;
3158 Label* if_true =
NULL;
3159 Label* if_false =
NULL;
3160 Label* fall_through =
NULL;
3161 context()->PrepareTest(&materialize_true, &materialize_false,
3162 &if_true, &if_false, &fall_through);
3164 __ JumpIfSmi(
eax, if_false);
3166 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3167 Split(
equal, if_true, if_false, fall_through);
3169 context()->Plug(if_true, if_false);
3173 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3174 ZoneList<Expression*>* args = expr->arguments();
3175 ASSERT(args->length() == 1);
3177 VisitForAccumulatorValue(args->at(0));
3179 Label materialize_true, materialize_false;
3180 Label* if_true =
NULL;
3181 Label* if_false =
NULL;
3182 Label* fall_through =
NULL;
3183 context()->PrepareTest(&materialize_true, &materialize_false,
3184 &if_true, &if_false, &fall_through);
3186 __ JumpIfSmi(
eax, if_false);
3188 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3189 Split(
equal, if_true, if_false, fall_through);
3191 context()->Plug(if_true, if_false);
3196 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3197 ASSERT(expr->arguments()->length() == 0);
3199 Label materialize_true, materialize_false;
3200 Label* if_true =
NULL;
3201 Label* if_false =
NULL;
3202 Label* fall_through =
NULL;
3203 context()->PrepareTest(&materialize_true, &materialize_false,
3204 &if_true, &if_false, &fall_through);
3210 Label check_frame_marker;
3217 __ bind(&check_frame_marker);
3220 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3221 Split(
equal, if_true, if_false, fall_through);
3223 context()->Plug(if_true, if_false);
3227 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3228 ZoneList<Expression*>* args = expr->arguments();
3229 ASSERT(args->length() == 2);
3232 VisitForStackValue(args->at(0));
3233 VisitForAccumulatorValue(args->at(1));
3235 Label materialize_true, materialize_false;
3236 Label* if_true =
NULL;
3237 Label* if_false =
NULL;
3238 Label* fall_through =
NULL;
3239 context()->PrepareTest(&materialize_true, &materialize_false,
3240 &if_true, &if_false, &fall_through);
3244 PrepareForBailoutBeforeSplit(expr,
true, if_true, if_false);
3245 Split(
equal, if_true, if_false, fall_through);
3247 context()->Plug(if_true, if_false);
3251 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments();
3253 ASSERT(args->length() == 1);
3257 VisitForAccumulatorValue(args->at(0));
3262 context()->Plug(
eax);
3266 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3267 ASSERT(expr->arguments()->length() == 0);
3285 context()->Plug(
eax);
3289 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3290 ZoneList<Expression*>* args = expr->arguments();
3291 ASSERT(args->length() == 1);
3292 Label done, null,
function, non_function_constructor;
3294 VisitForAccumulatorValue(args->at(0));
3297 __ JumpIfSmi(
eax, &null);
3331 __ mov(
eax, isolate()->factory()->function_class_string());
3335 __ bind(&non_function_constructor);
3336 __ mov(
eax, isolate()->factory()->Object_string());
3341 __ mov(
eax, isolate()->factory()->null_value());
3346 context()->Plug(
eax);
3350 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3358 ZoneList<Expression*>* args = expr->arguments();
3361 VisitForStackValue(args->at(1));
3362 VisitForStackValue(args->at(2));
3363 __ CallRuntime(Runtime::kHiddenLog, 2);
3366 __ mov(
eax, isolate()->factory()->undefined_value());
3367 context()->Plug(
eax);
3371 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3374 ZoneList<Expression*>* args = expr->arguments();
3375 ASSERT(args->length() == 3);
3376 VisitForStackValue(args->at(0));
3377 VisitForStackValue(args->at(1));
3378 VisitForStackValue(args->at(2));
3380 context()->Plug(
eax);
3384 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3386 RegExpExecStub stub;
3387 ZoneList<Expression*>* args = expr->arguments();
3388 ASSERT(args->length() == 4);
3389 VisitForStackValue(args->at(0));
3390 VisitForStackValue(args->at(1));
3391 VisitForStackValue(args->at(2));
3392 VisitForStackValue(args->at(3));
3394 context()->Plug(
eax);
3398 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3399 ZoneList<Expression*>* args = expr->arguments();
3400 ASSERT(args->length() == 1);
3402 VisitForAccumulatorValue(args->at(0));
3406 __ JumpIfSmi(
eax, &done, Label::kNear);
3413 context()->Plug(
eax);
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  __ JumpIfSmi(object, &not_date_object);

  if (index->value() == 0) {
    // Field 0 is the date value itself.
  } else {
    // Cached fields are valid as long as the object's cache stamp matches
    // the isolate-wide date cache stamp.
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ mov(scratch, Operand::StaticVariable(stamp));
    __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                        kPointerSize * index->value()));

    // Slow case: ask the C runtime for the field.
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  context()->Plug(result);
}

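// Sketch of the fast path (assuming the JSDate layout used above): date
// fields other than the raw value are cached inside the object; when the
// object's cache stamp equals date_cache_stamp the cached slot can be read
// directly, otherwise get_date_field_function recomputes it in C++.
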
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));        // index
  VisitForStackValue(args->at(2));        // value
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    // Both the index and the value are expected to be smis.
    __ Check(zero, kNonSmiValue);
    __ Check(zero, kNonSmiValue);
  }

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  context()->Plug(string);
}

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));        // index
  VisitForStackValue(args->at(2));        // value
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    // Both the index and the value are expected to be smis.
    __ Check(zero, kNonSmiValue);
    __ Check(zero, kNonSmiValue);
  }

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  context()->Plug(string);
}

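// Note: the two intrinsics above differ only in the element width of the
// sequential string they write into -- one byte per character for one-byte
// strings versus two bytes for two-byte strings (the actual store
// instructions are elided in this listing).
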
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the two arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kMath_pow, 2);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  __ bind(&done);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  // Load the argument into eax and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(ebx);
}

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
}

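// The two slow paths mirror String.prototype.charCodeAt: an out-of-range
// index produces NaN, and a non-smi index is routed through number
// conversion before the lookup is retried.
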
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  Label need_conversion;
  Label index_out_of_range;

  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  context()->Plug(result);
}

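// Same structure as EmitStringCharCodeAt above, but for charAt: an
// out-of-range index yields the empty string rather than NaN.
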
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  context()->Plug(eax);
}

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for a non-function argument, including proxies.
  __ JumpIfSmi(eax, &runtime);

  // InvokeFunction requires the function in edi.  Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);

  __ CallRuntime(Runtime::kCall, args->length());

  context()->Plug(eax);
}

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register cache = ebx;

  Label done, not_found;

  __ bind(&not_found);
  // Call the runtime to perform the lookup.
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  context()->Plug(eax);
}

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  context()->Plug(eax);
}

void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array).
  VisitForAccumulatorValue(args->at(0));

  Register array = eax;
  Register elements = no_reg;  // Will be eax.
  Register index = edx;
  Register string_length = ecx;
  Register string = esi;
  Register scratch = ebx;
  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // The separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));

  // Check that the array is a JSArray with fast elements.
  __ JumpIfSmi(array, &bailout);
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ SmiUntag(array_length);
  __ mov(result_operand, isolate()->factory()->empty_string());

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths.
  __ Move(index, Immediate(0));
  __ Move(string_length, Immediate(0));
  // Loop condition: while (index < length).
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ JumpIfSmi(string, &bailout);
  __ and_(scratch, Immediate(kIsNotStringMask | kStringEncodingMask |
                             kStringRepresentationMask));
  __ add(string_length, FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ add(index, Immediate(1));
  __ cmp(index, array_length);

  // If the array has only one element, return that element.
  __ cmp(array_length, 1);
  __ mov(result_operand, scratch);

  __ bind(&not_size_one_array);

  // End of array_length live range (it has been consumed).
  result_pos = array_length;

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ and_(scratch, Immediate(kIsNotStringMask | kStringEncodingMask |
                             kStringRepresentationMask));

  // string_length := (sum of element lengths) +
  //                  (array_length - 1) * separator length.
  __ mov(scratch, separator_operand);
  __ sub(string_length, scratch);
  __ imul(scratch, array_length_operand);
  __ add(string_length, scratch);
  __ shr(string_length, 1);  // Untag the combined length.

  // Allocate the result string and bail out on failure.
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);

  // Dispatch on the length of the separator.
  __ mov(string, separator_operand);
  __ j(equal, &one_char_separator);

  // Loop 1: empty separator -- copy the elements back to back.
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  __ mov(string_length, FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);

  // Loop 2: one-character separator.
  __ bind(&one_char_separator);
  // Replace the separator operand with its single character.
  __ mov_b(separator_operand, scratch);
  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ bind(&loop_2_entry);
  __ mov(string_length, FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ cmp(index, array_length_operand);

  // Loop 3: long separator.
  __ bind(&long_separator);
  __ Move(index, Immediate(0));
  __ jmp(&loop_3_entry);
  // Copy the separator before each element (except the first one).
  __ mov(string, separator_operand);
  __ mov(string_length, FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ bind(&loop_3_entry);
  __ mov(string_length, FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ cmp(index, array_length_operand);

  // Bailout stores undefined as the result.
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ mov(eax, result_operand);
  // Drop temp values from the stack.
  __ add(esp, Immediate(3 * kPointerSize));
  context()->Plug(eax);
}

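// Overall shape (as far as this listing shows): validate that the array
// and separator are flat ASCII, sum the element lengths, allocate a single
// result string, then run one of three copy loops -- loop_1 for an empty
// separator, loop_2 for a one-character separator, loop_3 for longer
// separators.  Any unexpected shape jumps to `bailout`, which leaves
// undefined so the JS caller falls back to the generic join.
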
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Load the function from the receiver.
    __ mov(edx, Operand(esp, 0));
    __ mov(ecx, Immediate(expr->name()));

    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Push the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    context()->DropAndPlug(1, eax);
  } else {
    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(eax);
  }
}

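// Illustrative context: %-prefixed calls in JS source (available with
// --allow-natives-syntax) end up here.  Roughly, inline intrinsics such as
// %_IsConstructCall(...) are expanded by the Emit* helpers above, JS
// runtime functions go through the call-IC path, and everything else
// becomes a C++ runtime call.
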
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict
        // mode but "delete this" is allowed.
        if (var->IsUnallocated()) {
          __ push(Immediate(var->name()));
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is
        // true.  The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit
        // the subexpression.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}

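// Illustrative examples of the DELETE cases above (sketch):
//
//   delete obj.prop;   // property: pushes obj/key, calls the builtin
//   delete globalVar;  // unallocated: resolved against the global object
//   delete localVar;   // stack/context slot: statically false (sloppy mode)
//   delete this;       // plugs true
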
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // The expression can only be a property, a global or a (parameter or
  // local) slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate the expression and get the value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for the result of a postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForAccumulatorValue(prop->obj());
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value in case
  // evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline the smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save the result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // For a named or keyed property we store the result under the
        // receiver that is currently on top of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    // If the smi operation overflowed, undo it and fall through to the
    // stub call.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save the result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before the stub call.
  SetSourcePosition(expr->position());

  __ bind(&stub_call);
  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);  // Assumed stub setup.
  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        }
        // For all contexts except the effect context the result is on top
        // of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->value());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}

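// Postfix vs. prefix in a nutshell: for postfix operations in a value
// context the original value is saved first (pushed, or stored under the
// receiver/key already on the stack) before the increment, so the
// expression yields the old value while the new one is written back.
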
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(ecx, Immediate(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(eax, if_false);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    // Undetectable objects also answer "undefined".
    __ JumpIfSmi(eax, if_false);
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(eax, if_false);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
    }
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}

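// Sketch of what this compiles: literal comparisons of the form
//
//   typeof x === "number"   // "string", "function", ...
//
// are lowered to direct type checks on the value of x instead of
// materializing the typeof string and comparing it.
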
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of the
  // operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The instanceof stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}

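// Semantics recap: `x === null` (EQ_STRICT) compiles to a single pointer
// compare against the one nil value, while sloppy `x == null` must also
// accept the other nil and undetectable objects, hence the CompareNilIC.
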
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}

Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}

void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty
    // function as their closure, not the anonymous closure containing the
    // global code.
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook the return address on top of the stack into a Code-relative smi.
  ASSERT(!result_register().is(edx));
  __ sub(edx, Immediate(masm_->CodeObject()));

  // Store the result register while executing the finally block.
  __ push(result_register());

  // Store the pending message while executing the finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}

void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore the pending message from the stack, in reverse order.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore the result register from the stack.
  __ pop(result_register());

  // Uncook the return address.
  __ add(edx, Immediate(masm_->CodeObject()));
}

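// Note the symmetry: everything EnterFinallyBlock saves (the result
// register and the three pending-message values) is restored here in
// exactly the reverse order, so the stack stays balanced across the
// finally code.
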
#undef __
#define __ ACCESS_MASM(masm())


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
  }
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;

static const byte kCallInstruction = 0xe8;

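// Illustrative sketch of the two byte patterns toggled by PatchAt below,
// derived from the constants above (0x79 = jns rel8, 0x66 0x90 = two-byte
// nop, 0xe8 = call rel32):
//
//   Interrupt check active:            Patched for on-stack replacement:
//     79 11           jns ok             66 90           nop
//     e8 xx xx xx xx  call <interrupt>   e8 xx xx xx xx  call <osr entry>
//   ok:                                ok:
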
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      // Restore the conditional jump over the interrupt call.
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // Replace the jump with a two-byte nop so the call is always taken.
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  ASSERT_EQ(kNopByteOne, *jns_instr_address);
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}

4945 #endif // V8_TARGET_ARCH_IA32