#if V8_TARGET_ARCH_IA32

#define __ ACCESS_MASM(masm)
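
// Adaptor from the JavaScript calling convention to a C++ builtin: pushes
// any extra arguments, adjusts the (receiver-inclusive) argument count in
// eax, and tail-calls the C++ function identified by id.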
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  int num_extra_args = 0;

    Register scratch = ebx;

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(eax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
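
// Calls the given runtime function, passing the target JSFunction (edi) as
// its single argument; the result comes back in eax.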
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {

  __ CallRuntime(function_id, 1);
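
// Tail call to the code attached to the function's SharedFunctionInfo,
// i.e. the generic unoptimized entry point.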
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
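
// Tail call to the code object that the preceding runtime call returned
// in eax.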
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
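
// The function is in the optimization queue. The stack-limit comparison
// below is a cheap cue for a pending interrupt: if one is pending, try to
// install already-optimized code now; otherwise keep running the shared
// (unoptimized) code.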
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  GenerateTailCallToSharedCode(masm);
}
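
// Common body of the JSConstruct stubs. Allocates the new object inline
// when FLAG_inline_new permits it (optionally with a trailing
// AllocationMemento), falls back to the runtime otherwise, then invokes
// the constructor function.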
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);

  // Should never create mementos before being able to allocate it.
  ASSERT(!count_constructions || !create_memento);
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(ebx);
    }

    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      __ JumpIfSmi(eax, &rt_call);

      if (count_constructions) {

        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);

      if (create_memento) {

      Factory* factory = masm->isolate()->factory();

      __ mov(ecx, factory->empty_fixed_array());

      __ mov(edx, factory->undefined_value());
      if (count_constructions) {

        if (FLAG_debug_code) {
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ mov(edx, factory->one_pointer_filler_map());
      } else if (create_memento) {

        Handle<Map> allocation_memento_map = factory->allocation_memento_map();
        __ mov(Operand(esi, AllocationMemento::kMapOffset),
               allocation_memento_map);

      __ Assert(positive, kPropertyAllocationCountFailed);

      __ mov(eax, factory->fixed_array_map());

      __ mov(edx, factory->undefined_value());

      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(ebx);
    }

    // Allocate the new receiver object using the runtime call.
    __ bind(&rt_call);
    if (create_memento) {

    if (create_memento) {
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
    }

    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    if (create_memento) {

      __ cmp(ecx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);

      __ bind(&count_incremented);
    }

    if (is_api_function) {

      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(eax);

    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    Label use_receiver, exit;

    __ JumpIfSmi(eax, &use_receiver);

    __ bind(&use_receiver);

  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
}

void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
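
// Entry trampoline shared by JSEntryTrampoline and JSConstructEntryTrampoline:
// sets up the context, copies the arguments from the argv array onto the
// stack, and invokes the function (as a construct call when is_construct).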
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {

  // Clear the context before we push it when entering the internal frame.
  __ Move(esi, Immediate(0));

  __ Move(ecx, Immediate(0));

  __ push(Operand(edx, 0));

  __ mov(ebx, masm->isolate()->factory()->undefined_value());

  ParameterCount actual(eax);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
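
// Lazy compilation stub: compiles the function via the runtime and then
// tail-calls the freshly returned code object.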

void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}
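
// Calls the optimizing compiler through the runtime, passing whether the
// compilation should happen concurrently as a boolean argument.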
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {

  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
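
// Makes the code of the calling function young again: rewinds the return
// address on the stack past the patched call sequence, then calls a C
// helper that resets the age of the calling code object.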
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {

  // Re-execute the code that was patched back to the young age when
  // the stub returns (the call instruction is 5 bytes on ia32).
  __ sub(Operand(esp, 0), Immediate(5));
  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2, ebx);
  __ mov(Operand(esp, 1 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
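
// Marks the calling code object as executed once: same shape as the aging
// helpers above, but calls the "mark as executed" C function instead.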

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {

  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2, ebx);
  __ mov(Operand(esp, 1 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
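
// Notifies the deoptimizer that a stub has failed, optionally preserving
// the FP registers across the runtime call.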
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {

  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  if (Serializer::enabled()) {
    PlatformFeatureScope sse2(SSE2);
    Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
  } else {
    Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
  }
}
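
// Notifies the deoptimizer of a deopt of the given bailout type, then
// dispatches on the returned frame state to drop or keep the TOS register.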
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {

  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);

  Label not_no_registers, not_tos_eax;

  __ bind(&not_no_registers);

  __ bind(&not_tos_eax);
  __ Abort(kNoCasesLeft);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
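
// CALL_FUNCTION builtin: ensures there is at least one argument (the
// receiver), checks that the callee really is a JSFunction (otherwise
// routing to the CALL_FUNCTION_PROXY or CALL_NON_FUNCTION builtins),
// converts or patches the receiver where required, then shifts the
// arguments down over the callee and invokes it.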
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  Factory* factory = masm->isolate()->factory();

  __ push(Immediate(factory->undefined_value()));

  Label slow, non_function;

  __ JumpIfSmi(edi, &non_function);

  Label shift_arguments;
  __ Move(edx, Immediate(0));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;

    __ JumpIfSmi(ebx, &convert_to_object);
    __ cmp(ebx, factory->null_value());
    __ j(equal, &use_global_receiver);
    __ cmp(ebx, factory->undefined_value());
    __ j(equal, &use_global_receiver);

    __ bind(&convert_to_object);

      __ Move(edx, Immediate(0));

    __ jmp(&patch_receiver);

    __ bind(&use_global_receiver);

    __ bind(&patch_receiver);

    __ jmp(&shift_arguments);
  }

  __ Move(edx, Immediate(1));  // indicate function proxy

  __ bind(&non_function);
  __ Move(edx, Immediate(2));  // indicate non-function

  __ bind(&shift_arguments);

  { Label function, non_proxy;

    __ Move(ebx, Immediate(0));
    __ cmp(edx, Immediate(1));

    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());

  ParameterCount expected(0);
}
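
// Function.prototype.apply: after checking there is enough stack space for
// the maximum number of arguments, pushes each element of the arguments
// array (via a keyed load) and invokes the function; proxies go through
// CALL_FUNCTION_PROXY and the adaptor trampoline.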
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {

  __ push(Operand(ebp, kFunctionOffset));   // push this
  __ push(Operand(ebp, kArgumentsOffset));  // push arguments

  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(edi, Operand::StaticVariable(real_stack_limit));

  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;

  __ push(Immediate(0));  // index

  __ mov(ebx, Operand(ebp, kReceiverOffset));

  Label push_receiver, use_global_receiver;
  __ mov(edi, Operand(ebp, kFunctionOffset));

  Label call_to_object;

  Factory* factory = masm->isolate()->factory();

  __ JumpIfSmi(ebx, &call_to_object);
  __ cmp(ebx, factory->null_value());
  __ j(equal, &use_global_receiver);
  __ cmp(ebx, factory->undefined_value());
  __ j(equal, &use_global_receiver);

  __ bind(&call_to_object);

  __ jmp(&push_receiver);

  __ bind(&use_global_receiver);

  __ bind(&push_receiver);

  __ mov(ecx, Operand(ebp, kIndexOffset));

  __ mov(edx, Operand(ebp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
  __ call(ic, RelocInfo::CODE_TARGET);

  // Update the index on the stack and in register ecx.
  __ mov(ecx, Operand(ebp, kIndexOffset));
  __ add(ecx, Immediate(1 << kSmiTagSize));
  __ mov(Operand(ebp, kIndexOffset), ecx);

  __ cmp(ecx, Operand(ebp, kLimitOffset));

  ParameterCount actual(eax);

  __ mov(edi, Operand(ebp, kFunctionOffset));

  frame_scope.GenerateLeaveFrame();
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

  // Call the function proxy.
  __ bind(&call_proxy);

  __ Move(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);

  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}
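
// InternalArray constructor entry: verifies the initial map in debug mode
// and tail-calls the InternalArrayConstructorStub.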
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {

  Label generic_array_code;

  if (FLAG_debug_code) {

    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);

    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
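
// Array constructor entry: like InternalArray above, but passes the
// AllocationSite feedback (undefined here) in ebx to the stub.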

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {

  Label generic_array_code;

  if (FLAG_debug_code) {

    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);

    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(ebx, masm->isolate()->factory()->undefined_value());
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
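
// The String constructor's construct path: looks the argument up in the
// number-string cache, allocates a JSValue wrapper for string arguments,
// and falls back to the runtime (kNewStringWrapper) otherwise.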
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {

    __ Assert(equal, kUnexpectedStringFunction);
  }

  __ j(zero, &no_arguments);

  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(eax,  // Input.
                             ebx,  // Result.
                             ecx,  // Scratch 1.
                             edx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  __ LoadGlobalFunctionInitialMap(edi, ecx);
  if (FLAG_debug_code) {

    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);

    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }

  Factory* factory = masm->isolate()->factory();
  __ Move(ecx, Immediate(factory->empty_fixed_array()));

  Label convert_argument;
  __ bind(&not_cached);

  __ JumpIfSmi(eax, &convert_argument);

  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);

  __ jmp(&argument_is_string);

  // No arguments were given: default to the empty string.
  __ bind(&no_arguments);
  __ Move(ebx, Immediate(factory->empty_string()));

  __ lea(esp, Operand(esp, kPointerSize));

  __ jmp(&argument_is_string);

  // The allocation failed: call into the runtime to create the wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);

  __ CallRuntime(Runtime::kNewStringWrapper, 1);
}
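
// The arguments adaptor builds a frame around calls whose actual argument
// count (eax) differs from the callee's formal parameter count (ebx),
// padding missing arguments with undefined or dropping extra ones.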
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);

  Label enough, too_few;

  __ j(equal, &dont_adapt_arguments);

  EnterArgumentsAdaptorFrame(masm);

  __ push(Operand(eax, 0));
  __ sub(eax, Immediate(kPointerSize));

  EnterArgumentsAdaptorFrame(masm);

  __ sub(eax, Immediate(1));

  __ push(Operand(edi, 0));
  __ sub(edi, Immediate(kPointerSize));

  // Fill remaining expected arguments with undefined values.
  __ push(Immediate(masm->isolate()->factory()->undefined_value()));

  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  LeaveArgumentsAdaptorFrame(masm);

  __ bind(&dont_adapt_arguments);
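
// On-stack replacement: asks the runtime for the OSR code of the function
// and, if available, enters it at the right offset; otherwise returns to
// the unoptimized code.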

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {

  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);

  // If the code object is null, just return to the unoptimized code.
  __ cmp(eax, Immediate(0));
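
// Checked after an OSR-candidate loop: runs the stack guard if the stack
// limit was hit, then continues into the OnStackReplacement builtin.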

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {

  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));

  __ CallRuntime(Runtime::kHiddenStackGuard, 0);

  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);
}

#undef __

#endif  // V8_TARGET_ARCH_IA32