#if V8_TARGET_ARCH_IA32

#define __ ACCESS_MASM(masm)
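
// Probes one table (primary or secondary) of the megamorphic stub cache.
// Each entry is a 12-byte {name, code, map} triple (see the
// ASSERT(sizeof(Entry) == 12) in GenerateProbe below); 'offset' arrives
// holding the hashed entry index, and a valid 'extra' register lets the
// probe keep the code entry in a register instead of on the stack.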
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register receiver,
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    // ...

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    // ...

    // Check that the flags match what we're looking for.
    __ cmp(offset, flags);
    // ... (jump to the code entry on a hit, fall through on a miss)
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    // ...

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    // ...

    // Restore offset register.
    __ mov(offset, Operand(esp, 0));

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the flags match what we're looking for.
    __ cmp(offset, flags);
    // ...

    // Restore offset and re-load code entry from cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
    // ... (jump to the code entry on a hit, fall through on a miss)
  }
}
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);
  // ...

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label);
  // ...

  // Load the properties array and check that it is a dictionary.
  Register properties = scratch0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->hash_table_map()));
  __ j(not_equal, miss_label);
  // ... (probe the dictionary for a negative result)

  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
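
// Probes the megamorphic stub cache for a {receiver map, name} pair. The
// primary-table index hashes the name's hash field with the receiver's
// map and the code flags; on a primary miss, a second hash (note the
// sub/add of 'name' and 'flags' below) selects a secondary-table entry.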
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Label miss;

  // Assert that code is valid. The multiplying code relies on the entry
  // size being 12.
  ASSERT(sizeof(Entry) == 12);

  // Assert that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  // ...

  Register offset = scratch;
  // ...

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the primary hash.
  // ...
  __ xor_(offset, flags);
  // ...

  // Probe the primary table.
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);

  // Primary miss: compute the hash for the secondary table.
  // ...
  __ xor_(offset, flags);
  // ...
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  // ... (mask the hash and probe the secondary table)

  // Cache miss: fall through into the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(masm->isolate()->native_context()->get(index)));
  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ mov(scratch, Operand(esi, offset));
  // ... (compare the cached function and jump to miss on mismatch)

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
  // ...
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);
  // ...
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
  __ ret(0);
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  // ...
}
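
// The interceptor machinery below passes its arguments on the stack; the
// push order has to agree with the StubCache::kInterceptorArgs* indices
// that the IC runtime entries expect.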
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  // ...
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}
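
// Generates a call to an API function. The fixed registers set up below
// (eax callee, ebx call data, ecx holder, edx function address) are the
// inputs of CallApiFunctionStub, which is tail-called at the end, so no
// frame is constructed here.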
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  // Copy return value.
  __ pop(scratch_in);
  // Receiver.
  __ push(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ push(arg);
  }
  // ...
  ASSERT(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = eax;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register scratch = edi;  // scratch_in is no longer valid.

  // Put the holder in the holder register.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadHeapObject(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put the callee in the callee register.
  __ LoadHeapObject(callee, function);

  bool call_data_undefined = false;
  // Put the call data in the call_data register.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ mov(scratch, api_call_info);
    __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
  } else {
    __ mov(call_data, call_data_obj);
  }

  // Put the api function address in the proper register.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ mov(api_function_address, Immediate(function_address));

  // Jump to the stub.
  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                       Label* label,
                                       Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Immediate(name));
  }
}
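
// Generates code to check that a global property cell contains the hole,
// i.e. that the property is still absent. The cell is created at
// compilation time if it does not exist yet, which is what makes the
// generated check valid for negative lookups on global objects.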
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
  if (Serializer::enabled()) {
    __ mov(scratch, Immediate(cell));
    __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
           Immediate(the_hole));
  } else {
    __ cmp(Operand::ForCell(cell), Immediate(the_hole));
  }
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}
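
// Stores a property while transitioning to a new map. The value is first
// validated against the target field representation (constant, smi, heap
// object or double; doubles are boxed into a freshly allocated HeapNumber
// in storage_reg). If the object has no unused property slots, the store
// is punted to the SharedStoreIC_ExtendStorage runtime entry before any
// state is modified.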
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      // ... (load the double value into xmm0)
    } else {
      // ...
    }

    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      // ... (store the double into the new HeapNumber)
    } else {
      // ...
    }
  }

  // Check that the object isn't a proxy and doesn't require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3, 1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition));
  // ...

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg, /* ... */);
  // ...

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of
  // the object and the number of in-object properties is not going to
  // change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg, /* ... */);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1, /* ... */);
    }
  }
  // ... (return the value in eax)
}
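
// Stores a property whose map does not change. The structure mirrors
// GenerateStoreTransition above, except that a double store writes into
// the already-allocated HeapNumber box instead of allocating a new one,
// and no map update or transition write barrier is needed.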
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Check that the object isn't a proxy and doesn't require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Load the double storage (the field holds a HeapNumber).
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ mov(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      // ...
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    // ...
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      // ... (load the double value into xmm0)
    } else {
      // ...
    }
    // ... (bind &do_store, write the double into the storage)
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      // ...
    } else {
      // ...
    }
    // ... (return the value in eax)
  }

  ASSERT(!representation.IsDouble());
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg, /* ... */);
    }
  } else {
    // Write to the properties array.
    // ...
    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1, /* ... */);
    }
  }
  // ... (return the value in eax)
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#define __ ACCESS_MASM(masm())
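
// Walks the prototype chain from the receiver's map to the holder's map,
// generating the checks that keep a handler valid: map checks for fast
// objects, negative dictionary lookups for normal (dictionary-mode)
// objects, and property-cell plus access checks for global objects and
// proxies. Returns the register holding the holder.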
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map = IC::TypeToMap(*type, isolate());

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain
  // for fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        // ... (internalize the name)
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);
      // ... (load the prototype from the map)
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      // ... (check the current map)
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }
      // ... (new-space prototypes are loaded via the map)
      // The prototype is in old space; load it directly.
      __ mov(reg, prototype);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
  // ... (check the holder map)

  // Only global objects and objects that do not require access
  // checks are allowed in stubs.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ... (bind the miss label and tail-call the miss builtin)
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ... (restore the name and tail-call the miss builtin)
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
                                                   Register object_reg,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Handle<Object> callback) {
  Label miss;
  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    Register dictionary = scratch1();
    bool must_preserve_dictionary_reg = reg.is(dictionary);

    // Load the properties dictionary, saving the register if needed.
    if (must_preserve_dictionary_reg) {
      __ push(dictionary);
    }
    // ...

    // Probe the dictionary.
    Label probe_done, pop_and_miss;
    // ...
    __ bind(&pop_and_miss);
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    // ... (jump to miss)
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, check that the value is
    // the callback.
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    // ...
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    // ...
  }
  // ... (frontend footer; return reg)
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}
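
// Loads a property whose getter is an ExecutableAccessorInfo: the
// accessor arguments (data, return values, isolate, holder) are pushed
// on the stack, then CallApiGetterStub invokes the C++ getter found at
// callback->getter().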
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  // ...

  // Push data from ExecutableAccessorInfo.
  if (isolate()->heap()->InNewSpace(callback->data())) {
    ASSERT(!scratch2().is(reg));
    __ mov(scratch2(), Immediate(callback));
    __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback->data(), isolate())));
  }
  __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
  // ReturnValue default value.
  __ push(Immediate(isolate()->factory()->undefined_value()));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  // ... (push the holder and the accessor arguments pointer)

  Register getter_address = edx;
  Address function_address = v8::ToCData<Address>(callback->getter());
  __ mov(getter_address, Immediate(function_address));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(eax, value);
  __ ret(0);
}
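
// Loads a property that is guarded by a named interceptor. When the
// lookup behind the interceptor is cacheable (a field, or a compatible
// ExecutableAccessorInfo getter), the interceptor is invoked in
// "for load only" mode and the inline fast path continues unless it
// returns the no_interceptor_result_sentinel; otherwise the whole load
// is handed to the kLoadPropertyWithInterceptorForLoad runtime entry.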
void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // ...

    // Preserve the receiver register explicitly whenever it is different
    // from the holder and it is needed afterwards either for the callback
    // or for the post-interceptor lookup.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver());
      }
      __ push(holder_reg);
      __ push(this->name());

      // Invoke the interceptor without causing a miss on failure.
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if the interceptor provided a value for the property. If it
      // did, return immediately.
      Label interceptor_failed;
      __ cmp(eax, factory()->no_interceptor_result_sentinel());
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      // Clobber registers when generating debug-code to provoke errors.
      __ bind(&interceptor_failed);
      if (FLAG_debug_code) {
        // ...
        __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
        // ...
      }

      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }
    // ... (post-interceptor load)
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // ...
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    // ...
    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ pop(scratch1());  // Remove the return address.
  __ push(receiver());
  __ push(holder_reg);
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // Restore the return address.

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#define __ ACCESS_MASM(masm)
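
// Invokes a JavaScript setter via InvokeFunction. For a global object
// the global receiver is swapped in first; when the setter handle is
// null, only a deopt PC offset is recorded so that optimized code can
// resume here after deoptimization.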
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ...
  if (!setter.is_null()) {
    // Call the JavaScript setter with receiver and value on the stack.
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
      // Swap in the global receiver.
      // ...
    }
    // ...
    ParameterCount actual(1);
    ParameterCount expected(setter);
    __ InvokeFunction(setter, expected, actual,
                      CALL_FUNCTION, NullCallWrapper());
  } else {
    // If we generate a global code snippet for deoptimization only,
    // remember the place to continue after deoptimization.
    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}


#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  // ...
  __ pop(scratch1());  // Remove the return address.
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(scratch1());  // Restore the return address.

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  // ...
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
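
// Dispatcher for a polymorphic keyed store: the receiver's map is
// compared against each handled map; entries that carry a transitioned
// map install it in transition_map() before jumping to the handler, and
// everything else falls through to the miss handler.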
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  // ... (tail-call the miss builtin and return the IC code)
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // ...
}


Register* KeyedLoadStubCompiler::registers() {
  // ...
}


Register StoreStubCompiler::value() {
  // ...
}


Register* StoreStubCompiler::registers() {
  // ...
}


Register* KeyedStoreStubCompiler::registers() {
  // ...
}


#define __ ACCESS_MASM(masm)
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ...
  if (!getter.is_null()) {
    // Call the JavaScript getter with the receiver on the stack.
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
      // Swap in the global receiver.
      // ...
    }
    // ...
    ParameterCount actual(0);
    ParameterCount expected(getter);
    __ InvokeFunction(getter, expected, actual,
                      CALL_FUNCTION, NullCallWrapper());
  } else {
    // If we generate a global code snippet for deoptimization only,
    // remember the place to continue after deoptimization.
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}


#define __ ACCESS_MASM(masm())
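
// Compiles a load of a global property that lives in a PropertyCell.
// Deletable properties (!is_dont_delete) require a hole check, since
// deleting the property writes the hole into the cell; non-deletable
// ones only verify this in debug code.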
Handle<Code> LoadStubCompiler::CompileLoadGlobal(Handle<HeapType> type,
                                                 Handle<GlobalObject> global,
                                                 Handle<PropertyCell> cell,
                                                 Handle<Name> name,
                                                 bool is_dont_delete) {
  Label miss;
  // ...

  // Get the value from the cell.
  if (Serializer::enabled()) {
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ ret(0);
  // ... (frontend footer; return the generated code)
}
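
// Dispatcher for a polymorphic load/store IC. Keyed ICs first verify the
// name, then every non-deprecated map in 'types' is compared against the
// receiver's map; a Number type also binds number_case, the target of
// the smi check, just before its map compare so smis reach the same
// handler as heap numbers.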
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ cmp(this->name(), Immediate(name));
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  // ... (tail-call the miss builtin and return the IC code)
}


#define __ ACCESS_MASM(masm)
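
// Keyed load fallback for receivers with dictionary (slow-mode)
// elements. Per the ia32 KeyedLoadIC register convention, ecx holds the
// key and edx the receiver; non-smi keys and failed probes end in the
// generic slow/miss builtins.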
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // Check that the key is a smi.
  __ JumpIfNotSmi(ecx, &miss);
  // ... (probe the number dictionary and return the element)

  __ bind(&slow);
  // ...
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ...
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#endif  // V8_TARGET_ARCH_IA32