#if V8_TARGET_ARCH_X64

#define __ ACCESS_MASM(masm)

static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       Register offset) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(
      isolate->stub_cache()->value_reference(table));
  Label miss;
  // ...
  __ leap(offset, Operand(offset, offset, times_2, 0));
  // ...
  // Check that the flags match what we're looking for.
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);
  // ...
}

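// In outline: a stub cache entry is three words { name, code, map }, which
// is why the lea above scales the entry offset by 3 (offset + offset * 2).
// A probe hits only when the name, the receiver's map and the code flags
// all match; any mismatch branches to the local miss label and control
// returns to the caller's probe sequence.
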
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);
  // ...
  Register properties = scratch0;
  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);
  // ...
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}

void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  // ...
  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  // ...
  __ xorp(scratch, Immediate(flags));
  // ...
  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: compute the hash for the secondary probe.
  // ...
  __ xorp(scratch, Immediate(flags));
  // ...
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  // ...
  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: fall through and let the caller handle it via the runtime.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}

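// Roughly, the two probes hash as follows (table-size masking omitted):
//
//   primary   = (name->hash_field + receiver->map) ^ flags
//   secondary = (primary - name) + flags
//
// The primary table is probed first; on a miss the secondary table is
// probed, and only when both miss does execution fall through to the
// megamorphic runtime path.
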
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // ...
}

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ movp(scratch, Operand(rsi, offset));
  // ...
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}

void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);
  // ...
}

void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  if (!result.is(rax)) __ movp(rax, result);
  __ ret(0);
}

void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  // ...
}

static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  // ...
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  // ...
}

static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}

// Generate a call to an api function.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(optimization.is_simple_api_call());

  __ PopReturnAddressTo(scratch_in);
  // receiver
  __ Push(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ Push(arg);
  }
  __ PushReturnAddressFrom(scratch_in);
  // The stack now matches the JSFunction ABI.

  // ABI for CallApiFunctionStub.
  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register scratch = rdi;  // scratch_in is no longer valid.

  // Put the holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Move(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put the callee in place.
  __ Move(callee, function);

  bool call_data_undefined = false;
  // Put the call data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ Move(scratch, api_call_info);
    __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ Move(call_data, call_data_obj);
  }

  // Put the api function address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ Move(
      api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);

  // Jump to the stub.
  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}

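// CallApiFunctionStub expects a fixed register ABI, set up above: the
// callee JSFunction in rax, the call data in rbx, the holder in rcx and
// the C++ callback address in rdx, with the receiver and arguments already
// on the stack in the JSFunction calling convention.
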
void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ Move(this->name(), name);
  }
}

void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}

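// A global property is "absent" exactly as long as its property cell still
// holds the hole. Embedding the cell in the code and comparing its value
// against the hole keeps this negative check valid until the property is
// actually created, at which point the comparison above starts missing.
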
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}

void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Handle<Map> transition,
                                           Handle<Name> name,
                                           Register receiver_reg,
                                           Register storage_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register unused,
                                           Label* miss_label,
                                           Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor),
                            masm->isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    // ...
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform a map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ Push(receiver_reg);
    // ...
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3, 1);
    return;
  }

  // Update the map of the object and the write barrier for the map field.
  __ Move(scratch1, transition);
  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // ...
  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of
  // the object and the number of in-object properties is not going to
  // change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ movp(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movp(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the object's field.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      // ...
    }
  } else {
    // Write to the properties array.
    // ...
    if (representation.IsDouble()) {
      // ...
    }
    if (!representation.IsSmi()) {
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      // ...
    }
  }
  // ...
}

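// Note how the transitioning store is specialized on the new field's
// representation: smi and heap-object values are guarded by a tag check,
// double values are boxed into a freshly allocated heap number held in
// storage_reg, and the write barrier is omitted entirely when the
// representation guarantees a smi.
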
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      LookupResult* lookup,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register value_reg,
                                      Register scratch1,
                                      Register scratch2,
                                      Label* miss_label) {
  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of
  // the object and the number of in-object properties is not going to
  // change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ movp(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      // ...
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch2, value_reg);
    __ Cvtlsi2sd(xmm0, scratch2);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    // ...
    return;
  }

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movp(FieldOperand(receiver_reg, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the object's field. Pass the value in
      // name_reg; the name register is no longer needed.
      __ movp(name_reg, value_reg);
      // ...
    }
  } else {
    // Write to the properties array.
    // ...
    if (!representation.IsSmi()) {
      __ movp(name_reg, value_reg);
      // ...
    }
  }
  // ...
}

void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}

#undef __
#define __ ACCESS_MASM((masm()))

Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg; on later iterations, it is
  // an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain
  // for fast and global objects, or do a negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access checks are
    // allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);
      // ...
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      // ...
      // Check access rights to the global object. This has to happen after
      // the map check, so that we know the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }
      // ...
      __ Move(reg, prototype);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
  // ...
  // Perform a security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}

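// Each step of the chain walk uses one of two strategies: fast-mode and
// global objects are pinned by a map check (or a property-cell check),
// while dictionary-mode objects get a negative lookup proving the name is
// absent from their property dictionary. A successful walk therefore
// guarantees the property can only come from the holder.
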
void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
  }
}

Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
                                                   Register object_reg,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary and probe it for the property name.
    Register dictionary = scratch4();
    Label probe_done;
    // ...
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, check that the value at
    // the found index is the callback.
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    // ...
    __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
    __ cmpp(scratch2(), scratch3());
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}

void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ movp(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}

void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above the return
  // address.
  ASSERT(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());
  // ...
  if (heap()->InNewSpace(callback->data())) {
    // ...
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  // ...
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  // ...
  __ PushReturnAddressFrom(scratch4());

  // ABI for CallApiGetterStub.
  Register api_function_address = r8;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}

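// The pushes above lay out a PropertyCallbackArguments block directly
// above the return address, so the C++ getter receives a pointer into this
// stub's own frame; CallApiGetterStub then invokes the getter through
// api_function_address (r8).
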
void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // ...
    // Preserve the receiver register explicitly whenever it is different
    // from the holder and it is needed should the interceptor return
    // without any result. The CALLBACKS case needs the receiver to be
    // passed into C++ code, the FIELD case might cause a miss during the
    // prototype check.
    bool must_perform_prototype_check =
        *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save the necessary data before invoking the interceptor, requiring a
    // frame to make the GC aware of the pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ Push(receiver());
      }
      __ Push(holder_reg);
      __ Push(this->name());

      // Invoke the interceptor. Note: map checks from the receiver to the
      // interceptor's holder have been compiled before (see a caller of
      // this method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if the interceptor provided a value for the property. If so,
      // return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ Pop(this->name());
      __ Pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ Pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // ...
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    // ...
    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(
        ref, StubCache::kInterceptorArgsLength, 1);
  }
}

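// Two paths: when the post-interceptor lookup is cacheable (a field, or a
// compatible ExecutableAccessorInfo getter), the interceptor is invoked
// with "Only" semantics and the follow-up load is compiled inline behind
// the sentinel check; otherwise the whole load is delegated to the
// kLoadPropertyWithInterceptorForLoad runtime entry.
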
void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ Cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ Cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}

Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(holder_reg);
  // ... (callback info, name, value)
  __ PushReturnAddressFrom(scratch1());

  // Do a tail call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}

#undef __
#define __ ACCESS_MASM(masm)

void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ...
  if (!setter.is_null()) {
    // Call the JavaScript setter with the receiver and value on the stack.
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
      // Swap in the global receiver.
      // ...
    }
    // ...
    ParameterCount actual(1);
    ParameterCount expected(setter);
    __ InvokeFunction(setter, expected, actual,
                      CALL_FUNCTION, NullCallWrapper());
  } else {
    // If we generate a global code snippet for deoptimization only,
    // remember the place to continue after deoptimization.
    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}

#undef __
#define __ ACCESS_MASM(masm())

Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  // ...
  // Do a tail call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}

void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare the tail call to the StoreIC_ArrayLength runtime entry.
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}

Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  // ...
  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    // Check the map and tail call if there's a match.
    __ Cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ Move(transition_map(),
              transitioned_maps->at(i),
              RelocInfo::EMBEDDED_OBJECT);
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  // ...
}

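// Polymorphic store dispatch is a linear chain of map compares: each
// handler is reached by a direct jump when its map matches, and
// transitioning stores additionally load the target map into
// transition_map() before jumping to the handler.
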
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);
  // ...
}

Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
  return registers;
}

Register* KeyedLoadStubCompiler::registers() { /* ... */ }

Register StoreStubCompiler::value() { return rax; }

Register* StoreStubCompiler::registers() { /* ... */ }

Register* KeyedStoreStubCompiler::registers() { /* ... */ }

#undef __
#define __ ACCESS_MASM(masm)

void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ...
  if (!getter.is_null()) {
    // Call the JavaScript getter with the receiver on the stack.
    if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
      // Swap in the global receiver.
      // ...
    }
    __ Push(receiver);
    ParameterCount actual(0);
    ParameterCount expected(getter);
    __ InvokeFunction(getter, expected, actual,
                      CALL_FUNCTION, NullCallWrapper());
  } else {
    // If we generate a global code snippet for deoptimization only,
    // remember the place to continue after deoptimization.
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}

#undef __
#define __ ACCESS_MASM(masm())

Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, cell);
  __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));

  // Check for deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movp(rax, rbx);
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}

Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Cmp(this->name(), name);
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check the map and tail call if there's a match.
      __ Cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
    }
  }
  ASSERT(number_of_handled_maps > 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));
  // ...
}

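// As in the keyed store case above, polymorphic load dispatch is a linear
// map-compare chain. Deprecated maps are skipped outright, and a smi
// receiver is routed via number_case to the handler registered for the
// Number type.
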
#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  Label slow, miss;

  __ JumpIfNotSmi(rax, &miss);
  // ...
  __ bind(&slow);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}

#endif  // V8_TARGET_ARCH_X64