#if defined(V8_TARGET_ARCH_IA32)

#define __ ACCESS_MASM(masm)
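// Probes one table (primary or secondary) of the megamorphic stub cache.
// Each entry is addressed through external references for its key, value and
// map columns; the property name, receiver map and code flags are compared
// before jumping to the cached handler on a hit.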
static void ProbeTable(Isolate* isolate,
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
  __ lea(offset, Operand(offset, offset, times_2, 0));
  if (extra.is_valid()) {
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, flags);
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ mov(offset, Operand(esp, 0));
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
    __ cmp(offset, flags);
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
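// Generates a negative lookup on a dictionary-mode (slow) holder: bails out
// to the miss label if the map needs an access check or has a named
// interceptor, or if the property name is present in the dictionary.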
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);
  const int kInterceptorOrAccessCheckNeededMask =
            kInterceptorOrAccessCheckNeededMask);
  Register properties = r0;
          Immediate(masm->isolate()->factory()->hash_table_map()));
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
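// Probe sequence for the megamorphic stub cache: after the receiver smi
// check the hash offset is combined with the code flags, the primary table
// is probed, and on a miss the offset is recomputed for the secondary table
// before falling through to the miss counter.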
  ASSERT(sizeof(Entry) == 12);
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  Register offset = scratch;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
  __ JumpIfSmi(receiver, &miss);
  __ xor_(offset, flags);
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
  __ xor_(offset, flags);
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
         masm->isolate()->global());
  Handle<JSFunction> function(
  __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss_label);


static void GenerateStringCheck(MacroAssembler* masm,
                                Label* non_string_object) {
  __ JumpIfSmi(receiver, smi);


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            bool support_wrappers) {
  GenerateStringCheck(masm, receiver, scratch1, miss,
                      support_wrappers ? &check_wrapper : miss);
  if (support_wrappers) {
    __ bind(&check_wrapper);
    GenerateStringCheck(masm, scratch2, scratch1, miss, miss);


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
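// Loads a fast-mode property: after subtracting the number of in-object
// properties, a negative index addresses an in-object field relative to the
// instance size, while a non-negative index goes through the out-of-object
// properties backing store.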
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Handle<JSObject> holder,
  index -= holder->map()->inobject_properties();
  int offset = holder->map()->instance_size() + (index * kPointerSize);


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),


static const int kFastApiCallArguments = 4;


static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  for (int i = 0; i < kFastApiCallArguments; i++) {


static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
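// Emits the actual call to a fast API callback: the callee JSFunction and
// its call data are materialized, PrepareCallApiFunction sets up the
// outgoing API frame, and CallApiFunctionAndReturn performs the call and
// drops the arguments plus the slots reserved by ReserveSpaceForFastApiCall.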
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(edi, function);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ mov(ecx, api_call_info);
         Immediate(reinterpret_cast<int>(masm->isolate())));
  const int kApiArgc = 1;
  const int kApiStackSpace = 4;
  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ CallApiFunctionAndReturn(function_address,
                              argc + kFastApiCallArguments + 1);
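// Helper that compiles the interceptor path for call ICs. If the lookup
// behind the interceptor resolves to a constant function, the call can be
// cached (optionally as a fast API call); otherwise the regular runtime
// interceptor call is emitted.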
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        extra_state_(extra_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               LookupResult* lookup,
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,

  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        const CallOptimization& optimization,
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      depth2 = optimization.GetPrototypeDepthOfExpectedType(
          interceptor_holder, Handle<JSObject>(lookup->holder()));
      can_do_fast_api_call =
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);
    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate());
    __ InvokeFunction(optimization.constant_function(), arguments_,
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Handle<JSObject> interceptor_holder,
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,
    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),

  void LoadWithInterceptor(MacroAssembler* masm,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    CompileCallLoadPropertyWithInterceptor(masm,
    __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ jmp(code, RelocInfo::CODE_TARGET);


void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(code, RelocInfo::CODE_TARGET);


void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      Handle<Map> transition,
                                      Register receiver_reg,
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
  __ CheckMap(receiver_reg, Handle<Map>(object->map()),
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } while (holder->GetPrototype()->IsJSObject());
    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ bind(&done_check);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
  if (!transition.is_null()) {
    __ mov(scratch1, Immediate(transition));
    __ RecordWriteField(receiver_reg,
    index -= object->map()->inobject_properties();
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(name_reg, eax);
    __ RecordWriteField(receiver_reg,
    __ mov(name_reg, eax);
    __ RecordWriteField(scratch1,
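// Verifies that the property cell of a global object still holds the hole,
// i.e. the property has not been introduced since the stub was compiled.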
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
  Handle<JSGlobalPropertyCell> cell =
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
    __ mov(scratch, Immediate(cell));
           Immediate(the_hole));
    __ cmp(Operand::Cell(cell), Immediate(the_hole));


static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));


#define __ ACCESS_MASM(masm())
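// Walks the prototype chain from the receiver to the holder, checking each
// map along the way and emitting negative dictionary lookups or global-proxy
// access checks where required. Returns the register that ends up holding
// the holder object.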
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Handle<JSObject> holder,
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));
  Register reg = object_reg;
  Handle<JSObject> current = object;
  if (save_at_depth == depth) {
  while (!current.is_identical_to(holder)) {
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
    bool in_new_space = heap()->InNewSpace(*prototype);
    Handle<Map> current_map(current->map());
    if (current->IsJSGlobalProxy()) {
      __ CheckAccessGlobalProxy(reg, scratch2, miss);
      __ mov(reg, prototype);
    if (save_at_depth == depth) {
    current = prototype;
  ASSERT(current.is_identical_to(holder));
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
  __ CheckMap(reg, Handle<Map>(holder->map()),
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);


void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Handle<String> name,
  __ JumpIfSmi(receiver, miss);
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  GenerateFastPropertyLoad(masm(), eax, reg, holder, index);


void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss);
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);
  ASSERT(!scratch3.is(reg));
  __ mov(scratch2, esp);
  ASSERT(!scratch2.is(reg));
  if (isolate()->heap()->InNewSpace(callback->data())) {
    __ mov(scratch1, Immediate(callback));
    __ push(Immediate(Handle<Object>(callback->data())));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  const int kStackSpace = 6;
  const int kApiArgc = 2;
  __ PrepareCallApiFunction(kApiArgc);
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ CallApiFunctionAndReturn(getter_address, kStackSpace);


void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss);
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  __ LoadHeapObject(eax, value);
void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Handle<String> name,
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
  __ JumpIfSmi(receiver, miss);
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo()) {
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
  if (compile_followup_inline) {
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);
    if (must_preserve_receiver_reg) {
      __ push(holder_reg);
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           interceptor_holder);
    Label interceptor_failed;
    __ cmp(eax, factory()->no_interceptor_result_sentinel());
    __ j(equal, &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ bind(&interceptor_failed);
    if (FLAG_debug_code) {
      __ mov(receiver, Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(name_reg, Immediate(BitCast<int32_t>(kZapValue)));
    if (must_preserve_receiver_reg) {
    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   Handle<JSObject>(lookup->holder()),
    if (lookup->type() == FIELD) {
      GenerateFastPropertyLoad(masm(), eax, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());
      Handle<AccessorInfo> callback(
      __ push(holder_reg);
      __ mov(holder_reg, Immediate(callback));
      __ push(Immediate(reinterpret_cast<int>(isolate())));
      __ push(holder_reg);
      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
      __ TailCallExternalReference(ref, 6, 1);
    Register holder_reg =
        CheckPrototypes(object, receiver, interceptor_holder,
                        scratch1, scratch2, scratch3, name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
    __ TailCallExternalReference(ref, 6, 1);
void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  __ cmp(ecx, Immediate(name));


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
  ASSERT(holder->IsGlobalObject());
  const int argc = arguments().immediate();
  __ JumpIfSmi(edx, miss);
  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);


void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
  __ mov(edi, Immediate(cell));
  __ mov(edi, Operand::Cell(cell));
  if (isolate()->heap()->InNewSpace(*function)) {
  __ JumpIfSmi(edi, miss);
         Immediate(Handle<SharedFunctionInfo>(function->shared())));
  __ cmp(edi, Immediate(function));


void CallStubCompiler::GenerateMissBranch() {
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
  __ jmp(code, RelocInfo::CODE_TARGET);


                                               Handle<JSObject> holder,
                                               Handle<String> name) {
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  __ JumpIfSmi(edx, &miss);
  Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
  GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
  __ JumpIfSmi(edi, &miss);
  if (object->IsGlobalObject()) {
                    NullCallWrapper(), call_kind);
  GenerateMissBranch();
  return GetCode(FIELD, name);
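// Custom call IC for Array.prototype.push. The fast paths store the new
// element in place when capacity allows (with an elements-kind transition
// for smi-only arrays) or grow the backing store in new space; everything
// else falls back to the C++ builtin Builtins::c_ArrayPush.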
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) {
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  __ JumpIfSmi(edx, &miss);
  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
    Label attempt_to_grow_elements, with_write_barrier;
           Immediate(factory()->fixed_array_map()));
    __ j(greater, &attempt_to_grow_elements);
    __ JumpIfNotSmi(ecx, &with_write_barrier);
    __ bind(&with_write_barrier);
    if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
      Label fast_object, not_fast_object;
      __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
      __ jmp(&fast_object);
      __ bind(&not_fast_object);
      __ CheckFastSmiElements(ebx, &call_builtin);
      Label try_holey_map;
      __ jmp(&fast_object);
      __ bind(&try_holey_map);
      __ bind(&fast_object);
    __ CheckFastObjectElements(ebx, &call_builtin);
    __ bind(&attempt_to_grow_elements);
    if (!FLAG_inline_new) {
      __ jmp(&call_builtin);
    Label no_fast_elements_check;
    __ JumpIfSmi(ebx, &no_fast_elements_check);
    __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
    __ bind(&no_fast_elements_check);
    ExternalReference new_space_allocation_top =
        ExternalReference::new_space_allocation_top_address(isolate());
    ExternalReference new_space_allocation_limit =
        ExternalReference::new_space_allocation_limit_address(isolate());
    const int kAllocationDelta = 4;
    __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
    __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
    __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
    for (int i = 1; i < kAllocationDelta; i++) {
             Immediate(factory()->the_hole_value()));
    __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
    __ ret((argc + 1) * kPointerSize);
  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate()),
  GenerateMissBranch();
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) {
  Label miss, return_undefined, call_builtin;
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
  __ JumpIfSmi(edx, &miss);
  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
         Immediate(factory()->fixed_array_map()));
                           FixedArray::kHeaderSize));
  __ cmp(eax, Immediate(factory()->the_hole_value()));
                    FixedArray::kHeaderSize),
         Immediate(factory()->the_hole_value()));
  __ ret((argc + 1) * kPointerSize);
  __ bind(&return_undefined);
  __ mov(eax, Immediate(factory()->undefined_value()));
  __ ret((argc + 1) * kPointerSize);
  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
  GenerateMissBranch();
  return GetCode(function);


Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) {
  const int argc = arguments().immediate();
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
    index_out_of_range_label = &miss;
  GenerateNameCheck(name, &name_miss);
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
  Register receiver = ebx;
  Register index = edi;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
    __ Set(index, Immediate(factory()->undefined_value()));
  StringCharCodeAtGenerator generator(receiver,
                                      index_out_of_range_label,
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);
  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->nan_value()));
    __ ret((argc + 1) * kPointerSize);
  __ Set(ecx, Immediate(name));
  __ bind(&name_miss);
  GenerateMissBranch();
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) {
  const int argc = arguments().immediate();
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
    index_out_of_range_label = &miss;
  GenerateNameCheck(name, &name_miss);
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
  Register receiver = eax;
  Register index = edi;
  Register scratch = edx;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
    __ Set(index, Immediate(factory()->undefined_value()));
  StringCharAtGenerator generator(receiver,
                                  index_out_of_range_label,
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);
  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->empty_string()));
    __ ret((argc + 1) * kPointerSize);
  __ Set(ecx, Immediate(name));
  __ bind(&name_miss);
  GenerateMissBranch();
  return GetCode(function);


Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();
  if (!object->IsJSObject() || argc != 1) {
  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ mov(edx, Operand(esp, 2 * kPointerSize));
    __ JumpIfSmi(edx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);
  Register code = ebx;
  __ mov(code, Operand(esp, 1 * kPointerSize));
  __ JumpIfNotSmi(code, &slow);
  StringCharFromCodeGenerator generator(code, eax);
  generator.GenerateFast(masm());
  __ ret(2 * kPointerSize);
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);
                    NullCallWrapper(), call_kind);
  GenerateMissBranch();
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  CpuFeatures::Scope use_sse2(SSE2);
  const int argc = arguments().immediate();
  if (!object->IsJSObject() || argc != 1) {
  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ mov(edx, Operand(esp, 2 * kPointerSize));
    __ JumpIfSmi(edx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ JumpIfSmi(eax, &smi);
  Label wont_fit_into_smi;
  __ test(eax, Immediate(0xc0000000));
  __ ret(2 * kPointerSize);
  Label already_round;
  __ bind(&wont_fit_into_smi);
  __ LoadPowerOf2(xmm1, ebx, 0);
  __ AllocateHeapNumber(eax, ebx, edx, &slow);
  __ ret(2 * kPointerSize);
  __ bind(&already_round);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(2 * kPointerSize);
  GenerateMissBranch();
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);


Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();
  if (!object->IsJSObject() || argc != 1) {
  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ mov(edx, Operand(esp, 2 * kPointerSize));
    __ JumpIfSmi(edx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ JumpIfNotSmi(eax, &not_smi);
  __ ret(2 * kPointerSize);
  Label negative_sign;
  __ ret(2 * kPointerSize);
  __ bind(&negative_sign);
  __ AllocateHeapNumber(eax, edi, edx, &slow);
  __ ret(2 * kPointerSize);
  GenerateMissBranch();
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
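// Compiles a direct call to a simple API function. Global receivers,
// cell-based calls and non-JSObject receivers are rejected, stack space for
// the API call arguments is reserved before the prototype checks, and
// GenerateFastApiCall emits the actual invocation.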
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  ASSERT(optimization.is_simple_api_call());
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  Label miss, miss_before_stack_reserved;
  GenerateNameCheck(name, &miss_before_stack_reserved);
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
  __ JumpIfSmi(edx, &miss_before_stack_reserved);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);
  __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize));
  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                  name, depth, &miss);
  __ mov(eax, Operand(esp, 4 * kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), eax);
  GenerateFastApiCall(masm(), optimization, argc);
  __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();
  return GetCode(function);
                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> function,
                                                   Handle<String> name,
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<JSGlobalPropertyCell>::null(),
    if (!code.is_null()) return code;
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
  __ JumpIfSmi(edx, &miss);
      __ IncrementCounter(isolate()->counters()->call_const(), 1);
      CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax,
      if (object->IsGlobalObject()) {
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        GenerateDirectLoadGlobalFunctionPrototype(
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ JumpIfSmi(edx, &fast);
        GenerateDirectLoadGlobalFunctionPrototype(
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ cmp(edx, factory()->true_value());
        __ cmp(edx, factory()->false_value());
        GenerateDirectLoadGlobalFunctionPrototype(
                    NullCallWrapper(), call_kind);
  GenerateMissBranch();
  return GetCode(function);


                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
  CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, edx, ebx, edi, eax,
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
  __ JumpIfSmi(eax, &miss);
  if (object->IsGlobalObject()) {
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
                    NullCallWrapper(), call_kind);
  GenerateMissBranch();


    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
    Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
    if (!code.is_null()) return code;
  GenerateNameCheck(name, &miss);
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);
  if (object->IsGlobalObject()) {
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ParameterCount expected(function->shared()->formal_parameter_count());
                    NullCallWrapper(), call_kind);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  GenerateMissBranch();
  return GetCode(NORMAL, name);
                                                  Handle<Map> transition,
                                                  Handle<String> name) {
  GenerateStoreField(masm(),
  __ mov(ecx, Immediate(name));
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);


    Handle<JSObject> object,
    Handle<AccessorInfo> callback,
    Handle<String> name) {
  __ CheckMap(edx, Handle<Map>(object->map()),
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  __ push(Immediate(callback));
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);


    Handle<JSObject> receiver,
    Handle<JSFunction> setter,
    Handle<String> name) {
  __ CheckMap(edx, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
  ParameterCount actual(1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);


    Handle<JSObject> receiver,
    Handle<String> name) {
  __ CheckMap(edx, Handle<Map>(receiver->map()),
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);


    Handle<GlobalObject> object,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name) {
         Immediate(Handle<Map>(object->map())));
  __ mov(ebx, Immediate(cell));
  __ cmp(cell_operand, factory()->the_hole_value());
  __ mov(cell_operand, eax);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);
  return GetCode(NORMAL, name);


                                                       Handle<Map> transition,
                                                       Handle<String> name) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateStoreField(masm(),
  __ DecrementCounter(counters->keyed_store_field(), 1);
  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);


    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
      KeyedStoreElementStub(is_jsarray, elements_kind, grow_mode_).GetCode();
  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);
  return GetCode(NORMAL, factory()->empty_string());


  __ JumpIfSmi(edx, &miss, Label::kNear);
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(edi, receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
      __ mov(ebx, Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
    Handle<JSObject> object,
    Handle<JSObject> last) {
  __ JumpIfSmi(edx, &miss);
  ASSERT(last->IsGlobalObject() || last->HasFastProperties());
  CheckPrototypes(object, edx, last, ebx, eax, edi, name, &miss);
  if (last->IsGlobalObject()) {
    GenerateCheckPropertyCell(
        masm(), Handle<GlobalObject>::cast(last), name, eax, &miss);
  __ mov(eax, isolate()->factory()->undefined_value());
  return GetCode(NONEXISTENT, factory()->empty_string());


                                                Handle<JSObject> holder,
                                                Handle<String> name) {
  GenerateLoadField(object, holder, edx, ebx, eax, edi, index, name, &miss);
  return GetCode(FIELD, name);


    Handle<String> name,
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  GenerateLoadCallback(object, holder, edx, ecx, ebx, eax, edi, callback,


    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> getter) {
  __ JumpIfSmi(edx, &miss);
  CheckPrototypes(receiver, edx, holder, ebx, eax, edi, name, &miss);
  ParameterCount actual(0);


                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> value,
                                                   Handle<String> name) {
  GenerateLoadConstant(object, holder, edx, ebx, eax, edi, value, name, &miss);


                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, edx, ecx, eax, ebx, edi,


    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name,
    bool is_dont_delete) {
  __ JumpIfSmi(edx, &miss);
  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, &miss);
  __ mov(ebx, Immediate(cell));
  __ mov(ebx, Operand::Cell(cell));
  if (!is_dont_delete) {
    __ cmp(ebx, factory()->the_hole_value());
  } else if (FLAG_debug_code) {
    __ cmp(ebx, factory()->the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
  return GetCode(NORMAL, name);
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_field(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadField(receiver, holder, edx, ebx, eax, edi, index, name, &miss);
  __ DecrementCounter(counters->keyed_load_field(), 1);
  return GetCode(FIELD, name);


    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_callback(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadCallback(receiver, holder, edx, ecx, ebx, eax, edi, callback,
  __ DecrementCounter(counters->keyed_load_callback(), 1);


    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> value) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_constant_function(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadConstant(
      receiver, holder, edx, ebx, eax, edi, value, name, &miss);
  __ DecrementCounter(counters->keyed_load_constant_function(), 1);


    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<String> name) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_interceptor(), 1);
  __ cmp(ecx, Immediate(name));
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, edx, ecx, eax, ebx, edi,
  __ DecrementCounter(counters->keyed_load_interceptor(), 1);


    Handle<String> name) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_array_length(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadArrayLength(masm(), edx, eax, &miss);
  __ DecrementCounter(counters->keyed_load_array_length(), 1);


    Handle<String> name) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadStringLength(masm(), edx, eax, ebx, &miss, true);
  __ DecrementCounter(counters->keyed_load_string_length(), 1);


    Handle<String> name) {
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
  __ cmp(ecx, Immediate(name));
  GenerateLoadFunctionPrototype(masm(), edx, eax, ebx, &miss);
  __ DecrementCounter(counters->keyed_load_function_prototype(), 1);


    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
  return GetCode(NORMAL, factory()->empty_string());


  __ JumpIfSmi(edx, &miss);
  Register map_reg = ebx;
  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    __ cmp(map_reg, receiver_maps->at(current));
    __ j(equal, handler_ics->at(current));
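// Specialized construct stub: allocates the object and copies the
// constructor's simple this-property assignments (from arguments or
// constants) directly into the in-object fields, falling back to the generic
// JSConstructStubGeneric when the fast path cannot be used.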
    Handle<JSFunction> function) {
  Label generic_stub_call;
#ifdef ENABLE_DEBUGGER_SUPPORT
  __ cmp(ebx, factory()->undefined_value());
  __ JumpIfSmi(ebx, &generic_stub_call);
  __ Assert(not_equal, "Function constructed by construct stub.");
  __ mov(ebx, factory()->empty_fixed_array());
  __ mov(edi, factory()->undefined_value());
  Handle<SharedFunctionInfo> shared(function->shared());
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(eax, arg_number);
      CpuFeatures::Scope use_cmov(CMOV);
      __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
      __ mov(ebx, Operand(ecx, arg_number * -kPointerSize));
      __ bind(&not_passed);
      __ mov(Operand(edx, i * kPointerSize), ebx);
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(Operand(edx, i * kPointerSize), Immediate(constant));
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
    __ mov(Operand(edx, i * kPointerSize), edi);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ IncrementCounter(counters->constructed_objects_stub(), 1);
  __ bind(&generic_stub_call);
  Handle<Code> code = isolate()->builtins()->JSConstructStubGeneric();
  __ jmp(code, RelocInfo::CODE_TARGET);


#define __ ACCESS_MASM(masm)
    MacroAssembler* masm) {
  Label slow, miss_force_generic;
  __ JumpIfNotSmi(ecx, &miss_force_generic);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
  __ bind(&miss_force_generic);
  Handle<Code> miss_force_generic_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_force_generic_ic, RelocInfo::CODE_TARGET);
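// Converts a heap-number key into an untagged smi key when it holds an
// exact integer value in smi range (checked via an SSE2 convert-and-compare
// round trip); otherwise jumps to the fail label so the generic IC can
// handle it.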
static void GenerateSmiKeyCheck(MacroAssembler* masm,
                                XMMRegister xmm_scratch0,
                                XMMRegister xmm_scratch1,
    CpuFeatures::Scope use_sse2(SSE2);
    __ JumpIfSmi(key, &key_ok);
           Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
    __ cvttsd2si(scratch, Operand(xmm_scratch0));
    __ cvtsi2sd(xmm_scratch1, scratch);
    __ ucomisd(xmm_scratch1, xmm_scratch0);
    __ cmp(scratch, 0xc0000000);
    __ mov(key, scratch);
    __ JumpIfNotSmi(key, fail);


    MacroAssembler* masm,
  Label miss_force_generic, failed_allocation, slow;
  GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
  switch (elements_kind) {
      __ cmp(eax, 0xc0000000);
      __ test(eax, Immediate(0xc0000000));
      __ fild_s(Operand(esp, 0));
      __ push(Immediate(0));
      __ fild_d(Operand(esp, 0));
      __ AllocateHeapNumber(eax, ebx, edi, &failed_allocation);
      __ AllocateHeapNumber(eax, ebx, edi, &failed_allocation);
  __ bind(&failed_allocation);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);


    MacroAssembler* masm,
  Label miss_force_generic, slow, check_heap_number;
  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
    __ JumpIfNotSmi(eax, &slow);
    __ JumpIfNotSmi(eax, &check_heap_number);
  switch (elements_kind) {
      __ fild_s(Operand(esp, 0));
  __ bind(&check_heap_number);
         Immediate(masm->isolate()->factory()->heap_number_map()));
        CpuFeatures::Scope scope(SSE3);
        __ sub(esp, Immediate(2 * kPointerSize));
        __ fisttp_d(Operand(esp, 0));
        __ cmp(Operand(esp, kPointerSize), Immediate(0x80000000u));
        __ cmp(Operand(esp, 0), Immediate(0));
        __ add(esp, Immediate(2 * kPointerSize));
        __ add(esp, Immediate(kPointerSize));
        CpuFeatures::Scope scope(SSE2);
        __ cmp(ebx, 0x80000000u);
        switch (elements_kind) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_external_array_slow(), 1);
  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
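// The fast-element load and store generators below all begin with the same
// smi key check and bail out to the force-generic or miss ICs when they see
// a hole, a value of the wrong elements kind, or a backing store that needs
// to grow.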
  Label miss_force_generic;
  GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
  __ AssertFastElements(eax);
  __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
  __ j(equal, &miss_force_generic);
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);


    MacroAssembler* masm) {
  Label miss_force_generic, slow_allocate_heapnumber;
  GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
  __ AssertFastElements(eax);
  __ j(equal, &miss_force_generic);
    CpuFeatures::Scope use_sse2(SSE2);
  __ AllocateHeapNumber(eax, ebx, edi, &slow_allocate_heapnumber);
    CpuFeatures::Scope use_sse2(SSE2);
  __ bind(&slow_allocate_heapnumber);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(slow_ic, RelocInfo::CODE_TARGET);
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);


    MacroAssembler* masm,
  Label miss_force_generic, grow, slow, transition_elements_kind;
  Label check_capacity, prepare_slow, finish_store, commit_backing_store;
  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
  __ JumpIfNotSmi(eax, &transition_elements_kind);
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ bind(&finish_store);
                      FixedArray::kHeaderSize), eax);
                      FixedArray::kHeaderSize));
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
    __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
    __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
           Immediate(masm->isolate()->factory()->fixed_array_map()));
    __ mov(ebx, Immediate(masm->isolate()->factory()->the_hole_value()));
    __ bind(&check_capacity);
           Immediate(masm->isolate()->factory()->fixed_cow_array_map()));
    __ j(equal, &miss_force_generic);
    __ jmp(&finish_store);
    __ bind(&prepare_slow);
    __ mov(ecx, Immediate(0));
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ jmp(ic_slow, RelocInfo::CODE_TARGET);


    MacroAssembler* masm,
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label check_capacity, prepare_slow, finish_store, commit_backing_store;
  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
  __ AssertFastElements(edi);
  __ bind(&finish_store);
  __ StoreNumberToDoubleElements(eax, edi, ecx, edx, xmm0,
                                 &transition_elements_kind, true);
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic_miss, RelocInfo::CODE_TARGET);
    __ JumpIfSmi(eax, &value_is_smi);
           Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
    __ bind(&value_is_smi);
    __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
    __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
    __ mov(ecx, Immediate(0));
           Immediate(masm->isolate()->factory()->fixed_double_array_map()));
           Immediate(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
    __ jmp(&finish_store);
    __ bind(&check_capacity);
    __ jmp(&finish_store);
    __ bind(&prepare_slow);
    __ mov(ecx, Immediate(0));
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ jmp(ic_slow, RelocInfo::CODE_TARGET);


#endif  // V8_TARGET_ARCH_IA32
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name