#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm)

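// Probe one table of the stub cache: compare the cached key and receiver map
// against the current name and map, load the cached code object, check its
// flags, and jump to it on a hit. On a miss, execution falls through so the
// caller can probe the next table.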
static void ProbeTable(Isolate* isolate,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Register base_addr = scratch;
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));
  __ mov(base_addr, Operand(key_offset));

  Register code = scratch2;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  Register flags_reg = base_addr;
  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmn(flags_reg, Operand(-flags));
  __ cmp(flags_reg, Operand(flags));

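// Prove that the given name is not present on the receiver by checking its
// property dictionary. Objects with named interceptors or access checks
// cannot be handled this way and jump to the miss label instead.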
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  Register map = scratch1;
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);
  __ b(lt, miss_label);

  Register properties = scratch0;
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ b(ne, miss_label);

  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

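// StubCache::GenerateProbe: hash the name together with the receiver's map,
// mask the hash to the primary table size and probe the primary table; on a
// miss, derive a secondary hash, probe the secondary table, and finally fall
// through to the IC miss handler.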
  Isolate* isolate = masm->isolate();

  ASSERT(sizeof(Entry) == 12);

  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,

  __ JumpIfSmi(receiver, &miss);

  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  __ and_(scratch, scratch, Operand(mask));

  uint32_t mask2 = kSecondaryTableSize - 1;
  __ and_(scratch, scratch, Operand(mask2));

  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,

void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
  Isolate* isolate = masm->isolate();

  __ Move(ip, isolate->global_object());
  __ cmp(prototype, ip);

  Handle<JSFunction> function(
  __ Move(prototype, Handle<Map>(function->initial_map()));

void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Handle<JSObject> holder,
  index -= holder->map()->inobject_properties();

  int offset = holder->map()->instance_size() + (index * kPointerSize);

void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss_label);
  __ b(ne, miss_label);

static void GenerateStringCheck(MacroAssembler* masm,
                                Label* non_string_object) {
  __ JumpIfSmi(receiver, smi);

  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);

void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            bool support_wrappers) {
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  if (support_wrappers) {
    __ bind(&check_wrapper);
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);

void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);

void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      Handle<Map> transition,
                                      Register receiver_reg,
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {

  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,

  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);

  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } while (holder->GetPrototype()->IsJSObject());

    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ bind(&done_check);

  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  if (!transition.is_null() &&
      (object->map()->unused_property_fields() == 0)) {
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),

  if (!transition.is_null()) {
    __ mov(scratch1, Operand(transition));

    __ RecordWriteField(receiver_reg,

  index -= object->map()->inobject_properties();

    int offset = object->map()->instance_size() + (index * kPointerSize);

    __ JumpIfSmi(r0, &exit);

    __ mov(name_reg, r0);
    __ RecordWriteField(receiver_reg,

    __ JumpIfSmi(r0, &exit);

    __ mov(name_reg, r0);
    __ RecordWriteField(scratch1,

void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);

static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
  __ JumpIfSmi(r1, miss);

  if (object->IsGlobalObject()) {

static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));
  __ mov(scratch, Operand(ExternalReference::isolate_address()));

static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
  __ mov(r0, Operand(6));
  __ mov(r1, Operand(ref));

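// Fast API calls reserve kFastApiCallArguments extra words on the stack below
// the JavaScript arguments; GenerateFastApiDirectCall fills them in (callee,
// call data, isolate) and invokes the C++ callback directly through
// CallApiFunctionAndReturn.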
static const int kFastApiCallArguments = 4;

static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
  for (int i = 0; i < kFastApiCallArguments; i++) {

static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);

static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(r5, function);

  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(r0, api_call_info);
    __ Move(r6, call_data);
  __ mov(r7, Operand(ExternalReference::isolate_address()));

  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  __ mov(ip, Operand(argc));
  __ mov(ip, Operand(0));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);

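// Helper for compiling call ICs that must go through a named interceptor.
// CompileCacheable is used when the lookup behind the interceptor yields a
// constant function (optionally via the fast API call path); CompileRegular
// falls back to invoking the interceptor through the runtime.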
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               LookupResult* lookup,
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,

  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        const CallOptimization& optimization,
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();

    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      depth2 = optimization.GetPrototypeDepthOfExpectedType(
          interceptor_holder, Handle<JSObject>(lookup->holder()));
      can_do_fast_api_call =

    __ IncrementCounter(counters->call_const_interceptor(), 1,

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
      ReserveSpaceForFastApiCall(masm, scratch1);

    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,

    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,

    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
      __ InvokeFunction(optimization.constant_function(), arguments_,

    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);

    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Handle<JSObject> interceptor_holder,
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),

  void LoadWithInterceptor(MacroAssembler* masm,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ b(ne, interceptor_succeeded);

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;

static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
  Handle<JSGlobalPropertyCell> cell =
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));

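// Convert the untagged int32 in ival to a 32-bit float and store it at
// dst + wordoffset * 4. With VFP2 this is a short conversion sequence ending
// in vstr; the fallback assembles the float by hand from the sign, a computed
// exponent (via CountLeadingZeros) and the shifted mantissa.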
static void StoreIntAsFloat(MacroAssembler* masm,
    CpuFeatures::Scope scope(VFP2);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vstr(s0, scratch1, 0);

    Label not_special, done;

    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // Exponent word for 1.0 as a binary32 value.
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;
    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);

    __ bind(&not_special);
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

           Operand(scratch1, LSL, kBinary32ExponentShift));

    __ add(zeros, zeros, Operand(1));
    __ mov(ival, Operand(ival, LSL, zeros));

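// Convert an unsigned int with the given number of leading zeros (0 or 1 in
// the callers below) into an IEEE double in hiword/loword form by placing the
// mantissa bits and the biased exponent manually.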
static void GenerateUInt2Double(MacroAssembler* masm,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  if (mantissa_shift_for_hi_word > 0) {
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));

  if (!(biased_exponent & 1)) {

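// Walk the prototype chain from object to holder, checking the map of every
// object along the way. Dictionary-mode objects get a negative name lookup
// instead of a map check, global objects get empty-property-cell checks, and
// the register holding the final holder is returned to the caller.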
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Handle<String> name,
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  Register reg = object_reg;

  if (save_at_depth == depth) {

  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      Handle<Map> current_map(current->map());

      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);

      if (heap()->InNewSpace(*prototype)) {
        __ mov(reg, Operand(prototype));

    if (save_at_depth == depth) {

    current = prototype;

  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,

  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);

  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Handle<String> name,
  __ JumpIfSmi(receiver, miss);

  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);

void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss);

      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  __ LoadHeapObject(r0, value);

void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
                                                  Handle<AccessorInfo> callback,
                                                  Handle<String> name,
  ASSERT(!receiver.is(scratch1));
  ASSERT(!receiver.is(scratch2));
  ASSERT(!receiver.is(scratch3));

  Register dictionary = scratch1;
  __ bind(&probe_done);

  Register pointer = scratch3;
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ cmp(scratch2, Operand(callback));

void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss);

  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    GenerateDictionaryLoadCallback(
        reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);

  __ mov(scratch2, sp);
  if (heap()->InNewSpace(callback->data())) {
    __ Move(scratch3, callback);
    __ Move(scratch3, Handle<Object>(callback->data()));
  __ Push(reg, scratch3);
  __ mov(scratch3, Operand(ExternalReference::isolate_address()));
  __ Push(scratch3, name_reg);

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  __ add(r1, sp, Operand(1 * kPointerSize));

  const int kStackUnwindSpace = 5;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);

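// Load a property that sits behind a named interceptor. When the lookup
// result is a cacheable field or a compatible AccessorInfo getter, the
// interceptor is invoked inline and the follow-up load is compiled directly;
// otherwise the stub tail-calls the runtime to perform the interceptor load.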
void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Handle<String> name,
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  __ JumpIfSmi(receiver, miss);

  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo()) {
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);

  if (compile_followup_inline) {
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    bool must_perform_prototype_check =
        *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

      if (must_preserve_receiver_reg) {
        __ Push(receiver, holder_reg, name_reg);
        __ Push(holder_reg, name_reg);

      CompileCallLoadPropertyWithInterceptor(masm(),
                                             interceptor_holder);

      Label interceptor_failed;
      __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
      __ cmp(r0, scratch1);
      __ b(eq, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();

      __ bind(&interceptor_failed);

      if (must_preserve_receiver_reg) {

    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   Handle<JSObject>(lookup->holder()),

    if (lookup->IsField()) {
      GenerateFastPropertyLoad(masm(), r0, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());

      Handle<AccessorInfo> callback(
      __ Move(scratch2, callback);

      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ push(holder_reg);

      __ mov(scratch1, Operand(ExternalReference::isolate_address()));
      __ Push(scratch3, scratch1, scratch2, name_reg);

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
      __ TailCallExternalReference(ref, 6, 1);

    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
    __ TailCallExternalReference(ref, 6, 1);

void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ cmp(r2, Operand(name));

void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
  ASSERT(holder->IsGlobalObject());

  const int argc = arguments().immediate();

  __ JumpIfSmi(r0, miss);
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);

void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
  __ mov(r3, Operand(cell));

  if (heap()->InNewSpace(*function)) {
    __ JumpIfSmi(r1, miss);
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ cmp(r1, Operand(function));

void CallStubCompiler::GenerateMissBranch() {
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
  __ Jump(code, RelocInfo::CODE_TARGET);

                                                Handle<JSObject> holder,
                                                Handle<String> name) {
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  __ JumpIfSmi(r0, &miss);

  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  GenerateMissBranch();

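// Custom call IC for Array.prototype.push. The fast paths store the new
// element straight into the elements backing store, emitting a write barrier
// only for non-smi values, and can grow the backing store in place when it
// directly abuts the new-space allocation top; all other cases tail-call the
// c_ArrayPush builtin.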
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  Register receiver = r1;

  const int argc = arguments().immediate();

  __ JumpIfSmi(receiver, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4,

    Label attempt_to_grow_elements;

    Register elements = r6;
    Register end_elements = r5;

    __ CheckMap(elements,
                Heap::kFixedArrayMapRootIndex,

    __ b(gt, &attempt_to_grow_elements);

    Label with_write_barrier;

    __ JumpIfNotSmi(r4, &with_write_barrier);

    __ add(end_elements, elements,
    const int kEndElementsOffset =

    __ bind(&with_write_barrier);

    if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
      Label fast_object, not_fast_object;
      __ CheckFastObjectElements(r3, r7, &not_fast_object);
      __ jmp(&fast_object);

      __ bind(&not_fast_object);
      __ CheckFastSmiElements(r3, r7, &call_builtin);

      Label try_holey_map;
      __ mov(r2, receiver);
      __ jmp(&fast_object);

      __ bind(&try_holey_map);
      __ mov(r2, receiver);

      __ bind(&fast_object);

    __ CheckFastObjectElements(r3, r3, &call_builtin);

    __ add(end_elements, elements,

    __ RecordWrite(elements,

    __ bind(&attempt_to_grow_elements);

    if (!FLAG_inline_new) {
      __ b(&call_builtin);

      Label no_fast_elements_check;
      __ JumpIfSmi(r2, &no_fast_elements_check);
      __ CheckFastObjectElements(r7, r7, &call_builtin);
      __ bind(&no_fast_elements_check);

    Isolate* isolate = masm()->isolate();
    ExternalReference new_space_allocation_top =
        ExternalReference::new_space_allocation_top_address(isolate);
    ExternalReference new_space_allocation_limit =
        ExternalReference::new_space_allocation_limit_address(isolate);

    const int kAllocationDelta = 4;

    __ add(end_elements, elements,
    __ add(end_elements, end_elements, Operand(kEndElementsOffset));
    __ mov(r7, Operand(new_space_allocation_top));
    __ cmp(end_elements, r3);
    __ b(ne, &call_builtin);

    __ mov(r9, Operand(new_space_allocation_limit));
    __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
    __ b(hi, &call_builtin);

    __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
    for (int i = 1; i < kAllocationDelta; i++) {

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,

  GenerateMissBranch();

  return GetCode(function);

Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = r1;
  Register elements = r3;
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  __ JumpIfSmi(receiver, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  r4, r0, name, &miss);

  __ CheckMap(elements,
              Heap::kFixedArrayMapRootIndex,

  __ b(lt, &return_undefined);

  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  __ b(eq, &call_builtin);

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,

  GenerateMissBranch();

  return GetCode(function);

Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),

  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register result = r0;
  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharCodeAtGenerator generator(receiver,
                                      index_out_of_range_label,
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kNanValueRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  return GetCode(function);

Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  if (kind_ == Code::CALL_IC &&
    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),

  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharAtGenerator generator(receiver,
                                  index_out_of_range_label,
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kEmptyStringRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  return GetCode(function);

Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ JumpIfNotSmi(code, &slow);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);

Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  CpuFeatures::Scope scope_vfp2(VFP2);
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(r1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ Drop(argc + 1, eq);

  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;

  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
  __ b(&no_vfp_exception, eq);

          >> HeapNumber::kMantissaBitsInTopWord), SetCC);
  __ b(&restore_fpscr_and_return, eq);
  __ b(&restore_fpscr_and_return, ge);
  __ b(&wont_fit_smi);

  __ bind(&no_vfp_exception);
  __ add(r1, r0, Operand(0x40000000), SetCC);
  __ b(&wont_fit_smi, mi);

  __ b(&restore_fpscr_and_return, ne);

  __ bind(&restore_fpscr_and_return);

  __ bind(&wont_fit_smi);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);

Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ JumpIfSmi(r1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ JumpIfNotSmi(r0, &not_smi);

  Label negative_sign;
  __ b(ne, &negative_sign);

  __ bind(&negative_sign);
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);

Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());

  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);

  Label miss, miss_before_stack_reserved;
  GenerateNameCheck(name, &miss_before_stack_reserved);

  const int argc = arguments().immediate();

  __ JumpIfSmi(r1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, r0, r3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);

  ReserveSpaceForFastApiCall(masm(), r0);

  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, name,

  GenerateFastApiDirectCall(masm(), optimization, argc);

  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  return GetCode(function);

                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> function,
                                                   Handle<String> name,
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<JSGlobalPropertyCell>::null(),
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  __ JumpIfSmi(r1, &miss);

      __ IncrementCounter(masm()->isolate()->counters()->call_const(),

      CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,

      if (object->IsGlobalObject()) {

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        GenerateDirectLoadGlobalFunctionPrototype(
            r0, holder, r3, r1, r4, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ JumpIfSmi(r1, &fast);
        GenerateDirectLoadGlobalFunctionPrototype(
            r0, holder, r3, r1, r4, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        GenerateDirectLoadGlobalFunctionPrototype(
            r0, holder, r3, r1, r4, name, &miss);

      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);

  GenerateMissBranch();

  return GetCode(function);

                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  GenerateMissBranch();

    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
    Handle<Code> code =
        CompileCustomCall(object, holder, cell, function, name);
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  if (object->IsGlobalObject()) {

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
  ParameterCount expected(function->shared()->formal_parameter_count());
                NullCallWrapper(), call_kind);

  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
  GenerateMissBranch();

                                                  Handle<Map> transition,
                                                  Handle<String> name) {
  GenerateStoreField(masm(),

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(transition.is_null()

    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  __ JumpIfSmi(r1, &miss);
  CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);

  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ mov(ip, Operand(callback));

  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
  __ TailCallExternalReference(store_callback_property, 4, 1);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

#define __ ACCESS_MASM(masm)


    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  if (!setter.is_null()) {
    ParameterCount actual(1);
    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());

#define __ ACCESS_MASM(masm())


    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> setter) {
  __ JumpIfSmi(r1, &miss);
  CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

    Handle<JSObject> receiver,
    Handle<String> name) {
  __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss,

  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);

  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
  __ TailCallExternalReference(store_ic_property, 4, 1);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

    Handle<GlobalObject> object,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name) {
  __ cmp(r3, Operand(Handle<Map>(object->map())));

  __ mov(r4, Operand(cell));
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);

  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

    Handle<JSObject> object,
    Handle<JSObject> last) {
  __ JumpIfSmi(r0, &miss);

  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);

  if (last->IsGlobalObject()) {
    GenerateCheckPropertyCell(
        masm(), Handle<GlobalObject>::cast(last), name, r1, &miss);

  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);

  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<JSObject> holder,
    Handle<String> name) {
  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<String> name,
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4, r5, callback, name,

  GenerateLoadMiss(masm(), Code::LOAD_IC);

#define __ ACCESS_MASM(masm)


    Handle<JSFunction> getter) {
  if (!getter.is_null()) {
    ParameterCount actual(0);
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());

#define __ ACCESS_MASM(masm())


    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> getter) {
  __ JumpIfSmi(r0, &miss);
  CheckPrototypes(receiver, r0, holder, r3, r4, r1, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<JSObject> holder,
    Handle<JSFunction> value,
    Handle<String> name) {
  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<JSObject> holder,
    Handle<String> name) {
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object, holder, &lookup, r0, r2, r3, r1, r4, name,

  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name,
    bool is_dont_delete) {
  __ JumpIfSmi(r0, &miss);
  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);

  __ mov(r3, Operand(cell));

  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);

  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

    Handle<JSObject> receiver,
    Handle<JSObject> holder,
  __ cmp(r0, Operand(name));

  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  __ cmp(r0, Operand(name));

  GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4, r5, callback, name,

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> value) {
  __ cmp(r0, Operand(name));

  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<String> name) {
  __ cmp(r0, Operand(name));

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, r1, r0, r2, r3, r4, name,

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<String> name) {
  __ cmp(r0, Operand(name));

  GenerateLoadArrayLength(masm(), r1, r2, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  __ cmp(r0, Operand(name));

  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);

  __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);

  __ cmp(r0, Operand(name));

  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);

  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(Code::NORMAL, factory()->empty_string());

  __ JumpIfSmi(r1, &miss);

  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    __ mov(ip, Operand(receiver_maps->at(current)));
    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq);

  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);

                                                       Handle<Map> transition,
                                                       Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);

  __ cmp(r1, Operand(name));

  GenerateStoreField(masm(),

  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(transition.is_null()

    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
      KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(Code::NORMAL, factory()->empty_string());

  __ JumpIfSmi(r2, &miss);

  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    __ mov(ip, Operand(receiver_maps->at(i)));
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
      __ b(ne, &next_map);
      __ mov(r3, Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);

  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);

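// Specialized construct stub for functions with only simple this-property
// assignments: the object is allocated inline and its in-object fields are
// filled from constructor arguments or constants; anything unexpected (for
// example, running under the debugger) branches to the generic construct
// stub.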
    Handle<JSFunction> function) {
  Label generic_stub_call;

  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  __ b(ne, &generic_stub_call);

  __ JumpIfSmi(r2, &generic_stub_call);
  __ b(ne, &generic_stub_call);

  __ Check(ne, "Function constructed by construct stub.");

  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);

  Handle<SharedFunctionInfo> shared(function->shared());
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      __ bind(&not_passed);
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));

  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();

  __ add(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);

  __ bind(&generic_stub_call);
  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(code, RelocInfo::CODE_TARGET);

#define __ ACCESS_MASM(masm)


    MacroAssembler* masm) {
  Label slow, miss_force_generic;

  Register receiver = r1;

  __ JumpIfNotSmi(key, &miss_force_generic);
  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);

static bool IsElementTypeSigned(ElementsKind elements_kind) {
  switch (elements_kind) {

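// Accept a smi key directly. With VFP2 a heap-number key whose value fits in
// a smi is converted in place (via TrySmiTag), so keys such as 1.0 still hit
// the fast path; anything else jumps to fail.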
static void GenerateSmiKeyCheck(MacroAssembler* masm,
                                DwVfpRegister double_scratch0,
                                DwVfpRegister double_scratch1,
    CpuFeatures::Scope scope(VFP2);
    __ JumpIfSmi(key, &key_ok);
        Heap::kHeapNumberMapRootIndex,
    __ TrySmiTag(scratch0, fail, scratch1);
    __ mov(key, scratch0);
    __ JumpIfNotSmi(key, fail);

    MacroAssembler* masm,
  Label miss_force_generic, slow, failed_allocation;

  Register receiver = r1;

  GenerateSmiKeyCheck(masm, key, r4, r5, d1, d2, &miss_force_generic);

  __ b(hs, &miss_force_generic);

  Register value = r2;
  switch (elements_kind) {

      CpuFeatures::Scope scope(VFP2);
      __ add(r2, r3, Operand(key, LSL, 1));

      CpuFeatures::Scope scope(VFP2);
      __ add(r2, r3, Operand(key, LSL, 2));
      __ add(r4, r3, Operand(key, LSL, 2));

  __ cmp(value, Operand(0xC0000000));

    CpuFeatures::Scope scope(VFP2);
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    CpuFeatures::Scope scope(VFP2);
    Label box_int, done;
    __ tst(value, Operand(0xC0000000));

    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    Label box_int_0, box_int_1, done;
    __ tst(value, Operand(0x80000000));
    __ b(ne, &box_int_0);
    __ tst(value, Operand(0x40000000));
    __ b(ne, &box_int_1);

    Register hiword = value;
    Register loword = r3;

    __ bind(&box_int_0);
    GenerateUInt2Double(masm, hiword, loword, r4, 0);

    __ bind(&box_int_1);
    GenerateUInt2Double(masm, hiword, loword, r4, 1);

    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    CpuFeatures::Scope scope(VFP2);
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    Label exponent_rebiased;
    __ teq(r1, Operand(0x00));
    __ b(eq, &exponent_rebiased);

    __ teq(r1, Operand(0xff));
    __ b(eq, &exponent_rebiased);

    __ bind(&exponent_rebiased);
    __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));

    static const int kMantissaShiftForHiWord =
        kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
    static const int kMantissaShiftForLoWord =
        kBitsPerInt - kMantissaShiftForHiWord;

    __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
    __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));

    CpuFeatures::Scope scope(VFP2);
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);

    __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  __ bind(&miss_force_generic);
  Handle<Code> stub =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);

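// Store into an external (typed) array after converting the value to the
// element type. Smis are untagged and clamped or truncated as needed; heap
// numbers use a VFP2 conversion when available, or a manual path that
// assembles the target representation from sign, exponent and mantissa bits.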
    MacroAssembler* masm,
  Label slow, check_heap_number, miss_force_generic;

  Register value = r0;
  Register receiver = r2;

  GenerateSmiKeyCheck(masm, key, r4, r5, d1, d2, &miss_force_generic);

  __ b(hs, &miss_force_generic);

  __ JumpIfNotSmi(value, &slow);
  __ JumpIfNotSmi(value, &check_heap_number);
  __ SmiUntag(r5, value);

  switch (elements_kind) {
      __ Usat(r5, 8, Operand(r5));

      __ SmiUntag(r4, key);
      StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
      __ add(r3, r3, Operand(key, LSL, 2));

          masm, r5, destination,
        CpuFeatures::Scope scope(VFP2);

  __ bind(&check_heap_number);

    CpuFeatures::Scope scope(VFP2);

    __ add(r5, r3, Operand(key, LSL, 1));
    __ add(r5, r3, Operand(key, LSL, 2));

    switch (elements_kind) {

    Label done, nan_or_infinity_or_zero;
    static const int kMantissaInHiWordShift =
        kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
    static const int kMantissaInLoWordShift =
        kBitsPerInt - kMantissaInHiWordShift;

    __ b(eq, &nan_or_infinity_or_zero);
    __ b(eq, &nan_or_infinity_or_zero);

    __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
    __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
    __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

    __ bind(&nan_or_infinity_or_zero);
    __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
    __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
    __ add(r7, r3, Operand(key, LSL, 2));

    bool is_signed_type = IsElementTypeSigned(elements_kind);
    int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

    __ cmp(r9, Operand(meaningful_bits - 1));
    __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
    __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
    __ rsb(r9, r9, Operand(meaningful_bits));

    switch (elements_kind) {

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);

  Label miss_force_generic;

  GenerateSmiKeyCheck(masm, r0, r4, r5, d1, d2, &miss_force_generic);

  __ AssertFastElements(r2);

  __ cmp(r0, Operand(r3));
  __ b(hs, &miss_force_generic);

  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ b(eq, &miss_force_generic);

  __ bind(&miss_force_generic);
  Handle<Code> stub =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);

    MacroAssembler* masm) {
  Label miss_force_generic, slow_allocate_heapnumber;

  Register key_reg = r0;
  Register receiver_reg = r1;
  Register elements_reg = r2;
  Register heap_number_reg = r2;
  Register indexed_double_offset = r3;
  Register scratch = r4;
  Register scratch2 = r5;
  Register scratch3 = r6;
  Register heap_number_map = r7;

  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);

  __ ldr(elements_reg,

  __ cmp(key_reg, Operand(scratch));
  __ b(hs, &miss_force_generic);

  __ add(indexed_double_offset, elements_reg,
  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ b(&miss_force_generic, eq);

  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3, heap_number_map,
                        &slow_allocate_heapnumber, TAG_RESULT);

                                 FixedArray::kHeaderSize));

  __ mov(r0, heap_number_reg);

  __ bind(&slow_allocate_heapnumber);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);

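// Store into a fast-elements backing store. Stubs compiled in grow mode can
// extend the array by one element when the key equals the current length,
// allocating a fresh FixedArray (or checking the capacity of the existing
// one); other unexpected cases defer to the miss or slow keyed-store ICs.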
    MacroAssembler* masm,
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label finish_store, check_capacity;

  Register value_reg = r0;
  Register key_reg = r1;
  Register receiver_reg = r2;
  Register scratch = r4;
  Register elements_reg = r3;
  Register length_reg = r5;
  Register scratch2 = r6;

  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);

  __ JumpIfNotSmi(value_reg, &transition_elements_kind);

  __ ldr(elements_reg,

  __ cmp(key_reg, scratch);
  __ b(hs, &miss_force_generic);

  __ CheckMap(elements_reg,
              Heap::kFixedArrayMapRootIndex,
              &miss_force_generic,

  __ bind(&finish_store);

  __ mov(receiver_reg, value_reg);
  __ RecordWrite(elements_reg,

  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic_miss, RelocInfo::CODE_TARGET);

  __ b(ne, &miss_force_generic);

  __ ldr(elements_reg,
  __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
  __ b(ne, &check_capacity);

  __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,

  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);

  __ str(elements_reg,

  __ bind(&check_capacity);
  __ CheckMap(elements_reg,
              Heap::kFixedCOWArrayMapRootIndex,
              &miss_force_generic,

  __ cmp(length_reg, scratch);
  __ jmp(&finish_store);

  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(ic_slow, RelocInfo::CODE_TARGET);

4638 MacroAssembler* masm,
4650 Label miss_force_generic, transition_elements_kind, grow, slow;
4651 Label finish_store, check_capacity;
4653 Register value_reg =
r0;
4654 Register key_reg =
r1;
4655 Register receiver_reg =
r2;
4656 Register elements_reg =
r3;
4657 Register scratch1 =
r4;
4658 Register scratch2 =
r5;
4659 Register scratch3 =
r6;
4660 Register scratch4 =
r7;
4661 Register length_reg =
r7;
4667 GenerateSmiKeyCheck(masm, key_reg, r4,
r5,
d1,
d2, &miss_force_generic);
4669 __ ldr(elements_reg,
4681 __ cmp(key_reg, scratch1);
4685 __ b(
hs, &miss_force_generic);
4688 __ bind(&finish_store);
4689 __ StoreNumberToDoubleElements(value_reg,
4698 &transition_elements_kind);
4702 __ bind(&miss_force_generic);
4704 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4705 __ Jump(ic, RelocInfo::CODE_TARGET);
4707 __ bind(&transition_elements_kind);
4708 Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4709 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Grow the array by a single element; anything else must be handled by
    // the runtime. Flags are still set by the bounds compare above.
    __ bind(&grow);
    __ b(ne, &miss_force_generic);

    // Transition on values that can't be stored in a FixedDoubleArray.
    Label value_is_smi;
    __ JumpIfSmi(value_reg, &value_is_smi);
    __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
    __ b(ne, &transition_elements_kind);
    __ bind(&value_is_smi);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ ldr(length_reg,
           FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
    __ ldr(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
    __ b(ne, &check_capacity);

    int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
                          TAG_OBJECT);

    // Initialize the new FixedDoubleArray's map and length; the element at
    // index zero is written below via finish_store.
    __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
    __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
    __ mov(scratch1,
           Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
    __ str(scratch1,
           FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));

    // Install the new backing store in the JSArray.
    __ str(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
                        scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs,
                        EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length, reload the elements pointer (which the write
    // barrier may have clobbered), and finish the store.
    __ mov(length_reg, Operand(Smi::FromInt(1)));
    __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
    __ ldr(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ jmp(&finish_store);

    __ bind(&check_capacity);
    // Make sure that the backing store can hold additional elements.
    __ ldr(scratch1,
           FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
    __ cmp(length_reg, scratch1);
    __ b(hs, &slow);

    // Grow the array and finish the store.
    __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
    __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
    __ jmp(&finish_store);

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ Jump(ic_slow, RelocInfo::CODE_TARGET);
  }
}
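// A hedged C++ analogue of StoreNumberToDoubleElements (used at
// &finish_store above): it accepts either a smi or a heap number and writes
// the raw double, and anything else bails out, which the stub turns into an
// elements-kind transition. The tagged-value type is illustrative, not a
// real V8 object.
#include <cstdint>

enum class Tag { kSmi, kHeapNumber, kOther };
struct TaggedSketch { Tag tag; int32_t smi; double number; };

// Returns false when the value cannot live in a FixedDoubleArray, which in
// the stub triggers &transition_elements_kind instead of a store.
inline bool StoreNumberSketch(TaggedSketch v, double* slot) {
  switch (v.tag) {
    case Tag::kSmi:        *slot = static_cast<double>(v.smi); return true;
    case Tag::kHeapNumber: *slot = v.number; return true;
    default:               return false;  // -> transition_elements_kind
  }
}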
#endif  // V8_TARGET_ARCH_ARM