#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm)
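// Probes a single entry of a stub cache table (the full parameter list is
// elided in this excerpt). The entry's key is compared against the name, the
// cached map word against the receiver's map, and the cached code object's
// flags against the expected flags; any mismatch branches to the local miss
// label, while a hit falls through to jump into the cached code.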
static void ProbeTable(Isolate* isolate,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Register base_addr = scratch;
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);
  __ li(base_addr, Operand(key_offset));
  __ Addu(base_addr, base_addr, at);
  __ Branch(&miss, ne, name, Operand(at));
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ Branch(&miss, ne, at, Operand(scratch2));
  Register code = scratch2;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
  Register flags_reg = base_addr;
  __ Branch(&miss, ne, flags_reg, Operand(flags));
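// Emits a negative lookup in a dictionary-mode receiver: bails out to
// miss_label if the map has a named interceptor or needs an access check,
// or if the properties backing store is not a hash table, so falling
// through proves the property is genuinely absent.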
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  const int kInterceptorOrAccessCheckNeededMask =
  Register map = scratch1;
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  Register properties = scratch0;
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
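// Body of the megamorphic stub cache probe (the enclosing signature is
// elided in this excerpt): the primary table is probed first, the hash is
// then re-scrambled for the secondary table, and both failures fall through
// to the miss counter bump.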
  Isolate* isolate = masm->isolate();
  ASSERT(sizeof(Entry) == 12);

  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,

  __ JumpIfSmi(receiver, &miss);
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  __ And(scratch, scratch, Operand(mask));
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ And(scratch, scratch, Operand(mask2));

  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
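// The two helpers below load the prototype of a builtin global function.
// The Direct variant bakes the function's known initial map into the code,
// guarding it with a check that the compiling isolate's global object is
// still the current one.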
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
  Isolate* isolate = masm->isolate();
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global_object());
  __ Branch(miss, ne, prototype, Operand(at));
  Handle<JSFunction> function(
  __ li(prototype, Handle<Map>(function->initial_map()));
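// Loads a fast-mode field. After subtracting the map's in-object property
// count, a negative index denotes an in-object slot at a fixed offset from
// the object start; otherwise the value lives in the out-of-object
// properties array.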
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Handle<JSObject> holder,
  index -= holder->map()->inobject_properties();
    int offset = holder->map()->instance_size() + (index * kPointerSize);
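// Loads the length of a JSArray receiver after a smi check and an instance
// type check, jumping to miss_label for anything that is not a JSArray.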
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  __ JumpIfSmi(receiver, miss_label);
  __ GetObjectType(receiver, scratch, scratch);
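// Dispatches on the receiver type: smis go to the smi label, non-strings to
// non_string_object, and strings fall through.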
static void GenerateStringCheck(MacroAssembler* masm,
                                Label* non_string_object) {
  __ JumpIfSmi(receiver, smi, t0);
  __ Branch(non_string_object,
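// Loads a string's length. When support_wrappers is set, a JSValue wrapper
// around a string is unwrapped via the check_wrapper path and the inner
// string's length is returned instead of missing.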
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            bool support_wrappers) {
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  if (support_wrappers) {
    __ bind(&check_wrapper);
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
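// Loads the receiver's prototype via TryGetFunctionPrototype and returns it
// in v0.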
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(v0, scratch1);
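// Generates a field store: receiver map check, access check for global
// proxies, and an optional map transition. When the transition needs more
// backing store (no unused property fields remain), the stub tail-calls the
// SharedStoreIC_ExtendStorage runtime entry; otherwise the value is written
// in-object or into the properties array, with a write barrier that smis
// skip via the early exit.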
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      Handle<Map> transition,
                                      Register receiver_reg,
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {

  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,

  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);

  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } while (holder->GetPrototype()->IsJSObject());
    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ bind(&done_check);

  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  if (!transition.is_null() &&
      (object->map()->unused_property_fields() == 0)) {
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),

  if (!transition.is_null()) {
    __ li(scratch1, Operand(transition));
    __ RecordWriteField(receiver_reg,

    index -= object->map()->inobject_properties();
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ JumpIfSmi(a0, &exit, scratch1);
    __ mov(name_reg, a0);
    __ RecordWriteField(receiver_reg,
    __ JumpIfSmi(a0, &exit);
    __ mov(name_reg, a0);
    __ RecordWriteField(scratch1,
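// Jumps to the generic miss builtin matching the IC kind.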
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);
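// Invokes the function in a1 after verifying it really is a JSFunction,
// patching in the global receiver when the callee expects one.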
static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);

  if (object->IsGlobalObject()) {

  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(),
                    call_kind);
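// Pushes the arguments expected by the interceptor runtime entries:
// interceptor info, receiver, holder, and the isolate address (the name
// register doubles as scratch here).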
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
  __ li(scratch, Operand(ExternalReference::isolate_address()));
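// Calls the "load property with interceptor (only)" runtime entry with the
// six arguments pushed above.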
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
  __ PrepareCEntryArgs(6);
  __ PrepareCEntryFunction(ref);
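// Helpers that reserve and release the kFastApiCallArguments stack slots
// that GenerateFastApiDirectCall fills in below (function, call data,
// isolate); the exact slot layout is elided in this excerpt.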
static const int kFastApiCallArguments = 4;

static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
  for (int i = 0; i < kFastApiCallArguments; i++) {

static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
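// Emits the direct call to a simple API callback: the callee JSFunction and
// its call data are materialized (re-fetched through the heap when the call
// data lives in new space), an exit frame with kApiStackSpace slots is
// entered to build the v8::Arguments block, and CallApiFunctionAndReturn
// performs the C call and unwinds kStackUnwindSpace stack slots.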
static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(t1, function);

  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ li(a0, api_call_info);
    __ li(t2, call_data);
  __ li(t3, Operand(ExternalReference::isolate_address()));

  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  __ li(t0, Operand(argc));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_API_CALL,
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
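// Compiles call-IC loads through a named interceptor (the class header is
// elided in this excerpt). CompileCacheable inlines a constant-function
// result, optionally through the fast API path, while CompileRegular always
// re-enters the runtime via kLoadPropertyWithInterceptorForCall.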
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               LookupResult* lookup,
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,

  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        const CallOptimization& optimization,
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();

    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      can_do_fast_api_call =

    __ IncrementCounter(counters->call_const_interceptor(), 1,

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
      ReserveSpaceForFastApiCall(masm, scratch1);

    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;

    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,

    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,

    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
      __ InvokeFunction(optimization.constant_function(), arguments_,

    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);

    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Handle<JSObject> interceptor_holder,
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForCall),

  void LoadWithInterceptor(MacroAssembler* masm,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
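// Verifies that the global property cell for name still holds the hole,
// i.e. the global object has not acquired the property since the stub was
// compiled.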
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
  Handle<JSGlobalPropertyCell> cell =
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
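// Walks the prototype chain from object up to holder and emits a cell check
// (above) for every global object encountered along the way.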
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
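// Converts a signed int to a binary32 float and stores it at
// dst + wordoffset * 4. With FPU support the conversion is done in hardware;
// otherwise the sign, exponent (computed with Clz), and mantissa are
// assembled by hand, with 1 and -1 handled as a special case.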
static void StoreIntAsFloat(MacroAssembler* masm,
    CpuFeatures::Scope scope(FPU);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);

    Label not_special, done;
    __ subu(scratch1, zero_reg, ival);
    __ Movn(ival, scratch1, fval);
    __ Branch(&not_special, gt, ival, Operand(1));

    static const uint32_t exponent_word_for_1 =
    __ Xor(scratch1, ival, Operand(1));
    __ li(scratch2, exponent_word_for_1);
    __ or_(scratch2, fval, scratch2);
    __ Movz(fval, scratch2, scratch1);

    __ bind(&not_special);
    Register zeros = scratch2;
    __ Clz(zeros, ival);
    __ subu(scratch1, scratch1, zeros);
    __ sll(scratch1, scratch1, kBinary32ExponentShift);
    __ or_(fval, fval, scratch1);
    __ Addu(zeros, zeros, Operand(1));
    __ sllv(ival, ival, zeros);
    __ or_(fval, fval, scratch1);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
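// Converts an unsigned int with the given number of leading zeroes into the
// hi/lo words of a double: the mantissa is split across the two words and
// the precomputed biased exponent is or-ed into the high word, with a
// fix-up for odd exponent values.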
static void GenerateUInt2Double(MacroAssembler* masm,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int mantissa_shift_for_hi_word =
  const int mantissa_shift_for_lo_word =
  if (mantissa_shift_for_hi_word > 0) {
    __ sll(loword, hiword, mantissa_shift_for_lo_word);
    __ srl(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
    __ mov(loword, zero_reg);
    __ sll(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);

  if (!(biased_exponent & 1)) {
    __ nor(scratch, scratch, scratch);
    __ and_(hiword, hiword, scratch);
#define __ ACCESS_MASM(masm())
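// Walks the prototype chain from the receiver to holder, emitting map checks
// (or negative dictionary lookups for uncached dictionary-mode prototypes),
// access checks for global proxies, and global-cell checks, and returns the
// register that ends up holding the holder.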
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Handle<String> name,
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  Register reg = object_reg;
  if (save_at_depth == depth) {

  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      Handle<Map> current_map(current->map());
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      if (heap()->InNewSpace(*prototype)) {
        __ li(reg, Operand(prototype));
    if (save_at_depth == depth) {
    current = prototype;

  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,

  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);

  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Handle<String> name,
  __ JumpIfSmi(receiver, miss);

  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);

void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss, scratch1);

  CheckPrototypes(object, receiver, holder,
                  scratch1, scratch2, scratch3, name, miss);

  __ LoadHeapObject(v0, value);
void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
                                                  Handle<AccessorInfo> callback,
                                                  Handle<String> name,
  ASSERT(!receiver.is(scratch1));
  ASSERT(!receiver.is(scratch2));
  ASSERT(!receiver.is(scratch3));

  Register dictionary = scratch1;
  __ bind(&probe_done);

  Register pointer = scratch3;
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ Branch(miss, ne, scratch2, Operand(callback));
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
  __ JumpIfSmi(receiver, miss, scratch1);

  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    GenerateDictionaryLoadCallback(
        reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);

  __ mov(scratch2, sp);
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3, callback);
    __ li(scratch3, Handle<Object>(callback->data()));
  __ Subu(sp, sp, 4 * kPointerSize);
  __ li(scratch3, Operand(ExternalReference::isolate_address()));
  __ mov(a2, scratch2);

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  __ Addu(a2, sp, kPointerSize);

  const int kStackUnwindSpace = 5;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Handle<String> name,
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  __ JumpIfSmi(receiver, miss);

  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo()) {
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);

  if (compile_followup_inline) {
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    if (must_preserve_receiver_reg) {
      __ Push(receiver, holder_reg, name_reg);
      __ Push(holder_reg, name_reg);

    CompileCallLoadPropertyWithInterceptor(masm(),
                                           interceptor_holder);

    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
    frame_scope.GenerateLeaveFrame();

    __ bind(&interceptor_failed);
    if (must_preserve_receiver_reg) {

    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   Handle<JSObject>(lookup->holder()),

    if (lookup->IsField()) {
      GenerateFastPropertyLoad(masm(), v0, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());

      Handle<AccessorInfo> callback(
      __ li(scratch2, callback);
      __ Push(receiver, holder_reg);
      __ li(scratch1, Operand(ExternalReference::isolate_address()));
      __ Push(scratch3, scratch1, scratch2, name_reg);

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
      __ TailCallExternalReference(ref, 6, 1);

    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
    __ TailCallExternalReference(ref, 6, 1);
void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Branch(miss, ne, a2, Operand(name));

void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
  ASSERT(holder->IsGlobalObject());

  const int argc = arguments().immediate();

  __ JumpIfSmi(a0, miss);
  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
  __ li(a3, Operand(cell));

  if (heap()->InNewSpace(*function)) {
    __ JumpIfSmi(a1, miss);
    __ GetObjectType(a1, a3, a3);
    __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
    __ Branch(miss, ne, t0, Operand(a3));
    __ Branch(miss, ne, a1, Operand(function));
void CallStubCompiler::GenerateMissBranch() {
  isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
  __ Jump(code, RelocInfo::CODE_TARGET);

                                          Handle<JSObject> holder,
                                          Handle<String> name) {
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  __ JumpIfSmi(a0, &miss, t0);

  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  GenerateMissBranch();
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  Register receiver = a1;

  const int argc = arguments().immediate();
  __ JumpIfSmi(receiver, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,

    Label attempt_to_grow_elements;

    Register elements = t2;
    Register end_elements = t1;
    __ CheckMap(elements,
                Heap::kFixedArrayMapRootIndex,

    __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));

    Label with_write_barrier;
    __ JumpIfNotSmi(t0, &with_write_barrier);

    __ Addu(end_elements, elements, end_elements);
    const int kEndElementsOffset =
    __ Addu(end_elements, end_elements, kEndElementsOffset);

    __ bind(&with_write_barrier);

    if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
      Label fast_object, not_fast_object;
      __ CheckFastObjectElements(a3, t3, &not_fast_object);
      __ jmp(&fast_object);
      __ bind(&not_fast_object);
      __ CheckFastSmiElements(a3, t3, &call_builtin);

      Label try_holey_map;
      __ mov(a2, receiver);
      __ jmp(&fast_object);

      __ bind(&try_holey_map);
      __ mov(a2, receiver);
      __ bind(&fast_object);

    __ CheckFastObjectElements(a3, a3, &call_builtin);

    __ Addu(end_elements, elements, end_elements);
    __ Addu(end_elements, end_elements, kEndElementsOffset);

    __ RecordWrite(elements,

    __ bind(&attempt_to_grow_elements);

    if (!FLAG_inline_new) {
      __ Branch(&call_builtin);

    Label no_fast_elements_check;
    __ JumpIfSmi(a2, &no_fast_elements_check);
    __ CheckFastObjectElements(t3, t3, &call_builtin);
    __ bind(&no_fast_elements_check);

    ExternalReference new_space_allocation_top =
        ExternalReference::new_space_allocation_top_address(
    ExternalReference new_space_allocation_limit =
        ExternalReference::new_space_allocation_limit_address(

    const int kAllocationDelta = 4;
    __ Addu(end_elements, elements, end_elements);
    __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
    __ li(t3, Operand(new_space_allocation_top));
    __ Branch(&call_builtin, ne, end_elements, Operand(a3));

    __ li(t5, Operand(new_space_allocation_limit));
    __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
    __ Branch(&call_builtin, hi, a3, Operand(t5));

    __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
    for (int i = 1; i < kAllocationDelta; i++) {

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,

  GenerateMissBranch();

  return GetCode(function);
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = a1;
  Register elements = a3;
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  __ JumpIfSmi(receiver, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  t0, v0, name, &miss);

  __ CheckMap(elements,
              Heap::kFixedArrayMapRootIndex,

  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));

  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
  __ Addu(elements, elements, t1);
  __ Branch(&call_builtin, eq, v0, Operand(t2));

  __ bind(&return_undefined);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,

  GenerateMissBranch();

  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  v0, holder, a1, a3, t0, name, &miss);

  Register receiver = a1;
  Register index = t1;
  Register result = v0;
  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharCodeAtGenerator generator(receiver,
                                      index_out_of_range_label,

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kNanValueRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  if (kind_ == Code::CALL_IC &&
    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  v0, holder, a1, a3, t0, name, &miss);

  Register receiver = v0;
  Register index = t1;
  Register scratch = a3;
  Register result = v0;
  __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharAtGenerator generator(receiver,
                                  index_out_of_range_label,

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kEmptyStringRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,

    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ JumpIfNotSmi(code, &slow);

  StringCharFromCodeGenerator generator(code, v0);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  CpuFeatures::Scope scope_fpu(FPU);
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,

    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
  __ Ret(eq, t0, Operand(zero_reg));

  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;

  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));

                      >> HeapNumber::kMantissaBitsInTopWord));
  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
  __ Branch(&restore_fcsr_and_return, ge, t3,
  __ Branch(&wont_fit_smi);

  __ bind(&no_fpu_error);
  __ Addu(a1, v0, Operand(0x40000000));
  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));

  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));

  __ bind(&restore_fcsr_and_return);

  __ bind(&wont_fit_smi);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,

    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  __ JumpIfNotSmi(v0, &not_smi);

  __ Subu(v0, a1, t0);
  __ Branch(&slow, lt, v0, Operand(zero_reg));

  Label negative_sign;
  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));

  __ bind(&negative_sign);
  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);

  GenerateMissBranch();

  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);

  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  const int argc = arguments().immediate();
  __ JumpIfSmi(a1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, a0, a3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);

  ReserveSpaceForFastApiCall(masm(), a0);

  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,

  GenerateFastApiDirectCall(masm(), optimization, argc);

  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  return GetCode(function);
                                          Handle<JSObject> holder,
                                          Handle<JSFunction> function,
                                          Handle<String> name,
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<JSGlobalPropertyCell>::null(),
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  __ JumpIfSmi(a1, &miss);

      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
      CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,

      if (object->IsGlobalObject()) {

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ GetObjectType(a1, a3, a3);
        GenerateDirectLoadGlobalFunctionPrototype(
            a0, holder, a3, a1, t0, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ JumpIfSmi(a1, &fast);
        __ GetObjectType(a1, a0, a0);
        GenerateDirectLoadGlobalFunctionPrototype(
            a0, holder, a3, a1, t0, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ LoadRoot(t0, Heap::kTrueValueRootIndex);
        __ Branch(&fast, eq, a1, Operand(t0));
        __ LoadRoot(t0, Heap::kFalseValueRootIndex);
        __ Branch(&miss, ne, a1, Operand(t0));
        GenerateDirectLoadGlobalFunctionPrototype(
            a0, holder, a3, a1, t0, name, &miss);

                    function, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  GenerateMissBranch();

  return GetCode(function);
                                           Handle<JSObject> holder,
                                           Handle<String> name) {
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  GenerateMissBranch();
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
    Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  if (object->IsGlobalObject()) {

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
  ParameterCount expected(function->shared()->formal_parameter_count());
                NullCallWrapper(), call_kind);

  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
  GenerateMissBranch();
                                                  Handle<Map> transition,
                                                  Handle<String> name) {
  GenerateStoreField(masm(),

  __ li(a2, Operand(Handle<String>(name)));
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(transition.is_null()
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  __ JumpIfSmi(a1, &miss, a3);
  CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);

  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ li(a3, Operand(callback));
  __ Push(a3, a2, a0);

  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
  __ TailCallExternalReference(store_callback_property, 4, 1);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
#define __ ACCESS_MASM(masm)

    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  if (!setter.is_null()) {
    ParameterCount actual(1);
    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
#define __ ACCESS_MASM(masm())

    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> setter) {
  __ JumpIfSmi(a1, &miss);
  CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
    Handle<JSObject> receiver,
    Handle<String> name) {
  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,

  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(a1, a3, &miss);

  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(a1, a2, a0);

  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
  __ TailCallExternalReference(store_ic_property, 4, 1);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
    Handle<GlobalObject> object,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name) {
  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));

  __ li(t0, Operand(cell));
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ Branch(&miss, eq, t1, Operand(t2));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);

  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
    Handle<JSObject> object,
    Handle<JSObject> last) {
  __ JumpIfSmi(a0, &miss);

  CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);

  if (last->IsGlobalObject()) {
    GenerateCheckPropertyCell(
        masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);

  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  GenerateLoadMiss(masm(), Code::LOAD_IC);
                                              Handle<JSObject> holder,
                                              Handle<String> name) {
  GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);
    Handle<String> name,
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, t1, callback, name,

  GenerateLoadMiss(masm(), Code::LOAD_IC);
#define __ ACCESS_MASM(masm)

    Handle<JSFunction> getter) {
  if (!getter.is_null()) {
    ParameterCount actual(0);
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
#define __ ACCESS_MASM(masm())

    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> getter) {
  __ JumpIfSmi(a0, &miss);
  CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);
                                                 Handle<JSObject> holder,
                                                 Handle<JSFunction> value,
                                                 Handle<String> name) {
  GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);

  GenerateLoadMiss(masm(), Code::LOAD_IC);
                                                    Handle<JSObject> holder,
                                                    Handle<String> name) {
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object, holder, &lookup, a0, a2, a3, a1, t0, name,

  GenerateLoadMiss(masm(), Code::LOAD_IC);
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name,
    bool is_dont_delete) {
  __ JumpIfSmi(a0, &miss);
  CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);

  __ li(a3, Operand(cell));

  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);

  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);
                                                     Handle<JSObject> receiver,
                                                     Handle<JSObject> holder,
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, t1, callback,

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> value) {
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<String> name) {
  __ Branch(&miss, ne, a0, Operand(name));

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, a1, a0, a2, a3, t0, name,

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<String> name) {
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadArrayLength(masm(), a1, a2, &miss);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);

  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);

  __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);

  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);

  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(Code::NORMAL, factory()->empty_string());
  __ JumpIfSmi(a1, &miss);

  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET,
            eq, a2, Operand(receiver_maps->at(current)));

  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
                                                      Handle<Map> transition,
                                                      Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);

  __ Branch(&miss, ne, a1, Operand(name));

  GenerateStoreField(masm(),

  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(transition.is_null()
    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
      KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(Code::NORMAL, factory()->empty_string());
  __ JumpIfSmi(a2, &miss);

  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET,
              eq, a3, Operand(receiver_maps->at(i)));
      __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i)));
      __ li(a3, Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);

  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
    Handle<JSFunction> function) {
  Label generic_stub_call;

  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  __ Branch(&generic_stub_call, ne, a2, Operand(t7));

  __ JumpIfSmi(a2, &generic_stub_call);
  __ GetObjectType(a2, a3, t0);
  __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));

  __ Check(ne, "Function constructed by construct stub.",

  __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);

  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
  __ Addu(t5, t5, Operand(3 * kPointerSize));

  __ Addu(a1, a1, sp);

  Handle<SharedFunctionInfo> shared(function->shared());
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
      __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
      __ Addu(t5, t5, kPointerSize);
      __ bind(&not_passed);
      __ Addu(t5, t5, Operand(kPointerSize));
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ li(a2, Operand(constant));
      __ Addu(t5, t5, kPointerSize);

  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
    __ Addu(t5, t5, kPointerSize);

  __ Addu(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);

  __ bind(&generic_stub_call);
  Handle<Code> generic_construct_stub =
      masm()->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
#define __ ACCESS_MASM(masm)

    MacroAssembler* masm) {
  Label slow, miss_force_generic;

  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss_force_generic);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
static bool IsElementTypeSigned(ElementsKind elements_kind) {
  switch (elements_kind) {
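// Ensures the key is a smi before it is used as an element index. With FPU
// support, a heap-number key that converts exactly to a smi-sized integer is
// converted in place; anything else branches to fail.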
static void GenerateSmiKeyCheck(MacroAssembler* masm,
                                FPURegister double_scratch0,
    CpuFeatures::Scope scope(FPU);
    __ JumpIfSmi(key, &key_ok);
                Heap::kHeapNumberMapRootIndex,
    __ Branch(fail, ne, scratch1, Operand(zero_reg));
    __ mfc1(scratch0, double_scratch0);
    __ SmiTagCheckOverflow(key, scratch0, scratch1);
    __ BranchOnOverflow(fail, scratch1);
    __ JumpIfNotSmi(key, fail);
    MacroAssembler* masm,
  Label miss_force_generic, slow, failed_allocation;

  Register receiver = a1;

  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

  Register value = a2;
  switch (elements_kind) {
      __ addu(t3, a3, t2);
      __ addu(t3, a3, t2);
      __ addu(t3, a3, key);
      __ addu(t3, a3, key);
      __ addu(t3, a3, t2);
      __ addu(t3, a3, t3);
        CpuFeatures::Scope scope(FPU);
      __ addu(t3, a3, t2);
        CpuFeatures::Scope scope(FPU);

    __ Subu(t3, value, Operand(0xC0000000));
    __ Branch(&box_int, lt, t3, Operand(zero_reg));

    __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
      CpuFeatures::Scope scope(FPU);
      CpuFeatures::Scope scope(FPU);
      __ And(t2, value, Operand(0xC0000000));
      __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));

      __ bind(&pl_box_int);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t2, t3, t6, &slow);

      Label box_int_0, box_int_1, done;
      __ And(t2, value, Operand(0x80000000));
      __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
      __ And(t2, value, Operand(0x40000000));
      __ Branch(&box_int_1, ne, t2, Operand(zero_reg));

      Register hiword = value;
      Register loword = a3;

      __ bind(&box_int_0);
      GenerateUInt2Double(masm, hiword, loword, t0, 0);

      __ bind(&box_int_1);
      GenerateUInt2Double(masm, hiword, loword, t0, 1);

      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
      CpuFeatures::Scope scope(FPU);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

      Label exponent_rebiased;
      __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));

      __ Xor(t1, t5, Operand(0xFF));
      __ Movz(t5, t0, t1);
      __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));

      __ bind(&exponent_rebiased);
      __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);

      static const int kMantissaShiftForHiWord =
      static const int kMantissaShiftForLoWord =
      __ srl(t0, t4, kMantissaShiftForHiWord);
      __ sll(a0, t4, kMantissaShiftForLoWord);

      CpuFeatures::Scope scope(FPU);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  __ bind(&miss_force_generic);
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);
    MacroAssembler* masm,
  Label slow, check_heap_number, miss_force_generic;

  Register value = a0;
  Register receiver = a2;

  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

  __ JumpIfNotSmi(value, &slow);
  __ JumpIfNotSmi(value, &check_heap_number);
  __ SmiUntag(t1, value);

  switch (elements_kind) {
      __ li(v0, Operand(255));
      __ Branch(&done, gt, t1, Operand(v0));
      __ mov(v0, zero_reg);
      __ addu(t8, a3, t8);
      __ addu(t8, a3, t8);
      __ addu(t8, a3, key);
      __ addu(t8, a3, t8);
      __ SmiUntag(t0, key);
      StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
      __ addu(a3, a3, t8);
          masm, t1, destination,
        CpuFeatures::Scope scope(FPU);

  __ bind(&check_heap_number);
  __ GetObjectType(value, t1, t2);

    CpuFeatures::Scope scope(FPU);
    __ addu(t8, a3, t8);
    __ addu(t8, a3, t8);
    __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);

    switch (elements_kind) {
        __ addu(t8, a3, t8);
        __ addu(t8, a3, key);
        __ addu(t8, a3, t8);

      Label done, nan_or_infinity_or_zero;
      static const int kMantissaInHiWordShift =
      static const int kMantissaInLoWordShift =
      __ and_(t6, t3, t5);
      __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));

      __ xor_(t1, t6, t5);
      __ Movz(t6, t2, t1);
      __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));

      __ Movn(t3, t2, t1);
      __ Movn(t3, t2, t1);
      __ sll(t3, t3, kMantissaInHiWordShift);
      __ srl(t4, t4, kMantissaInLoWordShift);
      __ sll(t6, t6, kBinary32ExponentShift);
      __ addu(t9, a3, t9);

      __ bind(&nan_or_infinity_or_zero);
      __ sll(t3, t3, kMantissaInHiWordShift);
      __ srl(t4, t4, kMantissaInLoWordShift);
      __ addu(t8, a3, t8);

      bool is_signed_type = IsElementTypeSigned(elements_kind);
      int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

      __ and_(t6, t3, t5);
      __ Movz(t3, zero_reg, t6);
      __ Branch(&done, eq, t6, Operand(zero_reg));

      __ xor_(t2, t6, t5);
      __ Movz(t3, zero_reg, t2);
      __ Branch(&done, eq, t6, Operand(t5));

      __ slt(t2, t6, zero_reg);
      __ Movn(t3, zero_reg, t2);
      __ Branch(&done, lt, t6, Operand(zero_reg));

      __ slti(t1, t6, meaningful_bits - 1);
      __ li(t2, min_value);
      __ Movz(t3, t2, t1);
      __ Branch(&done, ge, t6, Operand(meaningful_bits - 1));

      __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

      __ li(t9, HeapNumber::kMantissaBitsInTopWord);
      __ subu(t6, t9, t6);
      __ slt(t1, t6, zero_reg);
      __ srlv(t2, t3, t6);
      __ Movz(t3, t2, t1);
      __ Branch(&sign, ge, t6, Operand(zero_reg));

      __ subu(t6, zero_reg, t6);
      __ sllv(t3, t3, t6);
      __ li(t9, meaningful_bits);
      __ subu(t6, t9, t6);
      __ srlv(t4, t4, t6);

      __ subu(t2, t3, zero_reg);
      __ Movz(t3, t2, t5);

    switch (elements_kind) {
        __ addu(t8, a3, t8);
        __ addu(t8, a3, key);
        __ addu(t8, a3, t8);

  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
  Label miss_force_generic;

  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);

  __ AssertFastElements(a2);

  __ Addu(t0, t0, a3);
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ Branch(&miss_force_generic, eq, t0, Operand(t1));

  __ bind(&miss_force_generic);
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);
4463 MacroAssembler* masm) {
4469 Label miss_force_generic, slow_allocate_heapnumber;
4471 Register key_reg = a0;
4472 Register receiver_reg = a1;
4473 Register elements_reg = a2;
4474 Register heap_number_reg = a2;
4475 Register indexed_double_offset = a3;
4476 Register scratch = t0;
4477 Register scratch2 = t1;
4478 Register scratch3 = t2;
4479 Register heap_number_map = t3;
4485 GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4493 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4497 __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
4498 uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4503 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4504 __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4505 heap_number_map, &slow_allocate_heapnumber);
4511 __ lw(scratch, FieldMemOperand(indexed_double_offset, FixedArray::kHeaderSize));  // Lower (mantissa) word of the element.
4516 __ mov(v0, heap_number_reg);
4519 __ bind(&slow_allocate_heapnumber);
4520 Handle<Code> slow_ic =
4521 masm->isolate()->builtins()->KeyedLoadIC_Slow();
4522 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4524 __ bind(&miss_force_generic);
4525 Handle<Code> miss_ic =
4526 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4527 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
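// In the fast-double load above, holes are encoded as a reserved NaN bit
// pattern, so the stub only compares the upper word of the element against
// kHoleNanUpper32 before boxing the value in a freshly allocated HeapNumber.
// A sketch of that test (kHoleNanUpper32 is the V8 constant used above; the
// function itself is illustrative):
#include <stdint.h>
#include <string.h>

extern const uint32_t kHoleNanUpper32;

static bool IsHoleDouble(double value) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));
  return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32;
}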
4531 void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, bool is_js_array, ElementsKind elements_kind, KeyedAccessGrowMode grow_mode) {
4544 Label miss_force_generic, transition_elements_kind, grow, slow;
4545 Label finish_store, check_capacity;
4547 Register value_reg = a0;
4548 Register key_reg = a1;
4549 Register receiver_reg = a2;
4550 Register scratch = t0;
4551 Register elements_reg = a3;
4552 Register length_reg = t1;
4553 Register scratch2 = t2;
4559 GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4562 __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4575 __ Branch(&grow, hs, key_reg, Operand(scratch));
4577 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4581 __ CheckMap(elements_reg,
4583 Heap::kFixedArrayMapRootIndex,
4584 &miss_force_generic,
4587 __ bind(&finish_store);
4595 __ Addu(scratch, scratch, scratch2);
4604 __ Addu(scratch, scratch, scratch2);
4606 __ mov(receiver_reg, value_reg);
4607 __ RecordWrite(elements_reg,
4617 __ bind(&miss_force_generic);
4618 Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4620 __ Jump(ic, RelocInfo::CODE_TARGET);
4622 __ bind(&transition_elements_kind);
4623 Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4624 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4632 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));
4640 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4641 __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4644 __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
4647 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
4651 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
4671 __ bind(&check_capacity);
4673 __ CheckMap(elements_reg,
4675 Heap::kFixedCOWArrayMapRootIndex,
4676 &miss_force_generic,
4680 __ Branch(&slow, hs, length_reg, Operand(scratch));
4685 __ jmp(&finish_store);
4688 Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4689 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
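// The grow path above only fires when the store is exactly one element past
// the current length of a JSArray: an empty backing store gets a freshly
// allocated FixedArray, a store within spare capacity just bumps the length,
// and everything else defers to the slow or generic stub. A condensed sketch
// of that routing (hypothetical names, not the stub's exact checks):
#include <stdint.h>

enum class StorePath { kFinishStore, kGrow, kSlow, kMissForceGeneric };

static StorePath ClassifyFastStore(uint32_t index, uint32_t length,
                                   uint32_t capacity, bool grow_allowed) {
  if (index < length) return StorePath::kFinishStore;
  if (!grow_allowed || index != length) return StorePath::kMissForceGeneric;
  // index == length: grow in place when capacity allows; otherwise take the
  // slow path, which may allocate a bigger backing store.
  return index < capacity ? StorePath::kGrow : StorePath::kSlow;
}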
4694 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(MacroAssembler* masm, bool is_js_array, KeyedAccessGrowMode grow_mode) {
4709 Label miss_force_generic, transition_elements_kind, grow, slow;
4710 Label finish_store, check_capacity;
4712 Register value_reg = a0;
4713 Register key_reg = a1;
4714 Register receiver_reg = a2;
4715 Register elements_reg = a3;
4716 Register scratch1 = t0;
4717 Register scratch2 = t1;
4718 Register scratch3 = t2;
4719 Register scratch4 = t3;
4720 Register length_reg = t3;
4726 GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
4741 __ Branch(&grow, hs, key_reg, Operand(scratch1));
4743 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
4746 __ bind(&finish_store);
4748 __ StoreNumberToDoubleElements(value_reg,
4757 &transition_elements_kind);
4760 __ mov(v0, value_reg);
4763 __ bind(&miss_force_generic);
4764 Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4766 __ Jump(ic, RelocInfo::CODE_TARGET);
4768 __ bind(&transition_elements_kind);
4769 Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4770 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4778 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));
4782 __ JumpIfSmi(value_reg, &value_is_smi);
4784 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4785 __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
4786 __ bind(&value_is_smi);
4794 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4795 __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4798 __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
4803 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
4805 __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4821 __ jmp(&finish_store);
4823 __ bind(&check_capacity);
4827 __ Branch(&slow, hs, length_reg, Operand(scratch1));
4832 __ jmp(&finish_store);
4835 Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
4836 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
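// The double-array grow path mirrors the FixedArray one above: only a store
// at index == length can grow, an empty backing store is replaced by a fresh
// FixedDoubleArray sized to JSArray::kPreallocatedArrayElements, and values
// that are neither smis nor heap numbers branch to transition_elements_kind
// instead of being stored.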
4845 #endif // V8_TARGET_ARCH_MIPS