#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm)

static void ProbeTable(Isolate* isolate,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the three parallel tables: values and
  // maps follow the keys, word-aligned and within a 256-word displacement.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));
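
  // Each stub cache entry is sizeof(Entry) == 12 bytes: a key (symbol), a
  // value (code object), and a map, kept in three parallel arrays. The probe
  // below therefore scales the entry index by 3 (sll + Addu) and then by the
  // pointer size, so base_addr points at the entry's key slot and the value
  // and map slots are reached with fixed displacements from it.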
  Register base_addr = scratch;

  // offset_scratch = offset * 3.
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  __ li(base_addr, Operand(key_offset));
  __ Addu(base_addr, base_addr, at);

  // Check that the key in the entry matches the name.
  __ Branch(&miss, ne, name, Operand(at));

  // Check the map matches.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache.
  Register code = scratch2;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  __ Branch(&miss, ne, flags_reg, Operand(flags));
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  Register properties = scratch0;

  // Check that the properties array is a dictionary.
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
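

// The megamorphic stub cache probe below hashes the (name, map) pair into
// the primary table; on a mismatch there, a second hash probes the secondary
// table, and only if both probes fail does control fall through to the
// generic cache miss handling. Masking with (table size - 1) keeps each
// scaled index in range.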
  Isolate* isolate = masm->isolate();

  // Entries in the megamorphic stub cache are 12 bytes (see ProbeTable).
  ASSERT(sizeof(Entry) == 12);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Compute the hash for the primary table and probe it.
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  __ And(scratch, scratch, Operand(mask));

  // Compute the hash for the secondary table and probe it.
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ And(scratch, scratch, Operand(mask2));

  // Cache miss: fall through and bail out.
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       Register prototype) {

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
  Isolate* isolate = masm->isolate();

  // Check we're still in the same context.
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->global_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Handle<JSObject> holder,
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
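
  // After the subtraction above, a negative index denotes an in-object
  // property: its slot lies inside the object itself, at instance_size()
  // plus the (negative) index times kPointerSize. A non-negative index
  // (handled by code elided from this excerpt) refers to a slot in the
  // external properties backing store instead.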
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
static void GenerateStringCheck(MacroAssembler* masm,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  __ Branch(non_string_object,


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            bool support_wrappers) {
  // Check if the object is a string, leaving the instance type in scratch1.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);

    // Check if the wrapped value is a string and load its length directly.
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(v0, scratch1);
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      Handle<Map> transition,
                                      Register receiver_reg,
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);

  // Check that we are allowed to write this.
  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } while (holder->GetPrototype()->IsJSObject());
    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ bind(&done_check);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  if (!transition.is_null() &&
      (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),

  if (!transition.is_null()) {
    // Update the map of the object.
    __ li(scratch1, Operand(transition));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,

  // Adjust for the number of properties stored in the object.
  index -= object->map()->inobject_properties();

    // Get the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);

    // Skip the write barrier if the value is a smi.
    __ JumpIfSmi(a0, &exit, scratch1);

    // Update the write barrier for the field.
    __ mov(name_reg, a0);
    __ RecordWriteField(receiver_reg,

    __ JumpIfSmi(a0, &exit);

    __ mov(name_reg, a0);
    __ RecordWriteField(scratch1,
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);
static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);

  // Patch the receiver on the stack with the global proxy if necessary.
  if (object->IsGlobalObject()) {

  // Invoke the function.
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(),
                    call_kind);
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Handle<JSObject> holder_obj) {
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
  __ li(scratch, Operand(ExternalReference::isolate_address()));


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
  __ PrepareCEntryArgs(6);
  __ PrepareCEntryFunction(ref);
static const int kFastApiCallArguments = 4;
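
// The four extra stack slots reserved below hold, in the stack layout used
// by GenerateFastApiDirectCall (its slot-by-slot stores are elided from this
// excerpt), the holder plus the callee JS function, the call data and the
// isolate pointer that are loaded into t1, t2 and t3 there.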
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
  for (int i = 0; i < kFastApiCallArguments; i++) {


static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
  // Get the function and set up the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(t1, function);

  // Pass the additional arguments.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ li(a0, api_call_info);
    __ li(t2, call_data);

  __ li(t3, Operand(ExternalReference::isolate_address()));

  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  __ li(t0, Operand(argc));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_API_CALL,
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               LookupResult* lookup,
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,

  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        const CallOptimization& optimization,
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();

    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      depth2 = optimization.GetPrototypeDepthOfExpectedType(
          interceptor_holder, Handle<JSObject>(lookup->holder()));
      can_do_fast_api_call =

    __ IncrementCounter(counters->call_const_interceptor(), 1,

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
      ReserveSpaceForFastApiCall(masm, scratch1);

    // Check that the maps from receiver to interceptor's holder haven't
    // changed, and thus we can invoke the interceptor.
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    // Invoke the interceptor and, if it provides a value, branch to
    // |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed, and thus we can use the cached constant
    // function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,

    // Invoke the function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
      __ InvokeFunction(optimization.constant_function(), arguments_,

    // Deferred code for the fast API call case: clean the preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Handle<JSObject> interceptor_holder,
    stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                    scratch1, scratch2, scratch3,

    PushInterceptorArguments(masm, receiver, holder, name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(
            IC_Utility(IC::kLoadPropertyWithInterceptorForCall),

  void LoadWithInterceptor(MacroAssembler* masm,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
      CompileCallLoadPropertyWithInterceptor(masm,

    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
  Handle<JSGlobalPropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));


static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
static void StoreIntAsFloat(MacroAssembler* masm,
    CpuFeatures::Scope scope(FPU);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);

    Label not_special, done;
    // Negate the value if the sign bit (already moved into fval) is set.
    __ subu(scratch1, zero_reg, ival);
    __ Movn(ival, scratch1, fval);

    __ Branch(&not_special, gt, ival, Operand(1));

    // For 1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
    __ Xor(scratch1, ival, Operand(1));
    __ li(scratch2, exponent_word_for_1);
    __ or_(scratch2, fval, scratch2);
    __ Movz(fval, scratch2, scratch1);

    __ bind(&not_special);
    // Count leading zeros.
    Register zeros = scratch2;
    __ Clz(zeros, ival);

    // Compute the biased exponent and or it into the result.
    __ subu(scratch1, scratch1, zeros);

    __ sll(scratch1, scratch1, kBinary32ExponentShift);
    __ or_(fval, fval, scratch1);

    // Shift the value up, chopping the implicit top bit off.
    __ Addu(zeros, zeros, Operand(1));
    __ sllv(ival, ival, zeros);

    // Put the mantissa into the result.
    __ or_(fval, fval, scratch1);

    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
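
// For reference, a portable sketch of the binary32 encoding performed by
// hand above when no FPU is available. This is a hypothetical helper, not
// part of this file; it truncates excess mantissa bits like the assembly
// does and uses a GCC-style CLZ builtin in place of the Clz instruction:
//
//   static uint32_t EncodeBinary32(int32_t ival) {
//     uint32_t sign = ival < 0 ? 0x80000000u : 0u;
//     uint32_t mag = ival < 0 ? 0u - (uint32_t)ival : (uint32_t)ival;
//     if (mag == 0) return sign;                    // +/-0.0f
//     int zeros = __builtin_clz(mag);               // leading zero count
//     uint32_t exponent = (uint32_t)(127 + 31 - zeros) << 23;
//     uint64_t top = (uint64_t)mag << (zeros + 1);  // drop the implicit 1
//     uint32_t mantissa = (uint32_t)(top >> 9) & 0x007FFFFFu;
//     return sign | exponent | mantissa;            // e.g. 1 -> 0x3F800000
//   }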
static void GenerateUInt2Double(MacroAssembler* masm,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;

  const int mantissa_shift_for_hi_word =

  const int mantissa_shift_for_lo_word =

  if (mantissa_shift_for_hi_word > 0) {
    __ sll(loword, hiword, mantissa_shift_for_lo_word);
    __ srl(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
    __ mov(loword, zero_reg);
    __ sll(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);

  // If the least significant bit of the biased exponent was not 1 it was
  // corrupted by the most significant bit of the mantissa, so fix that.
  if (!(biased_exponent & 1)) {
    __ nor(scratch, scratch, scratch);
    __ and_(hiword, hiword, scratch);
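
// GenerateUInt2Double packs an unsigned integer with a known number of
// leading zero bits into the hi/lo words of an IEEE-754 double: with
// meaningful_bits significant bits below the implicit top bit, the shift
// constants above (their definitions are elided here) split the mantissa
// across the two words, the caller-supplied biased exponent is or'ed into
// the hi word via scratch, and the final mask repairs the exponent bit
// that an overlapping mantissa bit may have corrupted.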
#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Handle<String> name,
  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;

  if (save_at_depth == depth) {

  // Traverse the prototype chain and check the maps in the chain.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      Handle<Map> current_map(current->map());

      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);

      if (heap()->InNewSpace(*prototype)) {
        __ li(reg, Operand(prototype));

    if (save_at_depth == depth) {

    // Go to the next object in the prototype chain.
    current = prototype;

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed. We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Handle<String> name,
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);


void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  CheckPrototypes(object, receiver, holder,
                  scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ LoadHeapObject(v0, value);
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3, callback);
    __ li(scratch3, Handle<Object>(callback->data()));

  __ li(scratch3, Operand(ExternalReference::isolate_address()));

  __ mov(a2, scratch2);  // Saved in case scratch2 == a1.

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  const int kStackUnwindSpace = 5;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Handle<String> name,
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo()) {
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code, and the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    if (must_preserve_receiver_reg) {
      __ Push(receiver, holder_reg, name_reg);
      __ Push(holder_reg, name_reg);

      // Invoke the interceptor. Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             interceptor_holder);

      // Check if the interceptor provided a value for the property. If
      // it did, return immediately.
      Label interceptor_failed;
      __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
      frame_scope.GenerateLeaveFrame();

      __ bind(&interceptor_failed);

      if (must_preserve_receiver_reg) {

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed, and load lookup's holder into |holder_reg|.
    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   Handle<JSObject>(lookup->holder()),

    if (lookup->type() == FIELD) {
      // We found a FIELD property in the prototype chain of the
      // interceptor's holder. Retrieve the field from the field's holder.
      GenerateFastPropertyLoad(masm(), v0, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());
      // We found a CALLBACKS property in the prototype chain of the
      // interceptor's holder.
      Handle<AccessorInfo> callback(
          AccessorInfo::cast(lookup->GetCallbackObject()));

      __ li(scratch2, callback);

      __ Push(receiver, holder_reg);

      __ li(scratch1, Operand(ExternalReference::isolate_address()));
      __ Push(scratch3, scratch1, scratch2, name_reg);

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
      __ TailCallExternalReference(ref, 6, 1);

    // Call the runtime system to load the interceptor.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
    __ TailCallExternalReference(ref, 6, 1);
void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  __ Branch(miss, ne, a2, Operand(name));


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a0, miss);
  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
  // Get the value from the cell.
  __ li(a3, Operand(cell));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(*function)) {
    __ JumpIfSmi(a1, miss);
    __ GetObjectType(a1, a3, a3);
    __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
    __ Branch(miss, ne, t0, Operand(a3));
    __ Branch(miss, ne, a1, Operand(function));
void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
  __ Jump(code, RelocInfo::CODE_TARGET);
Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                Handle<String> name) {
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a0, &miss, t0);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(FIELD, name);
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // If the object is not an array, bail out to the regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  Register receiver = a1;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,

    Label attempt_to_grow_elements;

    Register elements = t2;
    Register end_elements = t1;

    // Check that the elements are in fast mode and writable.
    __ CheckMap(elements,
                Heap::kFixedArrayMapRootIndex,

    __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));

    // Check if the value is a smi.
    Label with_write_barrier;

    __ JumpIfNotSmi(t0, &with_write_barrier);

    // Store the value.
    // We may need a register containing the address end_elements below,
    // so write back the value in end_elements.
    __ Addu(end_elements, elements, end_elements);
    const int kEndElementsOffset =
    __ Addu(end_elements, end_elements, kEndElementsOffset);

    __ bind(&with_write_barrier);

    if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
      Label fast_object, not_fast_object;
      __ CheckFastObjectElements(a3, t3, &not_fast_object);
      __ jmp(&fast_object);
      // In case of fast smi-only elements, convert to fast object elements,
      // otherwise bail out.
      __ bind(&not_fast_object);
      __ CheckFastSmiElements(a3, t3, &call_builtin);

      Label try_holey_map;

      __ mov(a2, receiver);

      __ jmp(&fast_object);

      __ bind(&try_holey_map);

      __ mov(a2, receiver);

      __ bind(&fast_object);
      __ CheckFastObjectElements(a3, a3, &call_builtin);

    __ Addu(end_elements, elements, end_elements);
    __ Addu(end_elements, end_elements, kEndElementsOffset);

    __ RecordWrite(elements,

    __ bind(&attempt_to_grow_elements);

    if (!FLAG_inline_new) {
      __ Branch(&call_builtin);

      Label no_fast_elements_check;
      __ JumpIfSmi(a2, &no_fast_elements_check);
      __ CheckFastObjectElements(t3, t3, &call_builtin);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(

      const int kAllocationDelta = 4;
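
      // Growing in place is possible only when the array's backing store
      // ends exactly at the new-space allocation top (compared against a3
      // below) and kAllocationDelta more words still fit under the
      // allocation limit; in any other situation we fall back to the
      // builtin.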
      __ Addu(end_elements, elements, end_elements);
      __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
      __ li(t3, Operand(new_space_allocation_top));
      __ Branch(&call_builtin, ne, end_elements, Operand(a3));

      __ li(t5, Operand(new_space_allocation_limit));
      __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
      __ Branch(&call_builtin, hi, a3, Operand(t5));

      __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {

    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,

  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // If the object is not an array, bail out to the regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = a1;
  Register elements = a3;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();

  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  t0, v0, name, &miss);

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements,
              Heap::kFixedArrayMapRootIndex,

  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));

  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);

  // Get the last element.
  __ Addu(elements, elements, t1);

  __ Branch(&call_builtin, eq, v0, Operand(t2));

  __ bind(&return_undefined);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,

  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // If the object is not a string, bail out to the regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),

  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  v0, holder, a1, a3, t0, name, &miss);

  Register receiver = a1;
  Register index = t1;
  Register result = v0;

    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharCodeAtGenerator generator(receiver,
                                      index_out_of_range_label,
  generator.GenerateFast(masm());

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kNanValueRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // If the object is not a string, bail out to the regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

    index_out_of_range_label = &miss;

  GenerateNameCheck(name, &name_miss);

  GenerateDirectLoadGlobalFunctionPrototype(masm(),

  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  v0, holder, a1, a3, t0, name, &miss);

  Register receiver = v0;
  Register index = t1;
  Register scratch = a3;
  Register result = v0;

    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);

  StringCharAtGenerator generator(receiver,
                                  index_out_of_range_label,
  generator.GenerateFast(masm());

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kEmptyStringRootIndex);

  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  // Check that the char code argument is a smi.
  __ JumpIfNotSmi(code, &slow);

  StringCharFromCodeGenerator generator(code, v0);
  generator.GenerateFast(masm());

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  GenerateMissBranch();

  // Return the generated code.
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  CpuFeatures::Scope scope_fpu(FPU);
  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  // If the argument is already a smi, just return it.
  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
  __ Ret(eq, t0, Operand(zero_reg));

  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;

  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);

  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));

       >> HeapNumber::kMantissaBitsInTopWord));
  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));

  __ Branch(&restore_fcsr_and_return, ge, t3,

  __ Branch(&wont_fit_smi);

  __ bind(&no_fpu_error);
  // Check that the result fits into a smi.
  __ Addu(a1, v0, Operand(0x40000000));
  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));

  // If the result is zero we might need to return -0.
  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));

  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));

  __ bind(&restore_fcsr_and_return);

  __ bind(&wont_fit_smi);

  GenerateMissBranch();

  // Return the generated code.
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
    GenerateLoadFunctionFromCell(cell, function, &miss);

  // Check if the argument is a smi.
  __ JumpIfNotSmi(v0, &not_smi);

  __ Subu(v0, a1, t0);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  __ Branch(&slow, lt, v0, Operand(zero_reg));

  Label negative_sign;
  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);

  GenerateMissBranch();

  // Return the generated code.
  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if the object is a global object, as we don't want to repatch
  // it to the global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);

  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, a0, a3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);

  ReserveSpaceForFastApiCall(masm(), a0);

  // Check that the maps haven't changed and find the holder as a side effect.
  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,

  GenerateFastApiDirectCall(masm(), optimization, argc);

  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> function,
                                                   Handle<String> name,
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<JSGlobalPropertyCell>::null(),
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a1, &miss);

      __ IncrementCounter(masm()->isolate()->counters()->call_const(),

      // Check that the maps haven't changed.
      CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,

      // Patch the receiver on the stack with the global proxy if necessary.
      if (object->IsGlobalObject()) {

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ GetObjectType(a1, a3, a3);

        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            a0, holder, a3, a1, t0, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ JumpIfSmi(a1, &fast);
        __ GetObjectType(a1, a0, a0);

        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            a0, holder, a3, a1, t0, name, &miss);

      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        __ LoadRoot(t0, Heap::kTrueValueRootIndex);
        __ Branch(&fast, eq, a1, Operand(t0));
        __ LoadRoot(t0, Heap::kFalseValueRootIndex);
        __ Branch(&miss, ne, a1, Operand(t0));

        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            a0, holder, a3, a1, t0, name, &miss);

  __ InvokeFunction(
      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);

  // Handle call cache miss.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  GenerateMissBranch();
Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
    Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if necessary.
  if (object->IsGlobalObject()) {

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
  ParameterCount expected(function->shared()->formal_parameter_count());

                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(NORMAL, name);
Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                  Handle<Map> transition,
                                                  Handle<String> name) {
  // Name register might be clobbered.
  GenerateStoreField(masm(),

  __ li(a2, Operand(Handle<String>(name)));
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<AccessorInfo> callback,
    Handle<String> name) {
  // Check that the map of the object hasn't changed.
  __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(a1, a3, &miss);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ li(a3, Operand(callback));  // Callback info.
  __ Push(a3, a2, a0);

  // Do a tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
    Handle<JSObject> receiver,
    Handle<JSFunction> setter,
    Handle<String> name) {
  // Check that the map of the object hasn't changed.
  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,

  ParameterCount actual(1);

  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> receiver,
    Handle<String> name) {
  // Check that the map of the object hasn't changed.
  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(a1, a3, &miss);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(a1, a2, a0);  // Receiver, name, value.

  // Do a tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
Handle<Code> StoreStubCompiler::CompileStoreGlobal(
    Handle<GlobalObject> object,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name) {
  // Check that the map of the global has not changed.
  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));

  // Check that the value in the cell is not the hole. If it is, this cell
  // could have been deleted, and reintroducing the global would need to
  // update the property details in the global object's dictionary; bail out
  // to the runtime system for that.
  __ li(t0, Operand(cell));
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ Branch(&miss, eq, t1, Operand(t2));

  // Store the value in the cell.
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);

  // Handle store cache miss.
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  return GetCode(NORMAL, name);
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
                                                      Handle<JSObject> object,
                                                      Handle<JSObject> last) {
  // Check that the receiver is not a smi.
  __ JumpIfSmi(a0, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    GenerateCheckPropertyCell(
        masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);

  // Return undefined if the maps of the full prototype chain are still the
  // same.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  // Return the generated code.
  return GetCode(NONEXISTENT, factory()->empty_string());
Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                Handle<String> name) {
  GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);

  // Return the generated code.
  return GetCode(FIELD, name);
Handle<Code> LoadStubCompiler::CompileLoadCallback(
    Handle<String> name,
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, callback, name,
Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> getter) {
  // Check that the maps haven't changed.
  __ JumpIfSmi(a0, &miss);
  CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);

  ParameterCount actual(0);
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> value,
                                                   Handle<String> name) {
  GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object, holder, &lookup, a0, a2, a3, a1, t0, name,
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name,
    bool is_dont_delete) {
  // Check that the map of the global has not changed.
  __ JumpIfSmi(a0, &miss);
  CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));

  // Check for deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);

  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);

  // Return the generated code.
  return GetCode(NORMAL, name);
Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
                                                     Handle<JSObject> receiver,
                                                     Handle<JSObject> holder,
  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);

  return GetCode(FIELD, name);
Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, callback, name,
Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> value) {
  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<String> name) {
  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, a1, a0, a2, a3, t0, name,
Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
    Handle<String> name) {
  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadArrayLength(masm(), a1, a2, &miss);
Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);

  // Check the key is the cached one.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);

  __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
    Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);

  // Check the name hasn't changed.
  __ Branch(&miss, ne, a0, Operand(name));

  GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);

  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, factory()->empty_string());
  __ JumpIfSmi(a1, &miss);

  int receiver_count = receiver_maps->length();

  for (int current = 0; current < receiver_count; ++current) {
    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET,
            eq, a2, Operand(receiver_maps->at(current)));

  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                       Handle<Map> transition,
                                                       Handle<String> name) {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);

  // Check that the name has not changed.
  __ Branch(&miss, ne, a1, Operand(name));

  GenerateStoreField(masm(),

  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);
Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
    Handle<Map> receiver_map) {
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
  Handle<Code> stub =
      KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, factory()->empty_string());
  __ JumpIfSmi(a2, &miss);

  int receiver_count = receiver_maps->length();

  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET,
              eq, a3, Operand(receiver_maps->at(i)));
      __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i)));
      __ li(a3, Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);

  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
Handle<Code> ConstructStubCompiler::CompileConstructStub(
    Handle<JSFunction> function) {
  Label generic_stub_call;

  // Use t7 for holding undefined, which is used in several places below.
  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  __ Branch(&generic_stub_call, ne, a2, Operand(t7));

  // Load the initial map and verify that it is in fact a map.
  __ JumpIfSmi(a2, &generic_stub_call);
  __ GetObjectType(a2, a3, t0);
  __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));

  __ Check(ne, "Function constructed by construct stub.",

  __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);

  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);

  __ Addu(t5, t5, Operand(3 * kPointerSize));

  // Calculate the location of the first argument. The stack contains only
  // the argc arguments.
  __ Addu(a1, a1, sp);

  // Fill the in-object properties from the this-property assignments.
  Handle<SharedFunctionInfo> shared(function->shared());
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
      // Argument passed - find it on the stack.
      __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
      __ Addu(t5, t5, kPointerSize);
      __ bind(&not_passed);
      __ Addu(t5, t5, Operand(kPointerSize));
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ li(a2, Operand(constant));
      __ Addu(t5, t5, kPointerSize);

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
    __ Addu(t5, t5, kPointerSize);

  // Remove the caller arguments and receiver from the stack and return.
  __ Addu(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Handle<Code> generic_construct_stub =
      masm()->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  Label slow, miss_force_generic;

  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss_force_generic);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
static bool IsElementTypeSigned(ElementsKind elements_kind) {
  switch (elements_kind) {
static void GenerateSmiKeyCheck(MacroAssembler* masm,
                                FPURegister double_scratch0,
    CpuFeatures::Scope scope(FPU);

    __ JumpIfSmi(key, &key_ok);
                Heap::kHeapNumberMapRootIndex,
    __ Branch(fail, ne, scratch1, Operand(zero_reg));
    __ mfc1(scratch0, double_scratch0);
    __ SmiTagCheckOverflow(key, scratch0, scratch1);
    __ BranchOnOverflow(fail, scratch1);

    __ JumpIfNotSmi(key, fail);
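

// GenerateSmiKeyCheck accepts a key that is either already a smi or a heap
// number holding a value that converts exactly to a smi: the FPU path above
// truncates the double, verifies that the conversion was exact, and re-tags
// the integer as a smi with an overflow check. Without FPU support (the
// final JumpIfNotSmi, whose surrounding else-branch is elided from this
// excerpt), only smi keys are accepted and everything else goes to the fail
// label.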
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
    MacroAssembler* masm,
  Label miss_force_generic, slow, failed_allocation;

  Register key = a0;
  Register receiver = a1;

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

  Register value = a2;
  switch (elements_kind) {
      __ addu(t3, a3, t2);
      __ addu(t3, a3, t2);
      __ addu(t3, a3, key);
      __ addu(t3, a3, key);
      __ addu(t3, a3, t2);
      __ addu(t3, a3, t3);
      CpuFeatures::Scope scope(FPU);
      __ addu(t3, a3, t2);
      CpuFeatures::Scope scope(FPU);

  // For the Int and UnsignedInt array types, we need to see whether the
  // value can be represented in a smi. If not, we need to convert it to a
  // HeapNumber.
  __ Subu(t3, value, Operand(0xC0000000));
  __ Branch(&box_int, lt, t3, Operand(zero_reg));

  // Allocate a HeapNumber for the result and perform the int-to-double
  // conversion.
  __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(v0, a3, t0, t1, &slow);

    CpuFeatures::Scope scope(FPU);

    CpuFeatures::Scope scope(FPU);
    __ And(t2, value, Operand(0xC0000000));
    __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));

    __ bind(&pl_box_int);

    __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(v0, t2, t3, t6, &slow);

    // Check whether the unsigned value fits into a smi.
    Label box_int_0, box_int_1, done;
    __ And(t2, value, Operand(0x80000000));
    __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
    __ And(t2, value, Operand(0x40000000));
    __ Branch(&box_int_1, ne, t2, Operand(zero_reg));

    Register hiword = value;
    Register loword = a3;

    __ bind(&box_int_0);
    // Integer does not have leading zeros.
    GenerateUInt2Double(masm, hiword, loword, t0, 0);

    __ bind(&box_int_1);
    // Integer has one leading zero.
    GenerateUInt2Double(masm, hiword, loword, t0, 1);

    __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(t2, t3, t5, t6, &slow);

      CpuFeatures::Scope scope(FPU);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

      Label exponent_rebiased;
      __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));

      __ Xor(t1, t5, Operand(0xFF));
      __ Movz(t5, t0, t1);
      __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));

      // Rebias the exponent.
      __ bind(&exponent_rebiased);

      __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);

      static const int kMantissaShiftForHiWord =

      static const int kMantissaShiftForLoWord =

      __ srl(t0, t4, kMantissaShiftForHiWord);
      __ sll(a0, t4, kMantissaShiftForLoWord);

      CpuFeatures::Scope scope(FPU);
      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  __ bind(&miss_force_generic);
  Handle<Code> stub =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);
void KeyedStoreStubCompiler::GenerateStoreExternalArray(
    MacroAssembler* masm,
  Label slow, check_heap_number, miss_force_generic;

  Register value = a0;
  Register receiver = a2;

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);

  __ JumpIfNotSmi(value, &slow);
  __ JumpIfNotSmi(value, &check_heap_number);
  __ SmiUntag(t1, value);

  switch (elements_kind) {
      // Clamp the value to [0..255].
      __ li(v0, Operand(255));
      __ Branch(&done, gt, t1, Operand(v0));
      __ mov(v0, zero_reg);
      __ addu(t8, a3, t8);
      __ addu(t8, a3, t8);
      __ addu(t8, a3, key);
      __ addu(t8, a3, t8);
      __ SmiUntag(t0, key);
      StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
      __ addu(a3, a3, t8);

          masm, t1, destination,
      CpuFeatures::Scope scope(FPU);

  __ bind(&check_heap_number);
  __ GetObjectType(value, t1, t2);

    CpuFeatures::Scope scope(FPU);

    __ addu(t8, a3, t8);
    __ addu(t8, a3, t8);

    __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);

    switch (elements_kind) {
        __ addu(t8, a3, t8);
        __ addu(t8, a3, key);
        __ addu(t8, a3, t8);

    Label done, nan_or_infinity_or_zero;
    static const int kMantissaInHiWordShift =

    static const int kMantissaInLoWordShift =

    // Test for all special exponent values: zeros, subnormal numbers, NaNs
    // and infinities. All these should be converted to 0.
    __ and_(t6, t3, t5);
    __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));

    __ xor_(t1, t6, t5);
    __ Movz(t6, t2, t1);
    __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));

    __ Movn(t3, t2, t1);
    __ Movn(t3, t2, t1);

    __ sll(t3, t3, kMantissaInHiWordShift);
    __ srl(t4, t4, kMantissaInLoWordShift);
    __ sll(t6, t6, kBinary32ExponentShift);

    __ addu(t9, a3, t9);

    __ bind(&nan_or_infinity_or_zero);
    __ sll(t3, t3, kMantissaInHiWordShift);
    __ srl(t4, t4, kMantissaInLoWordShift);

    __ addu(t8, a3, t8);

    bool is_signed_type = IsElementTypeSigned(elements_kind);
    int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
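
    // What follows is a software version of ECMA-style ToInt32 truncation
    // for the no-FPU path: special exponents (zero, subnormals, NaN and
    // infinity) store 0; exponents too large for the element's meaningful
    // bits clamp to min_value; otherwise the mantissa, with the implicit
    // top bit restored below, is shifted into integer position and the
    // sign is applied at the end (the sign handling is partly elided from
    // this excerpt).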
    __ and_(t6, t3, t5);
    __ Movz(t3, zero_reg, t6);
    __ Branch(&done, eq, t6, Operand(zero_reg));

    __ xor_(t2, t6, t5);
    __ Movz(t3, zero_reg, t2);
    __ Branch(&done, eq, t6, Operand(t5));

    // If the exponent is negative, the absolute value is < 1, so store 0.
    __ slt(t2, t6, zero_reg);
    __ Movn(t3, zero_reg, t2);
    __ Branch(&done, lt, t6, Operand(zero_reg));

    __ slti(t1, t6, meaningfull_bits - 1);
    __ li(t2, min_value);
    __ Movz(t3, t2, t1);
    __ Branch(&done, ge, t6, Operand(meaningfull_bits - 1));

    // Restore the implicit top bit of the mantissa.
    __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

    __ li(t9, HeapNumber::kMantissaBitsInTopWord);
    __ subu(t6, t9, t6);
    __ slt(t1, t6, zero_reg);
    __ srlv(t2, t3, t6);
    __ Movz(t3, t2, t1);
    __ Branch(&sign, ge, t6, Operand(zero_reg));

    __ subu(t6, zero_reg, t6);
    __ sllv(t3, t3, t6);
    __ li(t9, meaningfull_bits);
    __ subu(t6, t9, t6);
    __ srlv(t4, t4, t6);

    __ subu(t2, t3, zero_reg);
    __ Movz(t3, t2, t5);

    switch (elements_kind) {
        __ addu(t8, a3, t8);
        __ addu(t8, a3, key);
        __ addu(t8, a3, t8);

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  Label miss_force_generic;

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);

  // Get the elements array.
  __ AssertFastElements(a2);

  // Load the result and make sure it's not the hole.
  __ Addu(t0, t0, a3);
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ Branch(&miss_force_generic, eq, t0, Operand(t1));

  __ bind(&miss_force_generic);
  Handle<Code> stub =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);
void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
    MacroAssembler* masm) {
  Label miss_force_generic, slow_allocate_heapnumber;

  Register key_reg = a0;
  Register receiver_reg = a1;
  Register elements_reg = a2;
  Register heap_number_reg = a2;
  Register indexed_double_offset = a3;
  Register scratch = t0;
  Register scratch2 = t1;
  Register scratch3 = t2;
  Register heap_number_map = t3;

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

  // Check that the key is within bounds.
  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));

  // Load the upper word of the double in the fixed array and test for NaN.
  __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
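
  // The hole in a FixedDoubleArray is encoded as a NaN with a fixed bit
  // pattern (kHoleNanUpper32 / kHoleNanLower32), so reading the upper word
  // at upper_32_offset (kHeaderSize plus the 4-byte lower word) is enough
  // to tell a hole apart from an ordinary element before boxing it as a
  // HeapNumber below.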
  // Non-NaN: allocate a new heap number and copy the double value into it.
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
                        heap_number_map, &slow_allocate_heapnumber);
                           FixedArray::kHeaderSize));

  __ mov(v0, heap_number_reg);

  __ bind(&slow_allocate_heapnumber);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);

void KeyedStoreStubCompiler::GenerateStoreFastElement(
    MacroAssembler* masm,
    bool is_js_array,
    ElementsKind elements_kind,
    KeyedAccessGrowMode grow_mode) {
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label finish_store, check_capacity;

  Register value_reg = a0;
  Register key_reg = a1;
  Register receiver_reg = a2;
  Register scratch = t0;
  Register elements_reg = a3;
  Register length_reg = t1;
  Register scratch2 = t2;

  // Check that the key is a smi.
  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

  if (IsFastSmiElementsKind(elements_kind)) {
    __ JumpIfNotSmi(value_reg, &transition_elements_kind);
  }

  // Check that the key is within bounds; the relevant length has been
  // loaded into scratch in elided lines.
  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    __ Branch(&grow, hs, key_reg, Operand(scratch));
  } else {
    __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
  }

  // Make sure elements is a fast element array, not 'cow'.
  __ CheckMap(elements_reg,
              scratch,
              Heap::kFixedArrayMapRootIndex,
              &miss_force_generic,
              DONT_DO_SMI_CHECK);

  __ bind(&finish_store);
  // Both element kinds compute the slot address the same way; only the
  // object kind needs a write barrier after the store.
  __ Addu(scratch, scratch, scratch2);  // Smi-elements arm.
  // ...
  __ Addu(scratch, scratch, scratch2);  // Object-elements arm.
  __ mov(receiver_reg, value_reg);
  __ RecordWrite(elements_reg,
                 scratch,
                 receiver_reg,
                 kRAHasNotBeenSaved,
                 kDontSaveFPRegs);
  __ Ret();

  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic_miss, RelocInfo::CODE_TARGET);

  // Grow the array by a single element if possible; anything else must be
  // handled by the runtime.
  __ bind(&grow);
  __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));

  // Check for the empty array, and preallocate a small backing store if
  // possible.
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ Branch(&check_capacity, ne, elements_reg, Operand(at));

  int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
                        TAG_OBJECT);

  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
  // ... (store the map and length, then fill the new store with the hole)
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  // ... (store value_reg at index zero, install the store, bump the length)
  __ Ret();

  __ bind(&check_capacity);
  // COW elements are not handled by this stub.
  __ CheckMap(elements_reg,
              scratch,
              Heap::kFixedCOWArrayMapRootIndex,
              &miss_force_generic,
              DONT_DO_SMI_CHECK);

  // Bail out to the runtime when the new length exceeds the capacity,
  // which was loaded into scratch in an elided line.
  __ Branch(&slow, hs, length_reg, Operand(scratch));

  // Grow the array (elided: bump the smi length) and finish the store.
  __ jmp(&finish_store);

  __ bind(&slow);
  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
}
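
// ---------------------------------------------------------------------------
// Illustrative sketch (not V8 code): the dispatch decisions made by the
// grow-capable store stub above. StorePath and the classifier are assumed
// names; the outcomes map roughly onto &finish_store, &grow, &slow, and
// &miss_force_generic respectively.
#include <cstddef>

enum class StorePath { kFast, kGrowThenStore, kSlow, kForceGeneric };

static StorePath ClassifyKeyedStoreSketch(std::size_t key, std::size_t length,
                                          std::size_t capacity,
                                          bool allow_growth) {
  if (key < length) return StorePath::kFast;           // In-bounds store.
  if (!allow_growth) return StorePath::kForceGeneric;  // Growth disabled.
  if (key != length) return StorePath::kForceGeneric;  // Only append-by-one.
  if (key >= capacity) return StorePath::kSlow;        // No room; let the runtime reallocate.
  return StorePath::kGrowThenStore;                    // Bump length, then store.
}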

void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
    MacroAssembler* masm,
    bool is_js_array,
    KeyedAccessGrowMode grow_mode) {
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label finish_store, check_capacity;

  Register value_reg = a0;
  Register key_reg = a1;
  Register receiver_reg = a2;
  Register elements_reg = a3;
  Register scratch1 = t0;
  Register scratch2 = t1;
  Register scratch3 = t2;
  Register scratch4 = t3;
  Register length_reg = t3;

  // Check that the key is a smi.
  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);

  // Check that the key is within bounds; the length has been loaded into
  // scratch1 in elided lines.
  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    __ Branch(&grow, hs, key_reg, Operand(scratch1));
  } else {
    __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
  }

  __ bind(&finish_store);
  __ StoreNumberToDoubleElements(value_reg,
                                 key_reg,
                                 receiver_reg,
                                 elements_reg,
                                 scratch1,
                                 scratch2,
                                 scratch3,
                                 scratch4,
                                 &transition_elements_kind);
  __ mov(v0, value_reg);
  __ Ret();

  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic_miss, RelocInfo::CODE_TARGET);

  // Grow the array by a single element if possible.
  __ bind(&grow);
  __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));

  // Only smis and heap numbers can be stored in a FixedDoubleArray;
  // transition on anything else.
  Label value_is_smi;
  __ JumpIfSmi(value_reg, &value_is_smi);
  __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
  __ bind(&value_is_smi);

  // Check for the empty array, and preallocate a small backing store if
  // possible.
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ Branch(&check_capacity, ne, elements_reg, Operand(at));

  int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
                        TAG_OBJECT);

  // Initialize the new backing store: map, then length.
  __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
  // ...
  __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
  // ... (install the store in the JSArray and bump its length)
  __ jmp(&finish_store);

  __ bind(&check_capacity);
  // Bail out to the runtime when the new length exceeds the capacity,
  // which was loaded into scratch1 in an elided line.
  __ Branch(&slow, hs, length_reg, Operand(scratch1));

  // Grow the array (elided: bump the smi length) and finish the store.
  __ jmp(&finish_store);

  __ bind(&slow);
  Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(ic_slow, RelocInfo::CODE_TARGET);
}
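
// ---------------------------------------------------------------------------
// Illustrative sketch (not V8 code): the contract of StoreNumberToDoubleElements
// as used above. A smi or a heap number yields a double to write into the
// FixedDoubleArray; anything else forces an elements-kind transition.
// TaggedNumber and the helper are assumed stand-ins.
#include <cstdint>
#include <optional>

struct TaggedNumber {
  bool is_smi;
  int32_t smi_value;    // Valid when is_smi is true.
  bool is_heap_number;  // True only for heap numbers.
  double number;        // Valid when is_heap_number is true.
};

// Returns the double to store, or nullopt to request a transition (the
// stub jumps to &transition_elements_kind in that case).
static std::optional<double> NumberForDoubleElementsSketch(
    const TaggedNumber& value) {
  if (value.is_smi) return static_cast<double>(value.smi_value);  // Untag.
  if (value.is_heap_number) return value.number;  // Store the value's bits.
  return std::nullopt;  // Not storable in a double array.
}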
#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS