#if defined(V8_TARGET_ARCH_MIPS)

#define __ ACCESS_MASM(masm)
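// The "__" shorthand used throughout this file comes from ACCESS_MASM;
// assuming the usual definition in macro-assembler.h
// ("#define ACCESS_MASM(masm) masm->"), a line such as "__ LoadRoot(...)"
// reads as masm->LoadRoot(...).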
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  // ... (frame entry elided in this excerpt)
  ASSERT(!masm->has_frame());
  masm->set_has_frame(true);
}

void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  // ... (frame exit elided in this excerpt)
  masm->set_has_frame(false);
}
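// Usage sketch (not from this file): a RuntimeCallHelper brackets a runtime
// call so the MacroAssembler's has_frame() flag matches the frame the helper
// actually built. The call sequence below is illustrative only.
//
//   helper.BeforeCall(masm);   // enter an internal frame, has_frame -> true
//   __ CallRuntime(...);       // emitted while a frame is known to exist
//   helper.AfterCall(masm);    // leave the frame, has_frame -> false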
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm) {
  // Store the transitioned map into the receiver (elided in this excerpt),
  // then emit the write barrier for the map field.
  __ RecordWriteField(a2, ...);  // remaining arguments elided
}
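// RecordWriteField is the write barrier: after a pointer is stored into an
// object's field, the GC's remembered set and incremental marker must be
// told about the new reference. The full call takes the object register, the
// field offset, the value and scratch registers, plus save/emit flags, which
// is why it spans several source lines in the original.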
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Label* fail) {
  // ...
  Label loop, entry, convert_hole, gc_required, only_change_map, done;
  // ...
  Register scratch = t6;

  // An empty FixedArray needs only the map transition, not a new backing
  // store.
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ Branch(&only_change_map, eq, at, Operand(t0));
  // ...

  // Allocate the new FixedDoubleArray. t1 holds the length as a smi
  // (value << 1), so shifting left by 2 more yields length * 8 bytes.
  __ sll(scratch, t1, 2);
  // ...
  __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex);
  // ...
  __ RecordWriteField(a2, ...);  // write barrier: transitioned map
  // ...
  __ RecordWriteField(a2, ...);  // write barrier: new elements pointer
  // ...

  // Prepare for the conversion loop (fpu_supported is
  // CpuFeatures::IsSupported(FPU), established earlier; elided).
  if (!fpu_supported) __ Push(a1, a0);
  // ...

  __ bind(&only_change_map);
  // ...
  __ RecordWriteField(a2, ...);  // write barrier: map-only transition
  // ...

  // Call into the runtime if GC is required.
  __ bind(&gc_required);
  // ...

  // Per-element conversion: untag the current element; a non-smi here can
  // only be the hole.
  __ UntagAndJumpIfNotSmi(t5, t5, &convert_hole);

  // Normal smi: convert to double and write it into the new array.
  // (An integer fallback exists for FPU-less cores; elided here.)
  CpuFeatures::Scope scope(FPU);
  // ...

  // The hole is written as a canonical hole-NaN bit pattern.
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    // ... (restore the heap-object tag so t5 can be compared with the hole)
    __ Or(t5, t5, Operand(1));
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Assert(eq, "object found in smi-only array", at, Operand(t5));
  }
  // ...
  __ Branch(&loop, lt, t3, Operand(t2));
  // ...
  if (!fpu_supported) __ Pop(a1, a0);
}
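// Standalone sketch (not V8 code) of what the conversion loop above computes
// per element: untag the smi, widen it to an IEEE double, and encode holes
// as a reserved NaN bit pattern. The tag width and hole words here are
// assumptions chosen for illustration; V8 defines the real
// kHoleNanUpper32/kHoleNanLower32 centrally.
#include <cstdint>
#include <cstring>

static const int kSketchSmiTagSize = 1;                 // assumed 1-bit tag
static const uint32_t kSketchHoleUpper32 = 0x7FFFFFFF;  // assumed pattern
static const uint32_t kSketchHoleLower32 = 0xFFFFFFFF;  // assumed pattern

static uint64_t SketchElementToDoubleBits(int32_t tagged, bool is_hole) {
  if (is_hole) {
    return (static_cast<uint64_t>(kSketchHoleUpper32) << 32) |
           kSketchHoleLower32;
  }
  double d = static_cast<double>(tagged >> kSketchSmiTagSize);  // untag
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // reinterpret the double as raw bits
  return bits;
}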
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Label* fail) {
  // ...
  Label entry, loop, convert_hole, gc_required, only_change_map;

  // An empty backing store again needs only the map transition.
  __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
  __ Branch(&only_change_map, eq, at, Operand(t0));
  // ...

  // Save the registers (including ra) that the conversion loop clobbers.
  __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
  // ...

  // Allocate and initialize the new FixedArray (elided), then keep loop
  // constants in registers: the new array's map, the-hole, and the
  // heap number map.
  __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
  // ...
  __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex);
  // ...

  // Call into the runtime if GC is required.
  __ bind(&gc_required);
  __ MultiPop(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
  // ...

  // Box a non-hole double in a freshly allocated HeapNumber.
  __ AllocateHeapNumber(a2, a0, t6, t5, &gc_required);
  // ...

  // Replace a hole double with the-hole value.
  __ bind(&convert_hole);
  // ...
  __ Branch(&loop, lt, a3, Operand(t1));
  // ...

  __ MultiPop(a2.bit() | a3.bit() | a0.bit() | a1.bit());
  // ...
  __ RecordWriteField(a2, ...);  // write barrier: new elements pointer
  // ...

  __ bind(&only_change_map);
  // ...
  __ RecordWriteField(a2, ...);  // write barrier: map-only transition
}
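// Sketch (not V8 code) of the per-element test in the loop above: a double
// element is "the hole" exactly when its upper 32 bits match the reserved
// hole-NaN word, so a single word comparison suffices. kSketchHoleUpper32 is
// the assumed pattern from the previous sketch.
static bool SketchIsHole(uint64_t double_bits) {
  return static_cast<uint32_t>(double_bits >> 32) == kSketchHoleUpper32;
}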
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // ... (the string's instance type is loaded into |result|)

  // Indirect strings (cons and sliced) must be unwrapped before their
  // characters can be loaded.
  Label check_sequential;
  // ...
  __ Branch(&check_sequential, eq, at, Operand(zero_reg));

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  // ...
  __ Branch(&cons_string, eq, at, Operand(zero_reg));

  // Handle slices: add the slice offset to the index and continue with the
  // parent string.
  Label indirect_string_loaded;
  // ...
  __ Addu(index, index, at);
  __ jmp(&indirect_string_loaded);

  // Handle cons strings: only flat cons strings, whose second part is the
  // empty string, are handled inline; anything else goes to the runtime.
  __ bind(&cons_string);
  // ...
  __ LoadRoot(at, Heap::kEmptyStringRootIndex);
  __ Branch(call_runtime, ne, result, Operand(at));
  // ...

  __ bind(&indirect_string_loaded);
  // ... (reload the instance type of the unwrapped string)

  // Distinguish sequential from external strings; only these two
  // representations can reach this point.
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  // ...
  __ Branch(&external_string, ne, at, Operand(zero_reg));
  // ... (advance |string| past the sequential string header)
  __ jmp(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect string) here;
    // sequential strings have already been ruled out.
    // ...
    __ Assert(eq, "external string expected, but not found",
        at, Operand(zero_reg));
  }
  // Short external strings keep their data off-heap; punt to the runtime.
  // ...
  __ Branch(call_runtime, ne, at, Operand(zero_reg));
  // ...

  // Dispatch on the encoding: two-byte or ASCII.
  Label ascii;
  __ bind(&check_encoding);
  // ...
  __ Branch(&ascii, ne, at, Operand(zero_reg));
  // Two-byte string: the character is a half-word at string + index * 2.
  __ sll(at, index, 1);
  __ Addu(at, string, at);
  // ... (load half-word, jump past the ASCII case)

  __ bind(&ascii);
  // ASCII string: the character is a byte at string + index.
  __ Addu(at, string, index);
  // ... (load byte)
}
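// Sketch (not V8 code) of the dispatch this generator emits, written as
// plain C++ over a hypothetical tagged string representation. The enum and
// field names are illustrative only; the control flow mirrors the labels
// above: unwrap indirect strings, then branch on representation and
// encoding.
#include <stdint.h>

enum SketchShape { kSketchSeq, kSketchExternal, kSketchCons, kSketchSliced };

struct SketchString {
  SketchShape shape;
  bool two_byte;          // encoding: two-byte vs. ASCII
  const void* data;       // character data for seq/external strings
  SketchString* target;   // parent (sliced) or first part (cons)
  uint32_t offset;        // slice offset into the parent
  SketchString* second;   // second part of a cons; NULL models "empty"
};

// Returns the code unit at |index|, or -1 where the real code would fall
// back to the runtime (a cons string that is not flat).
static int SketchCharAt(SketchString* s, uint32_t index) {
  if (s->shape == kSketchSliced) {       // slice: adjust index, use parent
    index += s->offset;
    s = s->target;
  } else if (s->shape == kSketchCons) {  // cons: only flat cons inline
    if (s->second != NULL) return -1;    // assumed: NULL second == empty
    s = s->target;
  }
  const uint8_t* bytes = static_cast<const uint8_t*>(s->data);
  return s->two_byte
      ? reinterpret_cast<const uint16_t*>(bytes)[index]  // half-word load
      : bytes[index];                                    // byte load
}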
#endif  // V8_TARGET_ARCH_MIPS