using namespace v8::internal;
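// cctest harness setup: InitializeVM() creates the VM and a context once,
// before any of the generated-code tests below run.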
static void InitializeVM() {
CHECK(code->IsCode());
::printf("f() = %d\n", res);
__ sub(r1, r1, Operand(1));
CHECK(code->IsCode());
::printf("f() = %d\n", res);
__ mov(r0, Operand(1));
__ sub(r1, r1, Operand(1));
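// The fragment below never executes; RecordComment() strings end up in the
// relocation info, and the movs exercise immediates that need different
// encodings (e.g. mvn for -1, a rotated byte for 0xFF000000, and values that
// cannot be encoded as a single immediate).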
__ RecordComment("dead code, just testing relocations");
__ RecordComment("dead code, just testing immediate operands");
__ mov(r0, Operand(-1));
__ mov(r0, Operand(0xFF000000));
__ mov(r0, Operand(0xF0F0F0F0));
__ mov(r0, Operand(0xFFF0FFFF));
CHECK(code->IsCode());
::printf("f() = %d\n", res);
__ sub(fp, ip, Operand(4));
CHECK(code->IsCode());
::printf("f() = %d\n", res);
CpuFeatures::Scope scope(VFP3);
__ sub(fp, ip, Operand(4));
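// 1.000000059604644775390625 is exactly 1 + 2^-24, so it is representable
// without rounding in a double; the CHECK_EQ on t.d below depends on that.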
__ vmov(d4, 1.000000059604644775390625);
__ mov(lr, Operand(42));
CHECK(code->IsCode());
CHECK_EQ(1.000000059604644775390625, t.d);
CpuFeatures::Scope scope(ARMv7);
__ mov(r1, Operand(7));
CHECK(code->IsCode());
int res = reinterpret_cast<int>(
::printf("f() = %d\n", res);
CpuFeatures::Scope scope(ARMv7);
__ usat(r1, 8, Operand(r0));
CHECK(code->IsCode());
int res = reinterpret_cast<int>(
::printf("f() = %d\n", res);
static void TestRoundingMode(VCVTTypes types,
                             VFPRoundingMode mode,
                             double value,
                             int expected,
                             bool expected_exception = false) {
CpuFeatures::Scope scope(VFP3);
Label wrong_exception;
__ orr(r2, r2, Operand(mode));
__ b(&wrong_exception, expected_exception ? eq : ne);
__ bind(&wrong_exception);
__ mov(r0, Operand(11223344));
CHECK(code->IsCode());
int res = reinterpret_cast<int>(
::printf("res = %d\n", res);
TestRoundingMode(s32_f64, RN, 123.7, 124);
TestRoundingMode(s32_f64, RN, -123.7, -124);
TestRoundingMode(s32_f64, RN, 123456.2, 123456);
TestRoundingMode(s32_f64, RN, -123456.2, -123456);
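// Round toward minus infinity (RM): -123.7 goes to -124, -123456.2 to -123457.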
TestRoundingMode(s32_f64, RM, 123.7, 123);
TestRoundingMode(s32_f64, RM, -123.7, -124);
TestRoundingMode(s32_f64, RM, 123456.2, 123456);
TestRoundingMode(s32_f64, RM, -123456.2, -123457);
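// Round toward zero (RZ): the fraction is discarded for both signs.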
TestRoundingMode(s32_f64, RZ, 123.7, 123);
TestRoundingMode(s32_f64, RZ, -123.7, -123);
TestRoundingMode(s32_f64, RZ, 123456.2, 123456);
TestRoundingMode(s32_f64, RZ, -123456.2, -123456);
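// u32_f64 (double to unsigned int) with negative inputs: the result saturates
// to 0, and the trailing true says a VFP exception flag is expected as well.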
TestRoundingMode(u32_f64, RN, -123456.7, 0, true);
TestRoundingMode(u32_f64, RM, -0.5, 0, true);
TestRoundingMode(u32_f64, RM, -123456.7, 0, true);
TestRoundingMode(u32_f64, RZ, -123456.7, 0, true);
static const uint32_t kMaxUInt = 0xffffffffu;
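// Positive u32_f64 conversions per rounding mode; inputs that round to
// kMaxUInt or beyond saturate to kMaxUInt, with an exception expected once
// the value truly leaves the representable range.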
TestRoundingMode(u32_f64, RZ, 123.7, 123);
TestRoundingMode(u32_f64, RZ, 123456.2, 123456);
TestRoundingMode(u32_f64, RZ, /* ... */
                 static_cast<uint32_t>(kMaxInt) + 1);
TestRoundingMode(u32_f64, RZ, (kMaxUInt + 0.5), kMaxUInt);
TestRoundingMode(u32_f64, RZ, (kMaxUInt + 1.0), kMaxUInt, true);
TestRoundingMode(u32_f64, RM, 123.7, 123);
TestRoundingMode(u32_f64, RM, 123456.2, 123456);
TestRoundingMode(u32_f64, RM, /* ... */
                 static_cast<uint32_t>(kMaxInt) + 1);
TestRoundingMode(u32_f64, RM, (kMaxUInt + 0.5), kMaxUInt);
TestRoundingMode(u32_f64, RM, (kMaxUInt + 1.0), kMaxUInt, true);
TestRoundingMode(u32_f64, RN, 123.7, 124);
TestRoundingMode(u32_f64, RN, 123456.2, 123456);
TestRoundingMode(u32_f64, RN, /* ... */
                 static_cast<uint32_t>(kMaxInt) + 1);
TestRoundingMode(u32_f64, RN, (kMaxUInt + 0.49), kMaxUInt);
TestRoundingMode(u32_f64, RN, (kMaxUInt + 0.5), kMaxUInt, true);
TestRoundingMode(u32_f64, RN, (kMaxUInt + 1.0), kMaxUInt, true);
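// The expectations above can be cross-checked on the host with the standard
// <cfenv> rounding modes. A minimal sketch (HostRoundingSketch is a
// hypothetical name, not from this file, and it assumes std::lrint() honours
// the current rounding mode) mirrors RN, RM and RZ for the 123.7 cases:
#include <cassert>
#include <cfenv>
#include <cmath>

static void HostRoundingSketch() {
  std::fesetround(FE_TONEAREST);   // RN: round to nearest
  assert(std::lrint(123.7) == 124);
  std::fesetround(FE_DOWNWARD);    // RM: round toward minus infinity
  assert(std::lrint(-123.7) == -124);
  std::fesetround(FE_TOWARDZERO);  // RZ: round toward zero
  assert(std::lrint(-123.7) == -123);
}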
CpuFeatures::Scope scope(VFP2);
__ sub(fp, ip, Operand(4));
CHECK(code->IsCode());
CpuFeatures::Scope scope(VFP2);
__ sub(fp, ip, Operand(4));
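// Between load/store batches, r4 is stepped past the data just handled,
// presumably four doubles (4 * 8 bytes), two doubles, four floats
// (4 * 4 bytes), then two floats.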
__ add(r4, r4, Operand(4 * 8));
__ add(r4, r4, Operand(2 * 8));
__ add(r4, r4, Operand(4 * 4));
__ add(r4, r4, Operand(2 * 4));
CHECK(code->IsCode());
CpuFeatures::Scope scope(VFP2);
__ sub(fp, ip, Operand(4));
CHECK(code->IsCode());
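// Carry-flag corner cases: r1 = 0xffffffff and r2 = 0 presumably feed
// carry-setting arithmetic, and the pair is loaded twice so one run can set
// the carry and the other clear it.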
__ mov(r1, Operand(0xffffffff));
__ mov(r2, Operand(0));
__ mov(r1, Operand(0xffffffff));
__ mov(r2, Operand(0));
CHECK(code->IsCode());
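// 0xabcd0000 is negative as an int32_t, so the expected value is computed
// with an arithmetic (sign-extending) shift right, which is what an ARM ASR
// by one would produce.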
CHECK_EQ(static_cast<int32_t>(0xabcd0000) >> 1, i.b);