#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
  static int CallSizeNotPredictableCodeSize(Address target,
                                            RelocInfo::Mode rmode,
                                            Condition cond = al);

  int CallSize(Handle<Code> code,
               RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
               TypeFeedbackId ast_id = TypeFeedbackId::None(),
               Condition cond = al);
  void Call(Handle<Code> code,
            RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
            TypeFeedbackId ast_id = TypeFeedbackId::None(),
            Condition cond = al);
  void Call(Label* target);
 
  void LoadObject(Register result, Handle<Object> object) {
    if (object->IsHeapObject()) {
      LoadHeapObject(result, Handle<HeapObject>::cast(object));
    } else {
      Move(result, object);
    }
  }
 
  void CheckPageFlag(Register object,
                     Register scratch,
                     int mask,
                     Condition cc,
                     Label* condition_met);
 
  // Check if object is in new space.  Jump if not.
  void JumpIfNotInNewSpace(Register object, Register scratch, Label* branch) {
    InNewSpace(object, scratch, ne, branch);
  }

  // Check if object is in new space.  Jump if yes.
  void JumpIfInNewSpace(Register object, Register scratch, Label* branch) {
    InNewSpace(object, scratch, eq, branch);
  }
 
  void EnsureNotWhite(Register object,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Label* object_is_white_and_not_data);

  void JumpIfDataObject(Register value,
                        Register scratch,
                        Label* not_data_object);

  void RecordWrite(Register object,
                   Register address,
                   Register value,
                   LinkRegisterStatus lr_status,
                   SaveFPRegsMode save_fp,
                   RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
                   SmiCheck smi_check = INLINE_SMI_CHECK);
 
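The multi-register Push and Pop overloads (see the index below) cascade into smaller pushes when the register numbering rules out a single block transfer. A sketch of the two-register Push, assuming V8's convention that stm stores lower-numbered registers at lower addresses:

  // Sketch, not verbatim from this header: push two registers, using one
  // store-multiple when register numbering allows it.
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      // One stm: src2 (lower-numbered) lands below src1 on the stack.
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }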
 
  void LoadTransitionedArrayMapConditional(ElementsKind expected_kind,
                                           ElementsKind transitioned_kind,
                                           Register map_in_out,
                                           Register scratch,
                                           Label* no_map_match);

  void LoadInitialArrayMap(Register function_in,
                           Register scratch,
                           Register map_out,
                           bool can_have_holes);
 
  void InitializeRootRegister() {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(kRootRegister, Operand(roots_array_start));
  }
 
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallKind call_kind);

  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(Handle<JSFunction> function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debugger support.
  void DebugBreak();
#endif
  static inline bool IsMarkedCode(Instr instr, int type) {
    return IsNop(instr, type);
  }

  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;
    // A marker is encoded as "mov rn, rn"; return n, or -1 if not a marker.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
                   ? src_reg : -1;
    return type;
  }
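
As a host-side illustration of the encoding GetCodeMarker inverts (values assumed here: al is 0xE0000000, the ARM "always" condition in bits 31-28, and 13 << 21 selects the MOV opcode), a marker of type n is the otherwise-useless instruction mov rn, rn:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uint32_t al = 0xE0000000u;          // condition field: always
    const uint32_t mov_mask = al | 13 << 21;  // MOV, no flags, no shift
    int type = 5;                             // hypothetical marker type
    uint32_t instr = mov_mask | (uint32_t(type) << 12) | uint32_t(type);  // mov r5, r5
    // Decode the same way GetCodeMarker does.
    uint32_t dst_mask = 0xfu << 12, src_mask = 0xfu;
    assert((instr & ~(dst_mask | src_mask)) == mov_mask);
    assert(((instr & dst_mask) >> 12) == (instr & src_mask));
    assert(int(instr & src_mask) == type);
    return 0;
  }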
 
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss,
                               bool miss_on_bound_function = false);
 
  void CompareMap(Register obj,
                  Register scratch,
                  Handle<Map> map,
                  Label* early_success,
                  CompareMapMode mode = REQUIRE_EXACT_MAP);

  void CompareMap(Register obj_map,
                  Handle<Map> map,
                  Label* early_success,
                  CompareMapMode mode = REQUIRE_EXACT_MAP);
 
  static int CalculateStackPassedWords(int num_reg_arguments,
                                       int num_double_arguments);

  void PrepareCallCFunction(int num_reg_arguments,
                            int num_double_registers,
                            Register scratch);

  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
  void CallCFunction(ExternalReference function,
                     int num_reg_arguments,
                     int num_double_arguments);
  void CallCFunction(Register function,
                     int num_reg_arguments,
                     int num_double_arguments);
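
Typical use pairs PrepareCallCFunction with CallCFunction; an illustrative fragment (the external-reference accessor named here is hypothetical, and arguments go in r0..r3 per the ARM calling convention):

  // Illustrative only: call a C function taking two register arguments.
  masm->PrepareCallCFunction(2, 0, r5);  // 2 reg args, 0 double args, r5 scratch
  masm->mov(r0, Operand(42));
  masm->mov(r1, Operand(7));
  masm->CallCFunction(
      ExternalReference::some_runtime_helper(masm->isolate()),  // hypothetical
      2);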
 
  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }
 
  void Abort(const char* msg);
 
  bool use_eabi_hardfloat() {
#if USE_EABI_HARDFLOAT
    return true;
#else
    return false;
#endif
  }
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);

  void JumpIfNotPowerOfTwoOrZeroAndNeg(Register reg,
                                       Register scratch,
                                       Label* zero_and_neg,
                                       Label* not_power_of_two);
 
  // Tagging a smi is a doubling: the tag value is 0 and occupies one bit.
  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }
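
Because the smi tag is 0 and occupies the low bit, tagging is just a doubling, which is why SmiTag can be a flag-setting add; a host-side check of that identity (constants assumed to match V8's 32-bit smi layout):

  #include <cassert>
  #include <cstdint>

  int main() {
    const int kSmiTagSize = 1;  // assumed: one tag bit, tag value 0
    int32_t v = 42;
    assert((v << kSmiTagSize) == v + v);    // SmiTag: add(reg, reg, Operand(reg))
    assert(((v + v) >> kSmiTagSize) == v);  // SmiUntag: arithmetic shift right
    return 0;
  }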
 
  void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
 
  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);
 
  template<typename Field>
  void DecodeField(Register reg) {
    static const int shift = Field::kShift;
    static const int mask = (Field::kMask >> shift) << kSmiTagSize;
    mov(reg, Operand(reg, LSR, shift));
    and_(reg, reg, Operand(mask));
  }
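
DecodeField takes a smi-tagged word holding a bit field and leaves the field's value, still smi-tagged, in the register; a plain-C++ trace of the arithmetic with a hypothetical field layout (the smi constants are assumptions matching the 32-bit layout above):

  #include <cassert>
  #include <cstdint>

  // Hypothetical 3-bit field stored at bit 4 of the untagged word.
  struct DemoField {
    static const int kShift = 4;
    static const uint32_t kMask = 0x7u << kShift;
  };

  int main() {
    const int kSmiTagSize = 1;                        // assumed smi layout
    uint32_t value = (5u << DemoField::kShift) | 3u;  // field = 5, plus other bits
    uint32_t reg = value << kSmiTagSize;              // the smi-tagged input
    reg >>= DemoField::kShift;                        // mov(reg, Operand(reg, LSR, shift))
    reg &= (DemoField::kMask >> DemoField::kShift) << kSmiTagSize;  // and_(...)
    assert(reg == (5u << kSmiTagSize));               // field value, as a smi
    return 0;
  }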
 
  void CallCFunctionHelper(Register function,
                           int num_reg_arguments,
                           int num_double_arguments);
 
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
 
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      bool* definitely_mismatches,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);
 
  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);
 
  inline void GetMarkBits(Register addr_reg,
                          Register bitmap_reg,
                          Register mask_reg);
 
  void JumpToHandlerEntry();
 
  static int SafepointRegisterStackIndex(int reg_code);
 
  bool generating_stub_;
  bool allow_stub_calls_;
  bool has_frame_;
 
#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_
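
In the matching .cc files, ACCESS_MASM is conventionally bound to a local __ macro so code-emission sequences stay terse; an illustrative (hypothetical) emitter:

  #define __ ACCESS_MASM(masm)

  static void EmitAnswer(MacroAssembler* masm) {  // hypothetical helper
    __ mov(r0, Operand(42));
    __ Ret();
  }

  #undef __

With GENERATED_CODE_COVERAGE defined, every __-prefixed emit first plants a stop() carrying the file and line, which is what the CODE_COVERAGE_* macros above assemble.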
void CallRuntime(const Runtime::Function *f, int num_arguments)
void ClampDoubleToUint8(Register result_reg, DoubleRegister input_reg, DoubleRegister temp_double_reg)
void Push(Handle< Object > handle)
void SmiUntag(Register reg, SBit s=LeaveCC)
void IntegerToDoubleConversionWithVFP3(Register inReg, Register outHighReg, Register outLowReg)
void ClampUint8(Register output_reg, Register input_reg)
Isolate * isolate() const 
const intptr_t kSmiTagMask
void PushSafepointRegisters()
void Assert(Condition cond, const char *msg)
static int SlotOffset(int index)
void GetRelocatedValueLocation(Register ldr_location, Register result)
void GetCFunctionDoubleResult(const DoubleRegister dst)
void SmiTag(Register reg, SBit s=LeaveCC)
void AllocateTwoByteSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void mov(Register rd, const Operand &src, SBit s=LeaveCC, Condition cond=al)
void IncrementalMarkingRecordWriteHelper(Register object, Register value, Register address)
void Ubfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void Bfc(Register dst, Register src, int lsb, int width, Condition cond=al)
void JumpIfInNewSpace(Register object, Register scratch, Label *branch)
void LeaveExitFrame(bool save_doubles, Register argument_count)
void AssertString(Register object)
void IsObjectJSStringType(Register object, Register scratch, Label *fail)
static TypeFeedbackId None()
void JumpToExternalReference(const ExternalReference &builtin)
void UntagAndJumpIfSmi(Register dst, Register src, Label *smi_case)
void LoadInstanceDescriptors(Register map, Register descriptors)
void ObjectToDoubleVFPRegister(Register object, DwVfpRegister value, Register scratch1, Register scratch2, Register heap_number_map, SwVfpRegister scratch3, Label *not_number, ObjectToDoubleFlags flags=NO_OBJECT_TO_DOUBLE_FLAGS)
void AllocateAsciiString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void TrySmiTag(Register reg, Label *not_a_smi, Register scratch)
void JumpIfNotBothSequentialAsciiStrings(Register first, Register second, Register scratch1, Register scratch2, Label *not_flat_ascii_strings)
void LoadFromNumberDictionary(Label *miss, Register elements, Register key, Register result, Register t0, Register t1, Register t2)
void SetCallCDoubleArguments(DoubleRegister dreg)
void CountLeadingZeros(Register zeros, Register source, Register scratch)
void GetBuiltinEntry(Register target, Builtins::JavaScript id)
void b(int branch_offset, Condition cond=al)
void JumpIfSmi(Register value, Label *smi_label)
void DispatchMap(Register obj, Register scratch, Handle< Map > map, Handle< Code > success, SmiCheckType smi_check_type)
bool AllowThisStubCall(CodeStub *stub)
void JumpIfNotPowerOfTwoOrZero(Register reg, Register scratch, Label *not_power_of_two_or_zero)
void ldrb(Register dst, const MemOperand &src, Condition cond=al)
void StoreToSafepointRegisterSlot(Register src, Register dst)
void PopSafepointRegistersAndDoubles()
void set_allow_stub_calls(bool value)
void CheckFastObjectElements(Register map, Register scratch, Label *fail)
void Bfi(Register dst, Register src, Register scratch, int lsb, int width, Condition cond=al)
void Swap(Register reg1, Register reg2, Register scratch=no_reg, Condition cond=al)
bool AreAliased(Register r1, Register r2, Register r3, Register r4)
#define ASSERT(condition)
void AssertNotSmi(Register object)
void RecordWriteField(Register object, int offset, Register value, Register scratch, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void SmiTag(Register dst, Register src, SBit s=LeaveCC)
void stm(BlockAddrMode am, Register base, RegList src, Condition cond=al)
void PushTryHandler(StackHandler::Kind kind, int handler_index)
void LoadTransitionedArrayMapConditional(ElementsKind expected_kind, ElementsKind transitioned_kind, Register map_in_out, Register scratch, Label *no_map_match)
void And(Register dst, Register src1, const Operand &src2, Condition cond=al)
static bool IsMarkedCode(Instr instr, int type)
void NumberOfOwnDescriptors(Register dst, Register map)
MemOperand GlobalObjectOperand()
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Abort(const char *msg)
void Sbfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void MarkCode(NopMarkerTypes type)
MemOperand ContextOperand(Register context, int index)
void CheckMap(Register obj, Register scratch, Handle< Map > map, Label *fail, SmiCheckType smi_check_type, CompareMapMode mode=REQUIRE_EXACT_MAP)
void CopyFields(Register dst, Register src, RegList temps, int field_count)
void AssertSmi(Register object)
void CompareRoot(Register obj, Heap::RootListIndex index)
void Pop(Register src1, Register src2, Register src3, Register src4, Condition cond=al)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst)
const Register kRootRegister
bool use_eabi_hardfloat()
void JumpIfInstanceTypeIsNotSequentialAscii(Register type, Register scratch, Label *failure)
void CompareInstanceType(Register map, Register type_reg, InstanceType type)
void Pop(Register src1, Register src2, Condition cond=al)
void EmitVFPTruncate(VFPRoundingMode rounding_mode, Register result, DwVfpRegister double_input, Register scratch, DwVfpRegister double_scratch, CheckForInexactConversion check=kDontCheckForInexactConversion)
void JumpIfNotHeapNumber(Register object, Register heap_number_map, Register scratch, Label *on_not_heap_number)
void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index)
void JumpIfNotInNewSpace(Register object, Register scratch, Label *branch)
void IsObjectJSObjectType(Register heap_object, Register map, Register scratch, Label *fail)
void CompareObjectType(Register heap_object, Register map, Register type_reg, InstanceType type)
void RecordWriteContextSlot(Register context, int offset, Register value, Register scratch, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void EnumLength(Register dst, Register map)
static int ActivationFrameAlignment()
void SmiUntag(Register dst, Register src, SBit s=LeaveCC)
void LeaveFrame(StackFrame::Type type)
void CheckFastElements(Register map, Register scratch, Label *fail)
void LoadGlobalFunction(int index, Register function)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met)
void TryGetFunctionPrototype(Register function, Register result, Register scratch, Label *miss, bool miss_on_bound_function=false)
void EmitECMATruncate(Register result, DwVfpRegister double_input, SwVfpRegister single_scratch, Register scratch, Register scratch2, Register scratch3)
void CallCFunction(ExternalReference function, int num_arguments)
Condition IsObjectStringType(Register obj, Register type)
void DecodeField(Register reg)
void AllocateAsciiConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void IsInstanceJSObjectType(Register map, Register scratch, Label *fail)
void CheckFastSmiElements(Register map, Register scratch, Label *fail)
void Jump(Register target, Condition cond=al)
void RecordWrite(Register object, Register address, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void JumpIfDataObject(Register value, Register scratch, Label *not_data_object)
void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, Register heap_number_map, Label *gc_required, TaggingMode tagging_mode=TAG_RESULT)
void CopyBytes(Register src, Register dst, Register length, Register scratch)
void LoadHeapObject(Register dst, Handle< HeapObject > object)
void Throw(Register value)
void ConvertToInt32(Register source, Register dest, Register scratch, Register scratch2, DwVfpRegister double_scratch, Label *not_int32)
void Move(Register dst, Handle< Object > value)
void set_has_frame(bool value)
void SetCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void str(Register src, const MemOperand &dst, Condition cond=al)
void Push(Register src1, Register src2, Register src3, Condition cond=al)
void ldm(BlockAddrMode am, Register base, RegList dst, Condition cond=al)
void InvokeCode(Register code, const ParameterCount &expected, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits)
MacroAssembler(Isolate *isolate, void *buffer, int size)
const uint32_t kStringTag
void LoadContext(Register dst, int context_chain_length)
void CallExternalReference(const ExternalReference &ext, int num_arguments)
static int CallSize(Register target, Condition cond=al)
void StoreNumberToDoubleElements(Register value_reg, Register key_reg, Register receiver_reg, Register elements_reg, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Label *fail)
void AssertFastElements(Register elements)
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits)
void PushSafepointRegistersAndDoubles()
void JumpIfNotBothSmi(Register reg1, Register reg2, Label *on_not_both_smi)
void JumpIfBlack(Register object, Register scratch0, Register scratch1, Label *on_black)
void AllocateTwoByteConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
void Drop(int count, Condition cond=al)
void GetBuiltinFunction(Register target, Builtins::JavaScript id)
void ldr(Register dst, const MemOperand &src, Condition cond=al)
void VFPCompareAndSetFlags(const DwVfpRegister src1, const DwVfpRegister src2, const Condition cond=al)
void IllegalOperation(int num_arguments)
void CheckAccessGlobalProxy(Register holder_reg, Register scratch, Label *miss)
void CallApiFunctionAndReturn(ExternalReference function, int stack_space)
void Ldrd(Register dst1, Register dst2, const MemOperand &src, Condition cond=al)
static const int kMapOffset
bool is(Register reg) const 
const uint32_t kIsNotStringMask
void LoadObject(Register result, Handle< Object > object)
void ClearFPSCRBits(const uint32_t bits_to_clear, const Register scratch, const Condition cond=al)
void VFPCompareAndLoadFlags(const DwVfpRegister src1, const DwVfpRegister src2, const Register fpscr_flags, const Condition cond=al)
void CallRuntimeSaveDoubles(Runtime::FunctionId id)
void JumpIfNotPowerOfTwoOrZeroAndNeg(Register reg, Register scratch, Label *zero_and_neg, Label *not_power_of_two)
void ThrowUncatchable(Register value)
void SmiToDoubleVFPRegister(Register smi, DwVfpRegister value, Register scratch1, SwVfpRegister scratch2)
void StoreRoot(Register source, Heap::RootListIndex index, Condition cond=al)
void AllocateInNewSpace(int object_size, Register result, Register scratch1, Register scratch2, Label *gc_required, AllocationFlags flags)
MemOperand FieldMemOperand(Register object, int offset)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void CompareMap(Register obj, Register scratch, Handle< Map > map, Label *early_success, CompareMapMode mode=REQUIRE_EXACT_MAP)
void EmitOutOfInt32RangeTruncate(Register result, Register input_high, Register input_low, Register scratch)
void LoadGlobalFunctionInitialMap(Register function, Register map, Register scratch)
void GetNumberHash(Register t0, Register scratch)
void InvokeFunction(Register function, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
Handle< Object > CodeObject()
static int GetCodeMarker(Instr instr)
void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1, Register object2, Register scratch1, Register scratch2, Label *failure)
void EmitCondition(Condition cond)
void JumpIfBothInstanceTypesAreNotSequentialAscii(Register first_object_instance_type, Register second_object_instance_type, Register scratch1, Register scratch2, Label *failure)
void UndoAllocationInNewSpace(Register object, Register scratch)
void Push(Register src1, Register src2, Register src3, Register src4, Condition cond=al)
void LoadFromSafepointRegisterSlot(Register dst, Register src)
#define ASSERT_EQ(v1, v2)
void Call(Register target, Condition cond=al)
void AllocateAsciiSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void set_generating_stub(bool value)
void Check(Condition cond, const char *msg)
void LoadInitialArrayMap(Register function_in, Register scratch, Register map_out, bool can_have_holes)
void JumpIfNotSmi(Register value, Label *not_smi_label)
void AssertRootValue(Register src, Heap::RootListIndex root_value_index, const char *message)
static int CallSizeNotPredictableCodeSize(Address target, RelocInfo::Mode rmode, Condition cond=al)
void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag, const CallWrapper &call_wrapper=NullCallWrapper())
CheckForInexactConversion
void Pop(Register src1, Register src2, Register src3, Condition cond=al)
void TailCallStub(CodeStub *stub, Condition cond=al)
CodePatcher(byte *address, int instructions)
void UntagAndJumpIfNotSmi(Register dst, Register src, Label *non_smi_case)
void EnsureNotWhite(Register object, Register scratch1, Register scratch2, Register scratch3, Label *object_is_white_and_not_data)
void InitializeRootRegister()
Operand SmiUntagOperand(Register object)
int CalculateStackPassedWords(int num_reg_arguments, int num_double_arguments)
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void JumpIfEitherSmi(Register reg1, Register reg2, Label *on_either_smi)
void CallStub(CodeStub *stub, Condition cond=al)
void IndexFromHash(Register hash, Register index)
void TailCallExternalReference(const ExternalReference &ext, int num_arguments, int result_size)
void EnterExitFrame(bool save_doubles, int stack_space=0)
void InitializeFieldsWithFiller(Register start_offset, Register end_offset, Register filler)
void TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size)
void SetCallKind(Register dst, CallKind kind)
void AllocateTwoByteString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void check(i::Vector< const char > string)
void LoadRoot(Register destination, Heap::RootListIndex index, Condition cond=al)
void RememberedSetHelper(Register object, Register addr, Register scratch, SaveFPRegsMode save_fp, RememberedSetFinalAction and_then)
void Usat(Register dst, int satpos, const Operand &src, Condition cond=al)
void Push(Register src1, Register src2, Condition cond=al)
void HasColor(Register object, Register scratch0, Register scratch1, Label *has_color, int first_bit, int second_bit)
void tst(Register src1, const Operand &src2, Condition cond=al)
void Strd(Register src1, Register src2, const MemOperand &dst, Condition cond=al)
void Vmov(const DwVfpRegister dst, const double imm, const Register scratch=no_reg, const Condition cond=al)
void EnterFrame(StackFrame::Type type)
void CheckEnumCache(Register null_value, Label *call_runtime)
static const int kInstanceTypeOffset
void PopSafepointRegisters()
void AllocateHeapNumberWithValue(Register result, DwVfpRegister value, Register scratch1, Register scratch2, Register heap_number_map, Label *gc_required)
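Several of the indexed helpers compose naturally; an illustrative fragment (register and label choices are arbitrary) that dispatches on smi vs. heap number:

  // Illustrative only: untag a smi, or verify the value is a heap number.
  Label is_smi, not_heap_number;
  masm->JumpIfSmi(r0, &is_smi);
  masm->ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  masm->CompareRoot(r1, Heap::kHeapNumberMapRootIndex);
  masm->b(ne, &not_heap_number);
  // ... r0 holds a heap number here ...
  masm->bind(&is_smi);
  masm->SmiUntag(r1, r0);  // r1 = the integer value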