#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_
SmiIndex(Register index_register, ScaleFactor scale)
    : reg(index_register),
      scale(scale) {}
NoRootArrayScope(MacroAssembler* assembler)
    : variable_(&assembler->root_array_available_),
      old_value_(assembler->root_array_available_) {
  assembler->root_array_available_ = false;
}

~NoRootArrayScope() {
  *variable_ = old_value_;
}
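// Usage sketch (editor's illustration, not part of the original header):
// while emitting code that runs before kRootRegister (r13) has been set up,
// root-relative addressing must be suppressed.  The helper name below is
// hypothetical.
static void GenerateEntrySketch(MacroAssembler* masm) {
  NoRootArrayScope uninitialized_root_register(masm);
  // Inside this scope, LoadAddress() and friends fall back to absolute
  // 64-bit addresses instead of kRootRegister-relative operands.
  masm->InitializeRootRegister();
}  // Destructor restores the previous root_array_available_ value.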
void Load(Register destination, ExternalReference source);

void CheckPageFlag(Register object,
                   Register scratch,
                   int mask,
                   Condition cc,
                   Label* condition_met,
                   Label::Distance condition_met_distance = Label::kFar);
// Check if object is in new space.  Jumps if the object is not in new space.
// The register scratch can be object itself, but scratch will be clobbered.
void JumpIfNotInNewSpace(Register object,
                         Register scratch,
                         Label* branch,
                         Label::Distance distance = Label::kFar) {
  InNewSpace(object, scratch, not_equal, branch, distance);
}

// Check if object is in new space.  Jumps if the object is in new space.
// The register scratch can be object itself, but it will be clobbered.
void JumpIfInNewSpace(Register object,
                      Register scratch,
                      Label* branch,
                      Label::Distance distance = Label::kFar) {
  InNewSpace(object, scratch, equal, branch, distance);
}
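// Usage sketch (editor's illustration): stores into a new-space object need
// no remembered-set entry, so a write-barrier sequence can bail out early.
// Register choices here are hypothetical.
static void SkipBarrierSketch(MacroAssembler* masm,
                              Register object,
                              Register scratch) {
  Label done;
  // Branches to done when 'object' lies in new space; falls through
  // (clobbering scratch) when it does not.
  masm->JumpIfInNewSpace(object, scratch, &done);
  // ... emit the remembered-set update for old-space objects here ...
  masm->bind(&done);
}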
void JumpIfBlack(Register object,
                 Register scratch0,
                 Register scratch1,
                 Label* on_black,
                 Label::Distance on_black_distance = Label::kFar);

// Detects conservatively whether an object is data-only, i.e. it does not
// need to be scanned by the garbage collector.
void JumpIfDataObject(Register value,
                      Register scratch,
                      Label* not_data_object,
                      Label::Distance not_data_object_distance);

// Checks the color of an object; if the object is white, jumps to the label
// and leaves the mark bits untouched.
void EnsureNotWhite(Register object,
                    Register scratch1,
                    Register scratch2,
                    Label* object_is_white_and_not_data,
                    Label::Distance distance);

// Notify the garbage collector that we wrote a pointer into an object.
// The RecordWriteField and RecordWriteArray variants (see the member index)
// share the same trailing parameters.
void RecordWrite(Register object,
                 Register address,
                 Register value,
                 SaveFPRegsMode save_fp,
                 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
                 SmiCheck smi_check = INLINE_SMI_CHECK);
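// Usage sketch (editor's illustration): a field store followed by its write
// barrier, using RecordWriteField from the member index below.  The offset
// and register assignments are hypothetical; rcx is clobbered as scratch.
static void StoreWithBarrierSketch(MacroAssembler* masm) {
  masm->movq(FieldOperand(rbx, JSObject::kHeaderSize), rax);
  masm->RecordWriteField(rbx, JSObject::kHeaderSize, rax, rcx,
                         kDontSaveFPRegs);
}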
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support

void DebugBreak();
#endif
// Enter a specific kind of exit frame; either in normal or debug mode.
// Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
// accessible via StackSpaceOperand.
void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false);
void InitializeRootRegister() {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  movq(kRootRegister, roots_array_start);
  addq(kRootRegister, Immediate(kRootRegisterBias));
}
// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(Register code,
                const ParameterCount& expected,
                const ParameterCount& actual,
                InvokeFlag flag,
                const CallWrapper& call_wrapper,
                CallKind call_kind);

void InvokeCode(Handle<Code> code,
                const ParameterCount& expected,
                const ParameterCount& actual,
                RelocInfo::Mode rmode,
                InvokeFlag flag,
                const CallWrapper& call_wrapper,
                CallKind call_kind);

// Invoke the JavaScript function in the given register.  Changes the
// current context to the context in the function before invoking.
void InvokeFunction(Register function,
                    const ParameterCount& actual,
                    InvokeFlag flag,
                    const CallWrapper& call_wrapper,
                    CallKind call_kind);

void InvokeFunction(Handle<JSFunction> function,
                    const ParameterCount& actual,
                    InvokeFlag flag,
                    const CallWrapper& call_wrapper,
                    CallKind call_kind);
// ---------------------------------------------------------------------------
// Smi tagging, untagging and tag detection, plus tagged (smi) arithmetic,
// bitwise and shift operations in register, constant and operand variants.
// The conditional-jump variants take a Label* target (on_invalid,
// on_not_smi_result, on_not_both_smi, ...) and an optional
// Label::Distance near_jump = Label::kFar argument; full signatures appear
// in the member index below.
void Move(Register dst, Smi* source) {
  LoadSmiConstant(dst, source);
}

void Move(const Operand& dst, Smi* source) {
  Register constant = GetSmiConstant(source);
  movq(dst, constant);
}

void Test(const Operand& dst, Smi* source);
void JumpIfNotString(Register object,
                     Register object_map,
                     Label* not_string,
                     Label::Distance near_jump = Label::kFar);

void JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_not_both_flat_ascii,
    Label::Distance near_jump = Label::kFar);

// Check whether the instance type represents a flat ASCII string.  Jump to
// the label if not.  If the instance type is known, it can be a constant.
void JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* on_not_flat_ascii_string,
    Label::Distance near_jump = Label::kFar);

void JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump = Label::kFar);
void Set(const Operand& dst, int64_t x);

void TestBit(const Operand& dst, int bit_index);

void Cmp(const Operand& dst, Smi* src);
void LoadObject(Register result, Handle<Object> object) {
  if (object->IsHeapObject()) {
    LoadHeapObject(result, Handle<HeapObject>::cast(object));
  } else {
    Move(result, object);
  }
}
// Drop stack_elements elements from the stack, leaving the return address
// untouched.
void Drop(int stack_elements);

void Jump(Address destination, RelocInfo::Mode rmode);
void Jump(ExternalReference ext);
void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

void Call(Address destination, RelocInfo::Mode rmode);
void Call(ExternalReference ext);
void Call(Handle<Code> code_object,
          RelocInfo::Mode rmode,
          TypeFeedbackId ast_id = TypeFeedbackId::None());
// The size of the code generated for different call instructions.
int CallSize(Address destination, RelocInfo::Mode rmode) {
  return kCallInstructionLength;
}
int CallSize(ExternalReference ext);
int CallSize(Handle<Code> code_object) {
  // Code calls use 32-bit relative addressing.
  return kShortCallInstructionLength;
}
int CallSize(Register target) {
  // Opcode: REX_opt FF /2 m64
  return (target.high_bit() != 0) ? 3 : 2;
}
int CallSize(const Operand& target) {
  // Opcode: REX_opt FF /2 m64
  return (target.requires_rex() ? 2 : 1) + target.operand_size();
}
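// Worked example (editor's note, not original source): registers r8..r15
// report a nonzero high_bit() and therefore need a REX prefix.
//   call rax  =>  FF D0     (2 bytes)  ->  CallSize(rax) == 2
//   call r8   =>  41 FF D0  (3 bytes)  ->  CallSize(r8)  == 3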
// Emit a call to the code we are currently generating.
void CallSelf() {
  Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
  Call(self, RelocInfo::CODE_TARGET);
}
// Check if a map for a JSObject indicates that the object has fast elements.
// Jump to the specified label if it does not.
void CheckFastElements(Register map,
                       Label* fail,
                       Label::Distance distance = Label::kFar);

void CheckFastObjectElements(Register map,
                             Label* fail,
                             Label::Distance distance = Label::kFar);

void CheckFastSmiElements(Register map,
                          Label* fail,
                          Label::Distance distance = Label::kFar);

// Compare an object's map with the specified map and its transitioned
// elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS.  FLAGS are set
// with the result of the map compare; branches to early_success when an
// intermediate compare in the sequence already matches.
void CompareMap(Register obj,
                Handle<Map> map,
                Label* early_success,
                CompareMapMode mode = REQUIRE_EXACT_MAP);
template<typename Field>
void DecodeField(Register reg) {
  static const int shift = Field::kShift + kSmiShift;
  static const int mask = Field::kMask >> Field::kShift;
  shr(reg, Immediate(shift));
  and_(reg, Immediate(mask));
  shl(reg, Immediate(kSmiShift));
}
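// Usage sketch (editor's illustration): DecodeField extracts a bit field
// from a smi-tagged word and leaves the result smi-tagged.  ExampleBits is
// hypothetical and assumes the runtime's BitField<> template exposes the
// kShift/kMask constants DecodeField requires.
class ExampleBits : public BitField<int, 3, 2> {};  // 2 bits at offset 3.

static void DecodeExampleSketch(MacroAssembler* masm, Register reg) {
  // reg holds a smi whose untagged payload contains an ExampleBits field.
  masm->DecodeField<ExampleBits>(reg);
  // reg now holds the field's value, still represented as a smi.
}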
// ---------------------------------------------------------------------------
// Allocation support.  Each Allocate* helper (raw new-space objects,
// two-byte and ASCII strings, cons and sliced strings, heap numbers)
// branches to its Label* gc_required argument when allocation fails.
// NegativeZeroTest, TryGetFunctionPrototype (with
// bool miss_on_bound_function = false), LoadTransitionedArrayMapConditional
// (Label* no_map_match) and LoadInitialArrayMap (bool can_have_holes)
// complete this section; full signatures appear in the member index below.
// Calls a C function and cleans up the space for arguments allocated by
// PrepareCallCFunction.  The called function is not allowed to trigger a
// garbage collection.
void CallCFunction(ExternalReference function, int num_arguments);
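// Usage sketch (editor's illustration): pairing PrepareCallCFunction with
// CallCFunction.  The ExternalReference is supplied by the caller; argument
// registers follow the native C ABI (rdi/rsi on AMD64, rcx/rdx on Win64).
static void CallCHelperSketch(MacroAssembler* masm,
                              ExternalReference function) {
  const int kNumArguments = 2;
  // Reserves and aligns stack space for the call.
  masm->PrepareCallCFunction(kNumArguments);
  // ... move the two arguments into the ABI registers here ...
  masm->CallCFunction(function, kNumArguments);
}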
Handle<Object> CodeObject() {
  ASSERT(!code_object_.is_null());
  return code_object_;
}
// Print a message to stdout and abort execution.
void Abort(const char* msg);
static int SafepointRegisterStackIndex(Register reg) {
  return SafepointRegisterStackIndex(reg.code());
}
void CheckEnumCache(Register null_value, Label* call_runtime);
static const int kNumSafepointSavedRegisters = 11;

bool generating_stub_;
bool allow_stub_calls_;
bool has_frame_;
bool root_array_available_;
intptr_t RootRegisterDelta(ExternalReference other);
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
                    const ParameterCount& actual,
                    Handle<Code> code_constant,
                    Register code_register,
                    Label* done,
                    bool* definitely_mismatches,
                    InvokeFlag flag,
                    Label::Distance near_jump = Label::kFar,
                    const CallWrapper& call_wrapper = NullCallWrapper(),
                    CallKind call_kind = CALL_AS_METHOD);
void EnterExitFramePrologue(bool save_rax);

// Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
// accessible via StackSpaceOperand.
void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);

void LeaveExitFrameEpilogue();
// Allocation support helpers.  Loads the top of new-space into the result
// register.
void LoadAllocationTopHelper(Register result,
                             Register scratch,
                             AllocationFlags flags);

// Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
void InNewSpace(Register object,
                Register scratch,
                Condition cc,
                Label* branch,
                Label::Distance distance = Label::kFar);
// Helper for finding the mark bits for an address.  Afterwards, the bitmap
// register points at the word with the mark bits and the mask at the
// position of the first bit.  Uses kScratchRegister as scratch.
inline void GetMarkBits(Register addr_reg,
                        Register bitmap_reg,
                        Register mask_reg);
void JumpToHandlerEntry();
// Compute memory operands for safepoint stack slots.
Operand SafepointRegisterSlot(Register reg);
static int SafepointRegisterStackIndex(int reg_code) {
  return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
}
// Generate an Operand for loading a field from an object.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}
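// Usage sketch (editor's illustration): tagged heap pointers point
// kHeapObjectTag (1) bytes past the object's true start, and FieldOperand
// folds the correction into the displacement.  Loading the map of the
// object held in rax:
static void LoadMapSketch(MacroAssembler* masm) {
  masm->movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
}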
// Provides access to exit frame stack space (not GC-ed).
static inline Operand StackSpaceOperand(int index) {
#ifdef _WIN64
  const int kShadowSpace = 4;  // Win64 ABI: callee-reserved shadow space.
  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
#else
  return Operand(rsp, index * kPointerSize);
#endif
}
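// Usage sketch (editor's illustration): addressing exit-frame stack slots
// uniformly across platforms, assuming space for two slots was reserved
// (e.g. via EnterExitFrame with arg_stack_space = 2).
static void UseExitFrameSlotsSketch(MacroAssembler* masm) {
  masm->movq(StackSpaceOperand(0), rax);  // First reserved slot.
  masm->movq(StackSpaceOperand(1), rbx);  // Second reserved slot.
}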
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) {                                                  \
    byte* x64_coverage_function =                                            \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage));    \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));            \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY);             \
  }                                                                          \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
#endif  // V8_X64_MACRO_ASSEMBLER_X64_H_
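// End-to-end sketch (editor's illustration, drawing on the member index
// that follows): tag two int32 values, add them as smis, and bail out to
// 'slow' on overflow.  Register choices are arbitrary.
static void SmiAddSketch(MacroAssembler* masm, Label* slow) {
  masm->Integer32ToSmi(rax, rax);     // Tag the first operand.
  masm->Integer32ToSmi(rbx, rbx);     // Tag the second operand.
  masm->SmiAdd(rcx, rax, rbx, slow);  // rcx = rax + rbx, or jump to slow.
  masm->SmiToInteger32(rcx, rcx);     // Untag the result.
}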
void CallRuntime(const Runtime::Function *f, int num_arguments)
void ClampDoubleToUint8(Register result_reg, DoubleRegister input_reg, DoubleRegister temp_double_reg)
void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch)
void Push(Handle< Object > handle)
void JumpIfSmiEqualsConstant(Register src, Smi *constant, Label *on_equals, Label::Distance near_jump=Label::kFar)
void ClampUint8(Register output_reg, Register input_reg)
Isolate * isolate() const
void JumpIfNotString(Register object, Register object_map, Label *not_string, Label::Distance near_jump=Label::kFar)
void PushSafepointRegisters()
void Assert(Condition cond, const char *msg)
void CheckStackAlignment()
void SmiShiftArithmeticRight(Register dst, Register src1, Register src2)
void JumpIfNotValidSmiValue(Register src, Label *on_invalid, Label::Distance near_jump=Label::kFar)
static int SlotOffset(int index)
void LoadRootIndexed(Register destination, Register variable_offset, int fixed_offset)
int LoadAddressSize(ExternalReference source)
void RecordWriteContextSlot(Register context, int offset, Register value, Register scratch, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift)
void SafeMove(Register dst, Smi *src)
void SmiDiv(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void SmiOrIfSmis(Register dst, Register src1, Register src2, Label *on_not_smis, Label::Distance near_jump=Label::kFar)
void SmiOr(Register dst, Register src1, Register src2)
void AllocateTwoByteSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
Condition CheckUInteger32ValidSmiValue(Register src)
friend class OptimizedFrame
void LeaveExitFrame(bool save_doubles, Register argument_count)
void LoadGlobalCell(Register dst, Handle< JSGlobalPropertyCell > cell)
static Smi * FromInt(int value)
void AssertString(Register object)
static TypeFeedbackId None()
void RecordWriteArray(Register array, Register value, Register index, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void JumpToExternalReference(const ExternalReference &builtin)
void Cmp(Register dst, Handle< Object > source)
void LoadInstanceDescriptors(Register map, Register descriptors)
void AllocateAsciiString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void JumpIfNotBothSequentialAsciiStrings(Register first, Register second, Register scratch1, Register scratch2, Label *not_flat_ascii_strings)
void LoadFromNumberDictionary(Label *miss, Register elements, Register key, Register result, Register t0, Register t1, Register t2)
int CallSize(Register target)
void addq(Register dst, Register src)
void SmiAdd(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void GetBuiltinEntry(Register target, Builtins::JavaScript id)
void JumpIfSmi(Register value, Label *smi_label)
void DispatchMap(Register obj, Register scratch, Handle< Map > map, Handle< Code > success, SmiCheckType smi_check_type)
bool AllowThisStubCall(CodeStub *stub)
void StoreToSafepointRegisterSlot(Register src, Register dst)
void CheckSmiToIndicator(Register dst, Register src)
static const int kCallInstructionLength
void set_allow_stub_calls(bool value)
void CheckFastObjectElements(Register map, Register scratch, Label *fail)
SmiIndex SmiToIndex(Register dst, Register src, int shift)
bool AreAliased(Register r1, Register r2, Register r3, Register r4)
#define ASSERT(condition)
void AssertNotSmi(Register object)
void RecordWriteField(Register object, int offset, Register value, Register scratch, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
static const int kShortCallInstructionLength
Condition CheckIsMinSmi(Register src)
void PushTryHandler(StackHandler::Kind kind, int handler_index)
void LoadTransitionedArrayMapConditional(ElementsKind expected_kind, ElementsKind transitioned_kind, Register map_in_out, Register scratch, Label *no_map_match)
void NumberOfOwnDescriptors(Register dst, Register map)
void SmiAndConstant(Register dst, Register src1, Smi *constant)
void SmiOrConstant(Register dst, Register src1, Smi *constant)
MemOperand GlobalObjectOperand()
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Abort(const char *msg)
void SmiSub(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void SmiTryAddConstant(Register dst, Register src, Smi *constant, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
MemOperand ContextOperand(Register context, int index)
void CheckMap(Register obj, Register scratch, Handle< Map > map, Label *fail, SmiCheckType smi_check_type, CompareMapMode mode=REQUIRE_EXACT_MAP)
void SmiMod(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
Condition CheckBothSmi(Register first, Register second)
void AssertSmi(Register object)
void CompareRoot(Register obj, Heap::RootListIndex index)
void PushHeapObject(Handle< HeapObject > object)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
static int SafepointRegisterStackIndex(Register reg)
static const int kNumRegisters
void Move(Register dst, Smi *source)
const Register kRootRegister
void PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void JumpIfInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
void JumpIfInstanceTypeIsNotSequentialAscii(Register type, Register scratch, Label *failure)
int CallSize(const Operand &target)
void SmiToInteger64(Register dst, Register src)
void EnumLength(Register dst, Register map)
void Load(Register destination, ExternalReference source)
void LeaveFrame(StackFrame::Type type)
void CheckFastElements(Register map, Register scratch, Label *fail)
void LoadGlobalFunction(int index, Register function)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met)
void Integer64PlusConstantToSmi(Register dst, Register src, int constant)
void TryGetFunctionPrototype(Register function, Register result, Register scratch, Label *miss, bool miss_on_bound_function=false)
void SmiMul(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void SmiShiftLogicalRight(Register dst, Register src1, Register src2, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void CallCFunction(ExternalReference function, int num_arguments)
Condition IsObjectStringType(Register obj, Register type)
void SafePush(const Immediate &x)
void DecodeField(Register reg)
void AllocateAsciiConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
Operand FieldOperand(Register object, int offset)
void SmiShiftLeftConstant(Register dst, Register src, int shift_value)
void CheckFastSmiElements(Register map, Register scratch, Label *fail)
void Jump(Register target, Condition cond=al)
void RecordWrite(Register object, Register address, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void JumpIfDataObject(Register value, Register scratch, Label *not_data_object)
void SmiXorConstant(Register dst, Register src1, Smi *constant)
Operand ExternalOperand(ExternalReference reference, Register scratch=kScratchRegister)
void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, Register heap_number_map, Label *gc_required, TaggingMode tagging_mode=TAG_RESULT)
void CopyBytes(Register src, Register dst, Register length, Register scratch)
void SmiXor(Register dst, Register src1, Register src2)
void LoadHeapObject(Register dst, Handle< HeapObject > object)
void SmiShiftLeft(Register dst, Register src1, Register src2)
void Throw(Register value)
Condition CheckInteger32ValidSmiValue(Register src)
void Move(Register dst, Handle< Object > value)
void shl(Register dst, uint8_t imm8)
Operand StackSpaceOperand(int index)
void EnterApiExitFrame(int argc)
void PrepareCallApiFunction(int argc)
void set_has_frame(bool value)
void SetCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
SmiIndex(Register index_register, ScaleFactor scale)
void InvokeCode(Register code, const ParameterCount &expected, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
void NegativeZeroTest(Register result, Register op, Label *then_label)
const int kRootRegisterBias
MacroAssembler(Isolate *isolate, void *buffer, int size)
void SmiCompareInteger32(const Operand &dst, Register src)
void LoadContext(Register dst, int context_chain_length)
void CallExternalReference(const ExternalReference &ext, int num_arguments)
static int CallSize(Register target, Condition cond=al)
void StoreNumberToDoubleElements(Register value_reg, Register key_reg, Register receiver_reg, Register elements_reg, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Label *fail)
void JumpIfNotInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
void AssertFastElements(Register elements)
void LoadAddress(Register destination, ExternalReference source)
bool IsUnsafeInt(const int x)
void JumpIfNotBothSmi(Register reg1, Register reg2, Label *on_not_both_smi)
void JumpIfBlack(Register object, Register scratch0, Register scratch1, Label *on_black)
void SmiSubConstant(Register dst, Register src, Smi *constant)
void AllocateTwoByteConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void SmiAnd(Register dst, Register src1, Register src2)
void Drop(int count, Condition cond=al)
void Integer32ToSmiField(const Operand &dst, Register src)
void JumpIfUIntNotValidSmiValue(Register src, Label *on_invalid, Label::Distance near_jump=Label::kFar)
int CallSize(Address destination, RelocInfo::Mode rmode)
void GetBuiltinFunction(Register target, Builtins::JavaScript id)
void IllegalOperation(int num_arguments)
void CheckAccessGlobalProxy(Register holder_reg, Register scratch, Label *miss)
void CallApiFunctionAndReturn(ExternalReference function, int stack_space)
NoRootArrayScope(MacroAssembler *assembler)
void AssertZeroExtended(Register reg)
void LoadObject(Register result, Handle< Object > object)
void SmiTest(Register src)
void SmiShiftArithmeticRightConstant(Register dst, Register src, int shift_value)
void InitializeSmiConstantRegister()
void CallRuntimeSaveDoubles(Runtime::FunctionId id)
void ThrowUncatchable(Register value)
void StoreRoot(Register source, Heap::RootListIndex index, Condition cond=al)
void AllocateInNewSpace(int object_size, Register result, Register scratch1, Register scratch2, Label *gc_required, AllocationFlags flags)
void SmiCompare(Register smi1, Register smi2)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void CompareMap(Register obj, Register scratch, Handle< Map > map, Label *early_success, CompareMapMode mode=REQUIRE_EXACT_MAP)
const int kNumSafepointRegisters
void PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
const Register kScratchRegister
void LoadGlobalFunctionInitialMap(Register function, Register map, Register scratch)
void GetNumberHash(Register t0, Register scratch)
void InvokeFunction(Register function, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
Handle< Object > CodeObject()
void Store(ExternalReference destination, Register source)
void PositiveSmiDivPowerOfTwoToInteger32(Register dst, Register src, int power)
void JumpIfBothInstanceTypesAreNotSequentialAscii(Register first_object_instance_type, Register second_object_instance_type, Register scratch1, Register scratch2, Label *failure)
void SmiToInteger32(Register dst, Register src)
void UndoAllocationInNewSpace(Register object, Register scratch)
void SmiNot(Register dst, Register src)
void LoadFromSafepointRegisterSlot(Register dst, Register src)
void shr(Register dst, uint8_t imm8)
void Call(Register target, Condition cond=al)
void Set(Register dst, const Immediate &x)
void SmiShiftLogicalRightConstant(Register dst, Register src, int shift_value, Label *on_not_smi_result, Label::Distance near_jump=Label::kFar)
void AllocateAsciiSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
Condition CheckNonNegativeSmi(Register src)
void set_generating_stub(bool value)
void Check(Condition cond, const char *msg)
void SmiAddConstant(Register dst, Register src, Smi *constant)
void LoadInitialArrayMap(Register function_in, Register scratch, Register map_out, bool can_have_holes)
void JumpIfNotSmi(Register value, Label *not_smi_label)
void AssertRootValue(Register src, Heap::RootListIndex root_value_index, const char *message)
Condition CheckEitherSmi(Register first, Register second, Register scratch=kScratchRegister)
void SmiNeg(Register dst, Register src, Label *on_smi_result, Label::Distance near_jump=Label::kFar)
const int kSmiConstantRegisterValue
void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag, const CallWrapper &call_wrapper=NullCallWrapper())
void AddSmiField(Register dst, const Operand &src)
void TestBit(const Operand &dst, int bit_index)
Condition CheckSmi(Register src)
void SelectNonSmi(Register dst, Register src1, Register src2, Label *on_not_smis, Label::Distance near_jump=Label::kFar)
void TailCallStub(CodeStub *stub, Condition cond=al)
CodePatcher(byte *address, int instructions)
void EnsureNotWhite(Register object, Register scratch1, Register scratch2, Register scratch3, Label *object_is_white_and_not_data)
void InitializeRootRegister()
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void Integer32ToSmi(Register dst, Register src)
const Register kSmiConstantRegister
void CallStub(CodeStub *stub, Condition cond=al)
void IndexFromHash(Register hash, Register index)
void TailCallExternalReference(const ExternalReference &ext, int num_arguments, int result_size)
Condition CheckBothNonNegativeSmi(Register first, Register second)
int ArgumentStackSlotsForCFunctionCall(int num_arguments)
void EnterExitFrame(bool save_doubles, int stack_space=0)
void InitializeFieldsWithFiller(Register start_offset, Register end_offset, Register filler)
int CallSize(Handle< Code > code_object)
void TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size)
void SetCallKind(Register dst, CallKind kind)
void AllocateTwoByteString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void LoadRoot(Register destination, Heap::RootListIndex index, Condition cond=al)
void RememberedSetHelper(Register object, Register addr, Register scratch, SaveFPRegsMode save_fp, RememberedSetFinalAction and_then)
void JumpUnlessBothNonNegativeSmi(Register src1, Register src2, Label *on_not_both_smi, Label::Distance near_jump=Label::kFar)
void PositiveSmiTimesPowerOfTwoToInteger64(Register dst, Register src, int power)
void Test(const Operand &dst, Smi *source)
void PushRoot(Heap::RootListIndex index)
void CmpObjectType(Register heap_object, InstanceType type, Register map)
void CmpInstanceType(Register map, InstanceType type)
void JumpUnlessNonNegativeSmi(Register src, Label *on_not_smi, Label::Distance near_jump=Label::kFar)
void EnterFrame(StackFrame::Type type)
void movq(const Operand &dst, Register src)
void PushAddress(ExternalReference source)
void CheckEnumCache(Register null_value, Label *call_runtime)
void PopSafepointRegisters()
void StubReturn(int argc)
void AssertNumber(Register object)
void Move(const Operand &dst, Smi *source)