#ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_

// ...

class MacroAssembler : public Assembler {
  // ...
                     Label::Distance condition_met_distance = Label::kFar);
  // ...
                     Label::Distance condition_met_distance = Label::kFar);
  // ...
                           Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, zero, branch, distance);
  }
  // ...
                        Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, not_zero, branch, distance);
  }
  // ...
                Label::Distance has_color_distance,
  // ...
                   Label::Distance on_black_distance = Label::kFar);
  // ...
                      Label* object_is_white_and_not_data,
                      Label::Distance distance);
  // ...
                   remembered_set_action,
  // ...
#ifdef ENABLE_DEBUGGER_SUPPORT
  // ...
      Label* no_map_match);
  // ...
      bool can_have_holes);
  // ...
    if (object->IsHeapObject()) {
      // ...
    } else {
      Set(result, Immediate(object));
    }
  // ...
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  // ...
    InvokeCode(Operand(code), expected, actual, flag, call_wrapper, call_kind);
  }
  // ...
                  const ParameterCount& expected,
                  const ParameterCount& actual,
  // ...
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
  // ...
                      const ParameterCount& actual,
  // ...
                      const ParameterCount& actual,
  // ...
  void Set(const Operand& dst, const Immediate& x);
  // ...
               Label::Distance distance = Label::kFar);
  // ...
               Label::Distance distance = Label::kFar);
  // ...
               Label::Distance distance = Label::kFar);
  // ...
                          bool specialize_for_processor);
  // ...
                  Label* early_success,
  // ...
                 Label::Distance distance = Label::kFar) {
    // ...
    j(zero, smi_label, distance);
  }
  // ...
                 Label::Distance distance = Label::kFar) {
    // ...
    j(zero, smi_label, distance);
  }
  // ...
                    Label* not_smi_label,
                    Label::Distance distance = Label::kFar) {
  // ...
  template<typename Field>
  void DecodeField(Register reg) {
    static const int shift = Field::kShift;
    static const int mask = (Field::kMask >> Field::kShift) << kSmiTagSize;
    // ...
    and_(reg, Immediate(mask));
  }
  // ...
                        Register scratch, Label* then_label);
  // ...
                               bool miss_on_bound_function = false);
  // ...
  void CallCFunction(ExternalReference function, int num_arguments);
  // ...
  void Drop(int element_count);
  // ...
    call(self, RelocInfo::CODE_TARGET);
  // ...
  void Abort(const char* msg);
  // ...
                                   Label* on_not_flat_ascii_string);
  // ...
                                          Label* on_not_flat_ascii_strings);
  // ...
    return SafepointRegisterStackIndex(reg.code());
  }
  // ...
  bool generating_stub_;
  bool allow_stub_calls_;
  // ...
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      // ...
                      const Operand& code_operand,
                      // ...
                      bool* definitely_mismatches,
                      // ...
                      Label::Distance done_distance,
                      // ...
  void EnterExitFramePrologue();
  void EnterExitFrameEpilogue(int argc, bool save_doubles);
  // ...
  void LeaveExitFrameEpilogue();
  // ...
  void LoadAllocationTopHelper(Register result,
                               // ...
  // ...
                  Label* condition_met,
                  Label::Distance condition_met_distance = Label::kFar);
  // ...
  inline void GetMarkBits(Register addr_reg,
                          // ...
  // ...
  void JumpToHandlerEntry();
  // ...
  Operand SafepointRegisterSlot(Register reg);
  static int SafepointRegisterStackIndex(int reg_code);
  // ...
};

// ...

#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) { \
    byte* ia32_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    /* ... */ \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    /* ... */
#else
#define ACCESS_MASM(masm) masm->
#endif

// ...

#endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_
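Code generators do not usually call these helpers directly through a MacroAssembler pointer; they hide the ACCESS_MASM indirection defined above behind a local __ macro. The sketch below only illustrates that convention and is not code from this header: the includes, the choice of eax as the incoming register, and the stub's behaviour are assumptions, and apart from the assembler's bind and ret it uses only members whose signatures appear in this file (JumpIfNotSmi, SmiUntag, Abort).

#include "v8.h"               // umbrella include; assumed build context
#include "macro-assembler.h"  // selects ia32/macro-assembler-ia32.h on ia32

#define __ ACCESS_MASM(masm)

namespace v8 {
namespace internal {

// Illustrative stub: expects a smi in eax and returns its untagged value.
void GenerateSmiToInt32(MacroAssembler* masm) {
  Label not_smi;
  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);  // heap objects bail out
  __ SmiUntag(eax);                              // shift away the smi tag
  __ ret(0);

  __ bind(&not_smi);
  __ Abort("GenerateSmiToInt32: argument is not a smi");
}

} }  // namespace v8::internal

#undef __

In a non-coverage build each __ line expands to a plain masm-> call; with GENERATED_CODE_COVERAGE defined, the same line additionally logs the generating source position through LogGeneratedCodeCoverage. The listing that follows gives the remaining members and related symbols referenced by the header.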
void CallRuntime(const Runtime::Function *f, int num_arguments)
void ClampDoubleToUint8(Register result_reg, DoubleRegister input_reg, DoubleRegister temp_double_reg)
void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch)
void Push(Handle< Object > handle)
void ClampUint8(Register output_reg, Register input_reg)
void LoadPowerOf2(XMMRegister dst, Register scratch, int power)
void SmiUntag(Register reg, Label *is_smi)
Isolate * isolate() const
const intptr_t kSmiTagMask
void PushSafepointRegisters()
void Assert(Condition cond, const char *msg)
void CheckStackAlignment()
static int SlotOffset(int index)
void SmiUntag(Register reg)
void RecordWriteContextSlot(Register context, int offset, Register value, Register scratch, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void AllocateTwoByteSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void LeaveExitFrame(bool save_doubles, Register argument_count)
void AssertString(Register object)
static TypeFeedbackId None()
void RecordWriteArray(Register array, Register value, Register index, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void JumpToExternalReference(const ExternalReference &builtin)
void LoadInstanceDescriptors(Register map, Register descriptors)
void AllocateAsciiString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void JumpIfNotSmi(Register value, Label *not_smi_label, Label::Distance distance=Label::kFar)
void JumpIfNotBothSequentialAsciiStrings(Register first, Register second, Register scratch1, Register scratch2, Label *not_flat_ascii_strings)
void LoadFromNumberDictionary(Label *miss, Register elements, Register key, Register result, Register t0, Register t1, Register t2)
void GetBuiltinEntry(Register target, Builtins::JavaScript id)
void DispatchMap(Register obj, Register scratch, Handle< Map > map, Handle< Code > success, SmiCheckType smi_check_type)
bool AllowThisStubCall(CodeStub *stub)
void StoreToSafepointRegisterSlot(Register src, Register dst)
void set_allow_stub_calls(bool value)
void CheckFastObjectElements(Register map, Register scratch, Label *fail)
void j(Condition cc, Label *L, Label::Distance distance=Label::kFar)
bool AreAliased(Register r1, Register r2, Register r3, Register r4)
#define ASSERT(condition)
void RecordWriteForMap(Register object, Handle< Map > map, Register scratch1, Register scratch2, SaveFPRegsMode save_fp)
void AssertNotSmi(Register object)
void RecordWriteField(Register object, int offset, Register value, Register scratch, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
void PushTryHandler(StackHandler::Kind kind, int handler_index)
void LoadTransitionedArrayMapConditional(ElementsKind expected_kind, ElementsKind transitioned_kind, Register map_in_out, Register scratch, Label *no_map_match)
void CheckPageFlagForMap(Handle< Map > map, int mask, Condition cc, Label *condition_met, Label::Distance condition_met_distance=Label::kFar)
void NumberOfOwnDescriptors(Register dst, Register map)
MemOperand GlobalObjectOperand()
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Abort(const char *msg)
void sar(Register dst, uint8_t imm8)
MemOperand ContextOperand(Register context, int index)
void CheckMap(Register obj, Register scratch, Handle< Map > map, Label *fail, SmiCheckType smi_check_type, CompareMapMode mode=REQUIRE_EXACT_MAP)
void AssertSmi(Register object)
void CompareRoot(Register obj, Heap::RootListIndex index)
void PushHeapObject(Handle< HeapObject > object)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
static int SafepointRegisterStackIndex(Register reg)
void JumpIfInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
void JumpIfInstanceTypeIsNotSequentialAscii(Register type, Register scratch, Label *failure)
void IsObjectJSObjectType(Register heap_object, Register map, Register scratch, Label *fail)
void EnumLength(Register dst, Register map)
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
void LeaveFrame(StackFrame::Type type)
void CheckFastElements(Register map, Register scratch, Label *fail)
void LoadGlobalFunction(int index, Register function)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met)
void TryGetFunctionPrototype(Register function, Register result, Register scratch, Label *miss, bool miss_on_bound_function=false)
void CallCFunction(ExternalReference function, int num_arguments)
Condition IsObjectStringType(Register obj, Register type)
void SafePush(const Immediate &x)
void DecodeField(Register reg)
void AllocateAsciiConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
Operand FieldOperand(Register object, int offset)
void IsInstanceJSObjectType(Register map, Register scratch, Label *fail)
void CheckFastSmiElements(Register map, Register scratch, Label *fail)
void RecordWrite(Register object, Register address, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action=EMIT_REMEMBERED_SET, SmiCheck smi_check=INLINE_SMI_CHECK)
bool IsUnsafeImmediate(const Immediate &x)
void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, Register heap_number_map, Label *gc_required, TaggingMode tagging_mode=TAG_RESULT)
void CopyBytes(Register src, Register dst, Register length, Register scratch)
void LoadHeapObject(Register dst, Handle< HeapObject > object)
void Throw(Register value)
void Move(Register dst, Handle< Object > value)
void SafeSet(Register dst, const Immediate &x)
void EnterApiExitFrame(int argc)
void PrepareCallApiFunction(int argc)
void set_has_frame(bool value)
void SetCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void InvokeCode(Register code, const ParameterCount &expected, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
void NegativeZeroTest(Register result, Register op, Label *then_label)
MacroAssembler(Isolate *isolate, void *buffer, int size)
void LoadContext(Register dst, int context_chain_length)
void CallExternalReference(const ExternalReference &ext, int num_arguments)
void StoreNumberToDoubleElements(Register value_reg, Register key_reg, Register receiver_reg, Register elements_reg, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Label *fail)
void JumpIfNotInNewSpace(Register object, Register scratch, Label *branch, Label::Distance distance=Label::kFar)
void AssertFastElements(Register elements)
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void JumpIfBlack(Register object, Register scratch0, Register scratch1, Label *on_black)
void AllocateTwoByteConsString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void Drop(int count, Condition cond=al)
void GetBuiltinFunction(Register target, Builtins::JavaScript id)
void IllegalOperation(int num_arguments)
void CheckAccessGlobalProxy(Register holder_reg, Register scratch, Label *miss)
void CallApiFunctionAndReturn(ExternalReference function, int stack_space)
void LoadObject(Register result, Handle< Object > object)
void SmiTag(Register reg)
void CallRuntimeSaveDoubles(Runtime::FunctionId id)
void ThrowUncatchable(Register value)
void AllocateInNewSpace(int object_size, Register result, Register scratch1, Register scratch2, Label *gc_required, AllocationFlags flags)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void CompareMap(Register obj, Register scratch, Handle< Map > map, Label *early_success, CompareMapMode mode=REQUIRE_EXACT_MAP)
void LoadGlobalFunctionInitialMap(Register function, Register map, Register scratch)
void GetNumberHash(Register t0, Register scratch)
void InvokeFunction(Register function, const ParameterCount &actual, InvokeFlag flag, const CallWrapper &call_wrapper, CallKind call_kind)
void JumpIfSmi(Operand value, Label *smi_label, Label::Distance distance=Label::kFar)
Handle< Object > CodeObject()
Operand ApiParameterOperand(int index)
void UndoAllocationInNewSpace(Register object, Register scratch)
void LoadFromSafepointRegisterSlot(Register dst, Register src)
void test(Register reg, const Immediate &imm)
void Set(Register dst, const Immediate &x)
void AllocateAsciiSlicedString(Register result, Register length, Register scratch1, Register scratch2, Label *gc_required)
void set_generating_stub(bool value)
void Check(Condition cond, const char *msg)
void LoadInitialArrayMap(Register function_in, Register scratch, Register map_out, bool can_have_holes)
void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag, const CallWrapper &call_wrapper=NullCallWrapper())
void TailCallStub(CodeStub *stub, Condition cond=al)
CodePatcher(byte *address, int instructions)
void BooleanBitTest(Register object, int field_offset, int bit_index)
void EnsureNotWhite(Register object, Register scratch1, Register scratch2, Register scratch3, Label *object_is_white_and_not_data)
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void CallStub(CodeStub *stub, Condition cond=al)
void IndexFromHash(Register hash, Register index)
void TailCallExternalReference(const ExternalReference &ext, int num_arguments, int result_size)
void EnterExitFrame(bool save_doubles, int stack_space=0)
void InitializeFieldsWithFiller(Register start_offset, Register end_offset, Register filler)
void TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size)
void SetCallKind(Register dst, CallKind kind)
void AllocateTwoByteString(Register result, Register length, Register scratch1, Register scratch2, Register scratch3, Label *gc_required)
void RememberedSetHelper(Register object, Register addr, Register scratch, SaveFPRegsMode save_fp, RememberedSetFinalAction and_then)
void HasColor(Register object, Register scratch0, Register scratch1, Label *has_color, int first_bit, int second_bit)
void CmpObjectType(Register heap_object, InstanceType type, Register map)
void CmpInstanceType(Register map, InstanceType type)
void EnterFrame(StackFrame::Type type)
void JumpIfSmi(Register value, Label *smi_label, Label::Distance distance=Label::kFar)
void CheckEnumCache(Register null_value, Label *call_runtime)
void PopSafepointRegisters()
void StubReturn(int argc)
void AssertNumber(Register object)
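Most of the type-checking members above are combined into short guards in the stub and IC generators. The following sketch, under the same assumed build context as the earlier one, shows the common bail-out-unless-the-receiver-is-a-JSArray pattern; the register assignments (edx for the receiver, ebx as scratch for the map) and the surrounding helper are illustrative assumptions.

// Illustrative guard: falls through only when edx holds a JSArray.
void GenerateJSArrayGuard(MacroAssembler* masm, Label* bailout) {
  // Smis have no map, so filter them out before touching the map word.
  masm->JumpIfSmi(edx, bailout);
  // Loads edx's map into ebx and compares its instance type.
  masm->CmpObjectType(edx, JS_ARRAY_TYPE, ebx);
  masm->j(not_equal, bailout);
}

CmpObjectType leaves the loaded map in its third argument, so a caller that also needs map-based checks such as CheckFastElements can reuse ebx without reloading it.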
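The allocation helpers follow the same shape: a fast inline bump allocation with a gc_required label for the slow path. The sketch below shows only the fast path; the object size, the register choices, and the TAG_OBJECT allocation flag (from the shared macro-assembler layer, not this file) are assumptions for illustration.

// Illustrative fast path: allocate a two-word object in new space.
void GenerateAllocateTwoWords(MacroAssembler* masm, Label* gc_required) {
  // eax receives the tagged result; ebx and edi are clobbered as scratch.
  masm->AllocateInNewSpace(2 * kPointerSize, eax, ebx, edi,
                           gc_required, TAG_OBJECT);
  // A real generator would now initialize the map word, e.g. through
  // mov(FieldOperand(eax, HeapObject::kMapOffset), ...), before using eax.
}

On the gc_required path, generators typically fall back to a runtime call such as CallRuntime and, when a partially initialized object has to be abandoned, use UndoAllocationInNewSpace from the list above.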