#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error "Unknown architecture."
#endif
      switch (unalloc->policy()) {
        case LUnallocated::FIXED_REGISTER: {
          const char* register_name =
              Register::AllocationIndexToString(unalloc->fixed_index());
          stream->Add("(=%s)", register_name);
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          const char* double_register_name =
              DoubleRegister::AllocationIndexToString(unalloc->fixed_index());
          stream->Add("(=%s)", double_register_name);
          break;
        }
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
#define DEFINE_OPERAND_CACHE(name, type)                      \
  L##name* L##name::cache = NULL;                             \
  void L##name::SetUpCache() {                                \
    if (cache) return;                                        \
    cache = new L##name[kNumCachedOperands];                  \
    for (int i = 0; i < kNumCachedOperands; i++) {            \
      cache[i].ConvertTo(type, i);                            \
    }                                                         \
  }                                                           \
  void L##name::TearDownCache() {                             \
    delete[] cache;                                           \
  }

LITHIUM_OPERAND_LIST(DEFINE_OPERAND_CACHE)
#undef DEFINE_OPERAND_CACHE
void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}

void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}
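The DEFINE_OPERAND_CACHE / LITHIUM_OPERAND_LIST pair is a classic X-macro: one list macro is expanded several times to declare, set up, and tear down a per-kind cache of small operands. The following standalone sketch (hypothetical names and types, not V8 code) shows the same pattern reduced to plain C++:

#include <cstdio>

enum Kind { CONSTANT_OPERAND, STACK_SLOT };
struct Operand {
  Kind kind;
  int index;
};

// One list macro drives every per-kind expansion, playing the role that
// LITHIUM_OPERAND_LIST plays above.
#define OPERAND_LIST(V)                  \
  V(ConstantOperand, CONSTANT_OPERAND)   \
  V(StackSlot, STACK_SLOT)

static const int kNumCachedOperands = 128;

// Stamp out one cache array and one SetUp function per listed kind.
#define DEFINE_CACHE(name, type)                     \
  static Operand name##_cache[kNumCachedOperands];   \
  static void SetUp##name##Cache() {                 \
    for (int i = 0; i < kNumCachedOperands; i++) {   \
      name##_cache[i].kind = type;                   \
      name##_cache[i].index = i;                     \
    }                                                \
  }
OPERAND_LIST(DEFINE_CACHE)
#undef DEFINE_CACHE

int main() {
  // Expand the same list again to run every SetUp*Cache().
#define SETUP_CACHE(name, type) SetUp##name##Cache();
  OPERAND_LIST(SETUP_CACHE)
#undef SETUP_CACHE
  std::printf("%d %d\n", ConstantOperand_cache[3].index,
              static_cast<int>(StackSlot_cache[7].kind));
  return 0;
}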
bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}
void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}
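For orientation, here is a minimal standalone sketch of the redundancy test used above, with simplified stand-in types rather than V8's LMoveOperands: a parallel move is redundant when every component move is already eliminated or copies an operand onto itself.

#include <cassert>
#include <vector>

struct Move {
  int source;
  int destination;
  bool eliminated;
  // A single move contributes nothing if it was eliminated or is a self-copy.
  bool IsRedundant() const { return eliminated || source == destination; }
};

// Mirrors the loop above: the whole parallel move is redundant only if
// every component move is.
bool IsRedundant(const std::vector<Move>& moves) {
  for (const Move& m : moves) {
    if (!m.IsRedundant()) return false;
  }
  return true;
}

int main() {
  std::vector<Move> moves = {{1, 1, false}, {2, 3, true}};
  assert(IsRedundant(moves));
  moves.push_back({4, 5, false});
  assert(!IsRedundant(moves));
  return 0;
}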
void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  stream->Add("[parameters=%d|", parameter_count());
  stream->Add("[arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}
void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      break;
    }
  }
}
void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}
void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}
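LPointerMap accumulates the operands that hold tagged pointers at a safepoint; the fragments above add, remove, and print those entries. A reduced standalone sketch of that bookkeeping (plain ints instead of LOperand*, hypothetical types, not V8's):

#include <cassert>
#include <vector>

struct PointerMap {
  std::vector<int> pointer_operands;  // ids of operands treated as tagged pointers

  void RecordPointer(int op) { pointer_operands.push_back(op); }

  // Linear scan and remove, mirroring LPointerMap::RemovePointer above.
  void RemovePointer(int op) {
    for (size_t i = 0; i < pointer_operands.size(); ++i) {
      if (pointer_operands[i] == op) {
        pointer_operands.erase(pointer_operands.begin() + i);
        break;
      }
    }
  }
};

int main() {
  PointerMap map;
  map.RecordPointer(7);
  map.RecordPointer(9);
  map.RemovePointer(7);
  assert(map.pointer_operands.size() == 1 && map.pointer_operands[0] == 9);
  return 0;
}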
int ElementsKindToShiftSize(ElementsKind elements_kind) {
  switch (elements_kind) {
Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  return label->label();
}
void LChunk::MarkEmptyBlocks() {
  HPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}
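The eliminability test inside MarkEmptyBlocks boils down to: everything strictly between the block's label (first) and its final goto (last) must be a redundant gap, in which case the label can be forwarded to the goto's target. A compact standalone sketch of just that test (hypothetical types, not V8's):

#include <cassert>
#include <vector>

struct Instr {
  bool is_gap;
  bool redundant;  // meaningful only when is_gap is true
};

// Returns true when the block body between first and last contains only
// redundant gap instructions, mirroring the can_eliminate loop above.
bool CanEliminate(const std::vector<Instr>& instrs, int first, int last) {
  for (int i = first + 1; i < last; ++i) {
    if (!instrs[i].is_gap || !instrs[i].redundant) return false;
  }
  return true;
}

int main() {
  std::vector<Instr> block = {{false, false},   // label
                              {true, true},     // redundant gap
                              {false, false}};  // goto
  assert(CanEliminate(block, 0, 2));
  return 0;
}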
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
int LChunk::ParameterAt(int index) {
  ASSERT(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) * kPointerSize;
}
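A hedged worked example of the slot arithmetic above, assuming two declared parameters and a 4-byte kPointerSize (as on ia32); index -1 addresses the receiver:

#include <cassert>

// Standalone re-statement of the formula above, with the chunk's
// num_parameters and kPointerSize passed in explicitly.
int ParameterAt(int index, int num_parameters, int pointer_size) {
  return (1 + num_parameters - index) * pointer_size;
}

int main() {
  assert(ParameterAt(-1, 2, 4) == 16);  // receiver: deepest slot
  assert(ParameterAt(0, 2, 4) == 12);   // first declared parameter
  assert(ParameterAt(1, 2, 4) == 8);    // second declared parameter
  return 0;
}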
bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}

int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}
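NearestGapPos relies on the layout that AddInstruction produces above: every real instruction is paired with a gap, so walking backwards from any index reaches a gap within a step or two. A small standalone sketch of that backward search (simplified, not V8's classes):

#include <cassert>
#include <vector>

// true marks a gap slot, false a real instruction, mirroring the
// alternating sequence built by AddInstruction.
int NearestGapPos(const std::vector<bool>& is_gap, int index) {
  while (!is_gap[index]) index--;
  return index;
}

int main() {
  std::vector<bool> layout = {true, false, true, false};  // gap, instr, gap, instr
  assert(NearestGapPos(layout, 3) == 2);
  assert(NearestGapPos(layout, 2) == 2);
  return 0;
}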
LChunk* LChunk::NewChunk(HGraph* graph) {
  NoHandleAllocation no_handles;
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;
  if (!allocator.Allocate(chunk)) return NULL;
  return chunk;
}
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LCodeGen generator(this, &assembler, info());
  if (generator.GenerateCode()) {
    if (FLAG_trace_codegen) {
      PrintF("Crankshaft Compiler - ");
    }
    CodeGenerator::MakeCodePrologue(info());
    Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    CodeGenerator::PrintCode(code, info());
    return code;
  }
  return Handle<Code>::null();
}
HValue * LookupValue(int id) const
static LUnallocated * cast(LOperand *op)
static LGap * cast(LInstruction *instr)
static LConstantOperand * Create(int index, Zone *zone)
#define DEFINE_OPERAND_CACHE(name, type)
int GetParameterStackSlot(int index) const
void PrintF(const char *format,...)
static void TearDownCaches()
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
int ParameterAt(int index)
void RemovePointer(LOperand *op)
LLabel * GetLabel(int block_id) const
#define ASSERT(condition)
const int kPointerSizeLog2
static void SetUpCaches()
int GetMaximumValueID() const
void PrintTo(StringStream *stream)
bool Equals(LOperand *other) const
static const char * AllocationIndexToString(int index)
void RecordUntagged(LOperand *op, Zone *zone)
int last_instruction_index() const
LGap * GetGapAt(int index) const
static void PrintCode(Handle< Code > code, CompilationInfo *info)
void Add(Vector< const char > format, Vector< FmtElm > elms)
int virtual_register() const
void PrintTo(StringStream *stream)
static const int kMaxVirtualRegisters
LLabel * replacement() const
bool is_loop_header() const
void AddMove(LOperand *from, LOperand *to, Zone *zone)
void RecordPointer(LOperand *op, Zone *zone)
#define LITHIUM_OPERAND_TEARDOWN(name, type)
CompilationInfo * info() const
static LChunk * NewChunk(HGraph *graph)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
int num_parameters() const
int ElementsKindToShiftSize(ElementsKind elements_kind)
bool IsGapAt(int index) const
virtual bool IsControl() const
LPointerMap * pointer_map() const
const ZoneList< HBasicBlock * > * blocks() const
int first_instruction_index() const
int parameter_count() const
virtual bool IsGap() const
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
void set_bailout_reason(const char *reason)
void PrintDataTo(StringStream *stream) const
CompilationInfo * info() const
static Handle< T > null()
void AddGapMove(int index, LOperand *from, LOperand *to)
LConstantOperand * DefineConstantOperand(HConstant *constant)
void set_replacement(LLabel *label)
static const char * AllocationIndexToString(int index)
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Label * GetAssemblyLabel(int block_id) const
bool HasPointerMap() const
int NearestGapPos(int index) const
int LookupDestination(int block_id) const
static Handle< Code > MakeCodeEpilogue(MacroAssembler *masm, Code::Flags flags, CompilationInfo *info)
#define LITHIUM_OPERAND_LIST(V)
void set_lithium_position(int pos)
static HValue * cast(HValue *value)
static void MakeCodePrologue(CompilationInfo *info)
#define LITHIUM_OPERAND_SETUP(name, type)
HConstant * LookupConstant(LConstantOperand *operand) const
bool HasReplacement() const
void PrintTo(StringStream *stream)
int arguments_stack_height() const
const ZoneList< LInstruction * > * instructions() const