V8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 #include "lithium.h"
30 #include "scopes.h"
31 
32 #if V8_TARGET_ARCH_IA32
33 #include "ia32/lithium-ia32.h"
35 #elif V8_TARGET_ARCH_X64
36 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
41 #elif V8_TARGET_ARCH_MIPS
42 #include "mips/lithium-mips.h"
44 #else
45 #error "Unknown architecture."
46 #endif
47 
48 namespace v8 {
49 namespace internal {
50 
51 
// NOTE(review): body of LOperand::PrintTo(StringStream*). The generated
// listing dropped the signature and several lines (the embedded source
// numbers below skip, e.g. 63->65), including some case labels of the
// inner policy() switch. Code kept byte-identical; restore the missing
// lines from upstream lithium.cc before editing.
// Prints a debug rendering of this operand according to kind():
// unallocated operands as "vN" plus a policy tag, constants as
// "[constant:i]", stack slots, registers, and arguments.
53  LUnallocated* unalloc = NULL;
54  switch (kind()) {
55  case INVALID:
56  stream->Add("(0)");
57  break;
58  case UNALLOCATED:
59  unalloc = LUnallocated::cast(this);
60  stream->Add("v%d", unalloc->virtual_register());
// Inner switch on the allocation policy of an unallocated operand.
// Several `case LUnallocated::...:` labels are missing below.
61  switch (unalloc->policy()) {
62  case LUnallocated::NONE:
63  break;
65  const char* register_name =
67  stream->Add("(=%s)", register_name);
68  break;
69  }
71  const char* double_register_name =
73  stream->Add("(=%s)", double_register_name);
74  break;
75  }
77  stream->Add("(=%dS)", unalloc->fixed_index());
78  break;
80  stream->Add("(R)");
81  break;
83  stream->Add("(WR)");
84  break;
86  stream->Add("(1)");
87  break;
88  case LUnallocated::ANY:
89  stream->Add("(-)");
90  break;
91  }
92  break;
93  case CONSTANT_OPERAND:
94  stream->Add("[constant:%d]", index());
95  break;
96  case STACK_SLOT:
97  stream->Add("[stack:%d]", index());
98  break;
99  case DOUBLE_STACK_SLOT:
100  stream->Add("[double_stack:%d]", index());
101  break;
102  case REGISTER:
103  stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
104  break;
105  case DOUBLE_REGISTER:
106  stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
107  break;
108  case ARGUMENT:
109  stream->Add("[arg:%d]", index());
110  break;
111  }
112 }
113 
// Defines the static operand cache for an LOperand subclass L<name>:
// a preallocated array of kNumCachedOperands operands of the given
// kind, so small-index operands can be shared instead of re-allocated.
//
// Fix: TearDownCache() previously left `cache` dangling after
// delete[]. Because SetUpCache() guards with `if (cache) return;`, a
// subsequent SetUpCache() would skip reallocation and later lookups
// would read freed memory. Nulling the pointer restores the
// set-up / tear-down round-trip (as done in later upstream V8).
#define DEFINE_OPERAND_CACHE(name, type)                      \
  L##name* L##name::cache = NULL;                             \
                                                              \
  void L##name::SetUpCache() {                                \
    if (cache) return;                                        \
    cache = new L##name[kNumCachedOperands];                  \
    for (int i = 0; i < kNumCachedOperands; i++) {            \
      cache[i].ConvertTo(type, i);                            \
    }                                                         \
  }                                                           \
                                                              \
  void L##name::TearDownCache() {                             \
    delete[] cache;                                           \
    cache = NULL;                                             \
  }
128 
// NOTE(review): these lines instantiate the per-operand-kind caches and
// the bodies of LOperand::SetUpCaches() / LOperand::TearDownCaches().
// The extraction dropped the LITHIUM_OPERAND_LIST(...) invocation lines
// and both function signatures (embedded numbers 129, 132, 134, 139,
// 141 are missing). Kept byte-identical.
130 #undef DEFINE_OPERAND_CACHE
131 
// SetUpCaches(): runs L<name>::SetUpCache() for every cached operand
// kind via LITHIUM_OPERAND_LIST (invocation line missing here).
133 #define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
135 #undef LITHIUM_OPERAND_SETUP
136 }
137 
138 
// TearDownCaches(): mirror of SetUpCaches() for cache destruction.
140 #define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
142 #undef LITHIUM_OPERAND_TEARDOWN
143 }
144 
145 
// NOTE(review): body of LParallelMove::IsRedundant() const (signature
// line 146 missing from the listing). Returns true iff every recorded
// move is itself redundant, i.e. the whole parallel move is a no-op.
147  for (int i = 0; i < move_operands_.length(); ++i) {
148  if (!move_operands_[i].IsRedundant()) return false;
149  }
150  return true;
151 }
152 
153 
// NOTE(review): body of LParallelMove::PrintDataTo(StringStream*) const
// (signature line 154 missing from the listing). Prints each
// non-eliminated move as "dst = src;" (or just "dst;" when source and
// destination are equal), space-separated.
155  bool first = true;
156  for (int i = 0; i < move_operands_.length(); ++i) {
157  if (!move_operands_[i].IsEliminated()) {
158  LOperand* source = move_operands_[i].source();
159  LOperand* destination = move_operands_[i].destination();
160  if (!first) stream->Add(" ");
161  first = false;
162  if (source->Equals(destination)) {
163  destination->PrintTo(stream);
164  } else {
165  destination->PrintTo(stream);
166  stream->Add(" = ");
167  source->PrintTo(stream);
168  }
169  stream->Add(";");
170  }
171  }
172 }
173 
174 
// NOTE(review): body of LEnvironment::PrintTo(StringStream*) (signature
// line 175 missing from the listing). Prints the deopt environment:
// AST id, parameter count, argument stack height, then each value
// (or "[hole]" for NULL slots) separated by ";".
176  stream->Add("[id=%d|", ast_id().ToInt());
177  stream->Add("[parameters=%d|", parameter_count());
178  stream->Add("[arguments_stack_height=%d|", arguments_stack_height());
179  for (int i = 0; i < values_.length(); ++i) {
180  if (i != 0) stream->Add(";");
181  if (values_[i] == NULL) {
182  stream->Add("[hole]");
183  } else {
184  values_[i]->PrintTo(stream);
185  }
186  }
187  stream->Add("]");
188 }
189 
190 
// NOTE(review): body of LPointerMap::RecordPointer(LOperand*, Zone*)
// (signature line 191 missing from the listing). Registers a tagged
// operand for GC tracing; negative stack-slot indices are incoming
// arguments and are skipped, and double operands are never tagged.
192  // Do not record arguments as pointers.
193  if (op->IsStackSlot() && op->index() < 0) return;
194  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
195  pointer_operands_.Add(op, zone);
196 }
197 
198 
// NOTE(review): body of LPointerMap::RemovePointer(LOperand*) (signature
// line 199 missing from the listing). Removes every recorded operand
// equal to `op`; `--i` compensates for ZoneList::Remove shifting the
// remaining elements left.
200  // Do not record arguments as pointers.
201  if (op->IsStackSlot() && op->index() < 0) return;
202  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
203  for (int i = 0; i < pointer_operands_.length(); ++i) {
204  if (pointer_operands_[i]->Equals(op)) {
205  pointer_operands_.Remove(i);
206  --i;
207  }
208  }
209 }
210 
211 
// NOTE(review): body of LPointerMap::RecordUntagged(LOperand*, Zone*)
// (signature line 212 missing from the listing). Same filtering as
// RecordPointer, but adds to the untagged set instead.
213  // Do not record arguments as pointers.
214  if (op->IsStackSlot() && op->index() < 0) return;
215  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
216  untagged_operands_.Add(op, zone);
217 }
218 
219 
// NOTE(review): body of LPointerMap::PrintTo(StringStream*) (signature
// line 220 missing from the listing). Prints the recorded pointer
// operands as "{a;b;...} @pos".
221  stream->Add("{");
222  for (int i = 0; i < pointer_operands_.length(); ++i) {
223  if (i != 0) stream->Add(";");
224  pointer_operands_[i]->PrintTo(stream);
225  }
226  stream->Add("} @%d", position());
227 }
228 
229 
// NOTE(review): body of ElementsKindToShiftSize(ElementsKind) (signature
// line 230 and most `case ...:` labels missing from the listing — the
// embedded numbers skip from 231 to 235, etc.). Maps an elements kind
// to log2 of its element size (0..3 for external typed arrays,
// kPointerSizeLog2 for tagged/FAST kinds). Kept byte-identical.
231  switch (elements_kind) {
235  return 0;
238  return 1;
242  return 2;
246  return 3;
247  case FAST_SMI_ELEMENTS:
248  case FAST_ELEMENTS:
250  case FAST_HOLEY_ELEMENTS:
251  case DICTIONARY_ELEMENTS:
253  return kPointerSizeLog2;
254  }
// Defensive return after an exhaustive switch; keeps compilers happy.
255  UNREACHABLE();
256  return 0;
257 }
258 
259 
260 LLabel* LChunk::GetLabel(int block_id) const {
261  HBasicBlock* block = graph_->blocks()->at(block_id);
262  int first_instruction = block->first_instruction_index();
263  return LLabel::cast(instructions_[first_instruction]);
264 }
265 
266 
267 int LChunk::LookupDestination(int block_id) const {
268  LLabel* cur = GetLabel(block_id);
269  while (cur->replacement() != NULL) {
270  cur = cur->replacement();
271  }
272  return cur->block_id();
273 }
274 
275 Label* LChunk::GetAssemblyLabel(int block_id) const {
276  LLabel* label = GetLabel(block_id);
277  ASSERT(!label->HasReplacement());
278  return label->label();
279 }
280 
// NOTE(review): body of LChunk::MarkEmptyBlocks() (signature line 281
// missing from the listing). For every block ending in a goto whose
// label is redundant and not a loop header, checks that all
// instructions between label and goto are redundant gaps; if so, the
// label is replaced by the goto target so the block can be skipped.
// The inner loop reuses `i`, shadowing the outer block index — present
// in upstream too, but worth renaming once the full source is restored.
282  HPhase phase("L_Mark empty blocks", this);
283  for (int i = 0; i < graph()->blocks()->length(); ++i) {
284  HBasicBlock* block = graph()->blocks()->at(i);
285  int first = block->first_instruction_index();
286  int last = block->last_instruction_index();
287  LInstruction* first_instr = instructions()->at(first);
288  LInstruction* last_instr = instructions()->at(last);
289 
290  LLabel* label = LLabel::cast(first_instr);
291  if (last_instr->IsGoto()) {
292  LGoto* goto_instr = LGoto::cast(last_instr);
293  if (label->IsRedundant() &&
294  !label->is_loop_header()) {
295  bool can_eliminate = true;
296  for (int i = first + 1; i < last && can_eliminate; ++i) {
297  LInstruction* cur = instructions()->at(i);
298  if (cur->IsGap()) {
299  LGap* gap = LGap::cast(cur);
300  if (!gap->IsRedundant()) {
301  can_eliminate = false;
302  }
303  } else {
304  can_eliminate = false;
305  }
306  }
307 
308  if (can_eliminate) {
309  label->set_replacement(GetLabel(goto_instr->block_id()));
310  }
311  }
312  }
313  }
314 }
315 
316 
// NOTE(review): body of LChunk::AddInstruction(LInstruction*,
// HBasicBlock*) (signature line 317 missing from the listing). Appends
// the instruction plus a gap: the gap precedes control instructions
// and follows all others. `index` records the instruction's position
// for its pointer map, if any.
318  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
319  int index = -1;
320  if (instr->IsControl()) {
321  instructions_.Add(gap, zone());
322  index = instructions_.length();
323  instructions_.Add(instr, zone());
324  } else {
325  index = instructions_.length();
326  instructions_.Add(instr, zone());
327  instructions_.Add(gap, zone());
328  }
329  if (instr->HasPointerMap()) {
330  pointer_maps_.Add(instr->pointer_map(), zone());
331  instr->pointer_map()->set_lithium_position(index);
332  }
333 }
334 
335 
// NOTE(review): body of LChunk::DefineConstantOperand(HConstant*)
// (signature line 336 missing from the listing). Wraps the constant's
// hydrogen value id in an LConstantOperand.
337  return LConstantOperand::Create(constant->id(), zone());
338 }
339 
340 
341 int LChunk::GetParameterStackSlot(int index) const {
342  // The receiver is at index 0, the first parameter at index 1, so we
343  // shift all parameter indexes down by the number of parameters, and
344  // make sure they end up negative so they are distinguishable from
345  // spill slots.
346  int result = index - info()->scope()->num_parameters() - 1;
347  ASSERT(result < 0);
348  return result;
349 }
350 
351 
352 // A parameter relative to ebp in the arguments stub.
353 int LChunk::ParameterAt(int index) {
354  ASSERT(-1 <= index); // -1 is the receiver.
355  return (1 + info()->scope()->num_parameters() - index) *
356  kPointerSize;
357 }
358 
359 
360 LGap* LChunk::GetGapAt(int index) const {
361  return LGap::cast(instructions_[index]);
362 }
363 
364 
365 bool LChunk::IsGapAt(int index) const {
366  return instructions_[index]->IsGap();
367 }
368 
369 
370 int LChunk::NearestGapPos(int index) const {
371  while (!IsGapAt(index)) index--;
372  return index;
373 }
374 
375 
// NOTE(review): the listing dropped line 377 — presumably
// "GetGapAt(NearestGapPos(index))->GetOrCreateParallelMove(" (see the
// GetOrCreateParallelMove reference in the index below) — TODO confirm
// against upstream. Records a from->to move in the START parallel move
// of the nearest gap at or before `index`.
376 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
378  LGap::START, zone())->AddMove(from, to, zone());
379 }
380 
381 
// NOTE(review): body of LChunk::LookupConstant(LConstantOperand*) const
// (signature line 382 missing from the listing). Resolves the operand's
// value id back to its HConstant in the hydrogen graph.
383  return HConstant::cast(graph_->LookupValue(operand->index()));
384 }
385 
386 
// NOTE(review): second half of LChunk::LookupLiteralRepresentation —
// the first signature line (387) is missing. Returns the hydrogen
// representation of the constant's value.
388  LConstantOperand* operand) const {
389  return graph_->LookupValue(operand->index())->representation();
390 }
391 
392 
// NOTE(review): body of LChunk::NewChunk(HGraph*) (signature line 393
// missing from the listing). Builds a lithium chunk from the hydrogen
// graph and runs register allocation; returns NULL and records a
// bailout reason on the CompilationInfo when either step fails.
// Handle/GC allocation is forbidden throughout via the two scopes.
394  NoHandleAllocation no_handles;
395  AssertNoAllocation no_gc;
396 
397  int values = graph->GetMaximumValueID();
398  CompilationInfo* info = graph->info();
399  if (values > LUnallocated::kMaxVirtualRegisters) {
400  info->set_bailout_reason("not enough virtual registers for values");
401  return NULL;
402  }
403  LAllocator allocator(values, graph);
404  LChunkBuilder builder(info, graph, &allocator);
405  LChunk* chunk = builder.Build();
406  if (chunk == NULL) return NULL;
407 
408  if (!allocator.Allocate(chunk)) {
409  info->set_bailout_reason("not enough virtual registers (regalloc)");
410  return NULL;
411  }
412 
413  return chunk;
414 }
415 
416 
// NOTE(review): body of LChunk::Codegen() (signature line 417 missing,
// as are lines 427/429/432 — likely MakeCodePrologue, the Handle<Code>
// declaration receiving MakeCodeEpilogue's result, and a PrintCode
// call; TODO confirm against upstream). Generates optimized machine
// code for this chunk, or a null handle on failure.
418  MacroAssembler assembler(info()->isolate(), NULL, 0);
419  LCodeGen generator(this, &assembler, info());
420 
// Eliminate empty blocks before emitting code.
421  MarkEmptyBlocks();
422 
423  if (generator.GenerateCode()) {
424  if (FLAG_trace_codegen) {
425  PrintF("Crankshaft Compiler - ");
426  }
428  Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
430  CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
431  generator.FinishCode(code);
433  return code;
434  }
435  return Handle<Code>::null();
436 }
437 
438 
439 } } // namespace v8::internal
HValue * LookupValue(int id) const
Definition: hydrogen.h:309
int index() const
Definition: lithium.h:62
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:198
static LGap * cast(LInstruction *instr)
Definition: lithium-arm.h:327
static LConstantOperand * Create(int index, Zone *zone)
Definition: lithium.h:265
#define DEFINE_OPERAND_CACHE(name, type)
Definition: lithium.cc:114
int GetParameterStackSlot(int index) const
Definition: lithium.cc:341
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static void TearDownCaches()
Definition: lithium.cc:139
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
Definition: lithium-arm.h:345
int ParameterAt(int index)
Definition: lithium.cc:353
void RemovePointer(LOperand *op)
Definition: lithium.cc:199
LLabel * GetLabel(int block_id) const
Definition: lithium.cc:260
Handle< Code > Codegen()
Definition: lithium.cc:417
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:232
static void SetUpCaches()
Definition: lithium.cc:132
int GetMaximumValueID() const
Definition: hydrogen.h:303
void PrintTo(StringStream *stream)
Definition: lithium.cc:220
int position() const
Definition: lithium.h:435
bool Equals(LOperand *other) const
Definition: lithium.h:70
static const char * AllocationIndexToString(int index)
void RecordUntagged(LOperand *op, Zone *zone)
Definition: lithium.cc:212
int last_instruction_index() const
Definition: hydrogen.h:90
LGap * GetGapAt(int index) const
Definition: lithium.cc:360
static void PrintCode(Handle< Code > code, CompilationInfo *info)
Definition: codegen.cc:115
void Add(Vector< const char > format, Vector< FmtElm > elms)
int virtual_register() const
Definition: lithium.h:184
#define UNREACHABLE()
Definition: checks.h:50
void PrintTo(StringStream *stream)
Definition: lithium.cc:175
static const int kMaxVirtualRegisters
Definition: lithium.h:158
Zone * zone() const
Definition: hydrogen.h:248
LLabel * replacement() const
Definition: lithium-arm.h:419
Policy policy() const
Definition: lithium.h:176
bool is_loop_header() const
Definition: lithium-arm.h:417
void AddMove(LOperand *from, LOperand *to, Zone *zone)
Definition: lithium.h:403
const int kPointerSize
Definition: globals.h:220
void RecordPointer(LOperand *op, Zone *zone)
Definition: lithium.cc:191
#define LITHIUM_OPERAND_TEARDOWN(name, type)
int block_id() const
Definition: lithium-arm.h:416
Kind kind() const
Definition: lithium.h:61
bool IsRedundant() const
Definition: lithium.cc:146
BailoutId ast_id() const
Definition: lithium.h:489
CompilationInfo * info() const
Definition: hydrogen.h:249
static LChunk * NewChunk(HGraph *graph)
Definition: lithium.cc:393
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3491
Zone * zone() const
Definition: lithium.h:683
int num_parameters() const
Definition: scopes.h:336
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
int ElementsKindToShiftSize(ElementsKind elements_kind)
Definition: lithium.cc:230
bool IsGapAt(int index) const
Definition: lithium.cc:365
virtual bool IsControl() const
Definition: lithium-arm.h:241
LPointerMap * pointer_map() const
Definition: lithium-arm.h:248
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:251
int first_instruction_index() const
Definition: hydrogen.h:86
int parameter_count() const
Definition: lithium.h:490
virtual bool IsGap() const
Definition: lithium-arm.h:239
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
Definition: lithium.cc:317
HGraph * graph() const
Definition: lithium.h:663
void set_bailout_reason(const char *reason)
Definition: compiler.h:187
int block_id() const
Definition: lithium-arm.h:378
void PrintDataTo(StringStream *stream) const
Definition: lithium.cc:154
CompilationInfo * info() const
Definition: lithium.h:662
int fixed_index() const
Definition: lithium.h:180
static Handle< T > null()
Definition: handles.h:86
void AddGapMove(int index, LOperand *from, LOperand *to)
Definition: lithium.cc:376
LConstantOperand * DefineConstantOperand(HConstant *constant)
Definition: lithium.cc:336
void set_replacement(LLabel *label)
Definition: lithium-arm.h:420
void MarkEmptyBlocks()
Definition: lithium.cc:281
static const char * AllocationIndexToString(int index)
Definition: assembler-arm.h:87
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Definition: lithium.cc:387
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
Label * GetAssemblyLabel(int block_id) const
Definition: lithium.cc:275
bool HasPointerMap() const
Definition: lithium-arm.h:249
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
int NearestGapPos(int index) const
Definition: lithium.cc:370
bool IsRedundant() const
Definition: lithium-arm.cc:134
int LookupDestination(int block_id) const
Definition: lithium.cc:267
static Handle< Code > MakeCodeEpilogue(MacroAssembler *masm, Code::Flags flags, CompilationInfo *info)
Definition: codegen.cc:96
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random allows verbose printing trace parsing and preparsing Check icache flushes in ARM and MIPS simulator Stack alingment in bytes in print stack trace when throwing exceptions randomize hashes to avoid predictable hash Fixed seed to use to hash property activate a timer that switches between V8 threads testing_bool_flag float flag Seed used for threading test randomness A filename with extra code to be included in the Print usage including flags
Definition: flags.cc:495
#define LITHIUM_OPERAND_LIST(V)
Definition: lithium.h:38
void set_lithium_position(int pos)
Definition: lithium.h:438
static HValue * cast(HValue *value)
static void MakeCodePrologue(CompilationInfo *info)
Definition: codegen.cc:61
#define LITHIUM_OPERAND_SETUP(name, type)
HConstant * LookupConstant(LConstantOperand *operand) const
Definition: lithium.cc:382
bool HasReplacement() const
Definition: lithium-arm.h:421
void PrintTo(StringStream *stream)
Definition: lithium.cc:52
int arguments_stack_height() const
Definition: lithium.h:486
const ZoneList< LInstruction * > * instructions() const
Definition: lithium.h:664
Scope * scope() const
Definition: compiler.h:67