#define LITHIUM_OPERAND_LIST(V)         \
  V(ConstantOperand, CONSTANT_OPERAND)  \
  V(StackSlot,       STACK_SLOT)        \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
  V(Register,        REGISTER)          \
  V(DoubleRegister,  DOUBLE_REGISTER)
#define LITHIUM_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
#undef LITHIUM_OPERAND_PREDICATE
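// Together, the two macros above form a classic X-macro: the operand list is
// passed the predicate macro, so each V(name, type) entry expands into one
// Is##name() test against kind(). A minimal, self-contained sketch of the
// same pattern (simplified names and kinds, not the actual V8 header):

#include <cassert>

#define OPERAND_LIST(V)      \
  V(Register, REGISTER)      \
  V(StackSlot, STACK_SLOT)

class Operand {
 public:
  enum Kind { REGISTER, STACK_SLOT };
  explicit Operand(Kind kind) : kind_(kind) {}
  Kind kind() const { return kind_; }

// Each list entry expands into one predicate, e.g. IsRegister().
#define OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  OPERAND_LIST(OPERAND_PREDICATE)
#undef OPERAND_PREDICATE

 private:
  Kind kind_;
};

int main() {
  Operand r(Operand::REGISTER);
  assert(r.IsRegister() && !r.IsStackSlot());
}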
LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
  Initialize(policy, 0, lifetime);
}

class LifetimeField
    : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
};
class VirtualRegisterField
    : public BitField<unsigned,
                      kVirtualRegisterShift,
                      kVirtualRegisterWidth> {
};
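// These BitField instantiations pack the policy, lifetime, and virtual
// register number of an LUnallocated into a single word; the encode/decode/
// update statics listed in the member index below do the shifting and
// masking. A minimal sketch of what such a template plausibly looks like
// (an assumption about its shape, not the real v8::internal::BitField):

#include <cassert>
#include <cstdint>

// Packs a value of type T into bits [shift, shift + size) of a uint32_t.
template <class T, int shift, int size>
class BitField {
 public:
  static const uint32_t kMask = ((1u << size) - 1) << shift;

  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t value) {
    return static_cast<T>((value & kMask) >> shift);
  }
  // Replace this field in 'previous' while leaving other fields intact.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | encode(value);
  }
};

enum Lifetime { USED_AT_START, USED_AT_END };

int main() {
  typedef BitField<Lifetime, 3, 1> LifetimeField;
  uint32_t bits = LifetimeField::encode(USED_AT_END);
  assert(LifetimeField::decode(bits) == USED_AT_END);
  bits = LifetimeField::update(bits, USED_AT_START);
  assert(LifetimeField::decode(bits) == USED_AT_START);
}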
 
static LUnallocated* cast(LOperand* op) {
  ASSERT(op->IsUnallocated());
  return reinterpret_cast<LUnallocated*>(op);
}

LMoveOperands(LOperand* source, LOperand* destination)
    : source_(source), destination_(destination) { }

// A move is pending while the resolver works on it: its destination has
// been cleared but its source is still set.
bool IsPending() const {
  return destination_ == NULL && source_ != NULL;
}

// True if this move is still live and reads from the given operand.
bool Blocks(LOperand* operand) const {
  return !IsEliminated() && source()->Equals(operand);
}

// A move is redundant if it has been eliminated, copies an operand onto
// itself, or writes to an ignored destination.
bool IsRedundant() const {
  return IsEliminated() || source_->Equals(destination_) || IsIgnored();
}

bool IsIgnored() const {
  return destination_ != NULL && destination_->IsIgnored();
}

// A move is eliminated once its source has been cleared.
bool IsEliminated() const {
  return source_ == NULL;
}
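// The predicates above encode a small state machine used during parallel-move
// resolution: clearing destination_ marks a move pending, clearing source_
// marks it eliminated, and redundant moves need no code at all. A simplified,
// runnable sketch of the same logic (hypothetical Operand type, not the V8
// classes):

#include <cassert>
#include <cstddef>

struct Operand {
  int id;
  bool Equals(const Operand* other) const { return id == other->id; }
  bool IsIgnored() const { return id < 0; }  // assumption: negative = ignored
};

struct MoveOperands {
  Operand* source;
  Operand* destination;

  // Pending: destination cleared while the resolver works on the move.
  bool IsPending() const { return destination == NULL && source != NULL; }
  // Eliminated: source cleared once the move has been emitted or dropped.
  bool IsEliminated() const { return source == NULL; }
  bool IsIgnored() const {
    return destination != NULL && destination->IsIgnored();
  }
  // Redundant moves need no code: same location, or already dealt with.
  bool IsRedundant() const {
    return IsEliminated() || source->Equals(destination) || IsIgnored();
  }
  // A live move blocks 'operand' if it still reads from it.
  bool Blocks(const Operand* operand) const {
    return !IsEliminated() && source->Equals(operand);
  }
};

int main() {
  Operand a = {1}, b = {2};
  MoveOperands m = {&a, &b};
  assert(!m.IsRedundant() && m.Blocks(&a) && !m.Blocks(&b));
  m.source = NULL;  // eliminate the move
  assert(m.IsEliminated() && m.IsRedundant());
}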
 
static LConstantOperand* Create(int index, Zone* zone) {
  ASSERT(index >= 0);
  if (index < kNumCachedOperands) return &cache[index];
  return new(zone) LConstantOperand(index);
}

static LConstantOperand* cast(LOperand* op) {
  ASSERT(op->IsConstantOperand());
  return reinterpret_cast<LConstantOperand*>(op);
}

static const int kNumCachedOperands = 128;
 
static LStackSlot* Create(int index, Zone* zone) {
  ASSERT(index >= 0);
  if (index < kNumCachedOperands) return &cache[index];
  return new(zone) LStackSlot(index);
}

static LStackSlot* cast(LOperand* op) {
  ASSERT(op->IsStackSlot());
  return reinterpret_cast<LStackSlot*>(op);
}

static const int kNumCachedOperands = 128;
 
static LDoubleStackSlot* Create(int index, Zone* zone) {
  ASSERT(index >= 0);
  if (index < kNumCachedOperands) return &cache[index];
  return new(zone) LDoubleStackSlot(index);
}

static LDoubleStackSlot* cast(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  return reinterpret_cast<LDoubleStackSlot*>(op);
}

static const int kNumCachedOperands = 128;
 
static LRegister* Create(int index, Zone* zone) {
  ASSERT(index >= 0);
  if (index < kNumCachedOperands) return &cache[index];
  return new(zone) LRegister(index);
}

static const int kNumCachedOperands = 16;
 
static LDoubleRegister* Create(int index, Zone* zone) {
  ASSERT(index >= 0);
  if (index < kNumCachedOperands) return &cache[index];
  return new(zone) LDoubleRegister(index);
}

static LDoubleRegister* cast(LOperand* op) {
  ASSERT(op->IsDoubleRegister());
  return reinterpret_cast<LDoubleRegister*>(op);
}

static const int kNumCachedOperands = 16;
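// All five concrete operand classes share one allocation pattern: indices
// below kNumCachedOperands return a pointer into a static cache set up by
// SetUpCaches(), so common operands are shared singletons; larger indices
// are zone-allocated. A minimal sketch of the pattern (hypothetical names,
// plain new standing in for zone allocation):

#include <cassert>

struct StackSlot {
  int index;
};

static const int kNumCachedSlots = 128;
static StackSlot cache[kNumCachedSlots];

static void SetUpCache() {
  for (int i = 0; i < kNumCachedSlots; ++i) cache[i].index = i;
}

// Small indices share one canonical object; large ones are heap-allocated.
static StackSlot* CreateStackSlot(int index) {
  if (index < kNumCachedSlots) return &cache[index];
  return new StackSlot{index};
}

int main() {
  SetUpCache();
  assert(CreateStackSlot(3) == CreateStackSlot(3));  // cached, shared
  StackSlot* big = CreateStackSlot(500);             // not cached
  assert(big->index == 500);
  delete big;
}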
 
void AddMove(LOperand* from, LOperand* to, Zone* zone) {
  move_operands_.Add(LMoveOperands(from, to), zone);
}

const ZoneList<LMoveOperands>* move_operands() const {
  return &move_operands_;
}
 
LPointerMap(int position, Zone* zone)
    : pointer_operands_(8, zone),
      untagged_operands_(0, zone),
      position_(position),
      lithium_position_(-1) { }

// Move the untagged operands out of the pointer set before the map is
// used: only tagged pointers belong in the final pointer map.
const ZoneList<LOperand*>* GetNormalizedOperands() {
  for (int i = 0; i < untagged_operands_.length(); ++i) {
    RemovePointer(untagged_operands_[i]);
  }
  untagged_operands_.Clear();
  return &pointer_operands_;
}

void set_lithium_position(int pos) {
  ASSERT(lithium_position_ == -1);
  lithium_position_ = pos;
}

int lithium_position_;
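// GetNormalizedOperands() reconciles the two lists: every operand later
// recorded as untagged is removed from the pointer set before the map is
// used. A simplified sketch of that normalization (hypothetical types,
// std::vector standing in for ZoneList):

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

struct PointerMap {
  std::vector<int> pointer_slots;   // slots holding tagged pointers
  std::vector<int> untagged_slots;  // slots later proven untagged

  void RecordPointer(int slot) { pointer_slots.push_back(slot); }
  void RecordUntagged(int slot) { untagged_slots.push_back(slot); }

  // Drop every slot that was recorded as untagged, then forget the
  // untagged list, mirroring GetNormalizedOperands() above.
  const std::vector<int>& GetNormalized() {
    for (size_t i = 0; i < untagged_slots.size(); ++i) {
      pointer_slots.erase(
          std::remove(pointer_slots.begin(), pointer_slots.end(),
                      untagged_slots[i]),
          pointer_slots.end());
    }
    untagged_slots.clear();
    return pointer_slots;
  }
};

int main() {
  PointerMap map;
  map.RecordPointer(1);
  map.RecordPointer(2);
  map.RecordUntagged(2);
  assert(map.GetNormalized() == std::vector<int>({1}));
}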
 
LEnvironment(Handle<JSFunction> closure,
             FrameType frame_type,
             BailoutId ast_id,
             int parameter_count,
             int argument_count,
             int value_count,
             LEnvironment* outer,
             HEnterInlined* entry,
             Zone* zone)
    : closure_(closure),
      frame_type_(frame_type),
      arguments_stack_height_(argument_count),
      deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
      translation_index_(-1),
      ast_id_(ast_id),
      parameter_count_(parameter_count),
      pc_offset_(-1),
      values_(value_count, zone),
      is_tagged_(value_count, zone),
      is_uint32_(value_count, zone),
      spilled_registers_(NULL),
      spilled_double_registers_(NULL),
      outer_(outer),
      entry_(entry),
      zone_(zone) { }
 
LOperand** spilled_double_registers() const {
  return spilled_double_registers_;
}

void AddValue(LOperand* operand, Representation representation, bool is_uint32) {
  values_.Add(operand, zone());
  if (representation.IsTagged()) {
    ASSERT(!is_uint32);
    is_tagged_.Add(values_.length() - 1);
  }
  if (is_uint32) {
    is_uint32_.Add(values_.length() - 1);
  }
}

bool HasBeenRegistered() const {
  return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
}

void SetSpilledRegisters(LOperand** registers, LOperand** double_registers) {
  spilled_registers_ = registers;
  spilled_double_registers_ = double_registers;
}
 
int arguments_stack_height_;
int deoptimization_index_;
int translation_index_;
int parameter_count_;
LOperand** spilled_double_registers_;
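// AddValue appends an operand and records its index in the is_tagged_ or
// is_uint32_ set, which HasTaggedValueAt/HasUint32ValueAt query during
// deoptimization translation. A simplified sketch of the bookkeeping
// (std::set standing in for the bit vector the real class uses):

#include <cassert>
#include <set>
#include <vector>

struct Environment {
  std::vector<int> values;  // stand-in for the LOperand* entries
  std::set<int> is_tagged;
  std::set<int> is_uint32;

  void AddValue(int operand, bool tagged, bool uint32) {
    values.push_back(operand);
    int index = static_cast<int>(values.size()) - 1;
    if (tagged) is_tagged.insert(index);  // tagged values need GC treatment
    if (uint32) is_uint32.insert(index);  // uint32 values need special deopt handling
  }
  bool HasTaggedValueAt(int index) const { return is_tagged.count(index) != 0; }
  bool HasUint32ValueAt(int index) const { return is_uint32.count(index) != 0; }
};

int main() {
  Environment env;
  env.AddValue(42, true, false);
  env.AddValue(7, false, true);
  assert(env.HasTaggedValueAt(0) && env.HasUint32ValueAt(1));
}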
 
explicit ShallowIterator(LEnvironment* env)
    : env_(env),
      limit_(env != NULL ? env->values()->length() : 0),
      current_(0) {
  SkipUninteresting();
}

LOperand* Current() {
  ASSERT(!Done());
  return env_->values()->at(current_);
}

// Constants and arguments need no processing and are skipped over.
bool ShouldSkip(LOperand* op) {
  return op == NULL || op->IsConstantOperand() || op->IsArgument();
}

// Skip until something interesting, beginning with and including current_.
void SkipUninteresting() {
  while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
    ++current_;
  }
}

explicit DeepIterator(LEnvironment* env)
    : current_iterator_(env) {
  SkipUninteresting();
}

bool Done() { return current_iterator_.Done(); }

LOperand* Current() {
  ASSERT(!current_iterator_.Done());
  return current_iterator_.Current();
}

void Advance() {
  current_iterator_.Advance();
  SkipUninteresting();
}

// Once the current environment is exhausted, move on to its outer one.
void SkipUninteresting() {
  while (current_iterator_.env() != NULL && current_iterator_.Done()) {
    current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
  }
}

ShallowIterator current_iterator_;
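// The shallow iterator walks one environment's values and skips the
// uninteresting ones; the deep iterator wraps it and, whenever the inner
// iterator runs out, hops to the outer environment. A self-contained sketch
// of that nesting (hypothetical Env type, without the constant/argument
// skipping):

#include <cassert>
#include <cstddef>
#include <vector>

struct Env {
  std::vector<int> values;
  Env* outer;
};

class ShallowIter {
 public:
  explicit ShallowIter(Env* env) : env_(env), current_(0) {}
  bool Done() const {
    return env_ == NULL || current_ >= env_->values.size();
  }
  int Current() const { return env_->values[current_]; }
  void Advance() { ++current_; }
  Env* env() const { return env_; }
 private:
  Env* env_;
  size_t current_;
};

class DeepIter {
 public:
  explicit DeepIter(Env* env) : it_(env) { SkipExhausted(); }
  bool Done() const { return it_.Done(); }
  int Current() const { return it_.Current(); }
  void Advance() { it_.Advance(); SkipExhausted(); }
 private:
  // When one environment is exhausted, continue in its outer environment.
  void SkipExhausted() {
    while (it_.env() != NULL && it_.Done()) {
      it_ = ShallowIter(it_.env()->outer);
    }
  }
  ShallowIter it_;
};

int main() {
  Env outer = {{1, 2}, NULL};
  Env inner = {{3}, &outer};
  std::vector<int> seen;
  for (DeepIter it(&inner); !it.Done(); it.Advance()) seen.push_back(it.Current());
  assert((seen == std::vector<int>{3, 1, 2}));
}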
 
class LPlatformChunk;
 
const ZoneList<Handle<JSFunction> >* inlined_closures() const {
  return &inlined_closures_;
}

void AddInlinedClosure(Handle<JSFunction> closure) {
  inlined_closures_.Add(closure, zone());
}

LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()) { }
 
#endif  // V8_LITHIUM_H_
Member index (grouped by class):

LOperand:
  LOperand(Kind kind, int index)
  static const int kKindFieldWidth
  void ConvertTo(Kind kind, int index)
  bool Equals(LOperand *other) const
  void PrintTo(StringStream *stream)
  static void SetUpCaches()
  static void TearDownCaches()

BitField helpers (KindField, PolicyField, and friends):
  static uint32_t encode(Kind value)
  static Kind decode(uint32_t value)
  static uint32_t update(uint32_t previous, Policy value)

LUnallocated:
  LUnallocated(Policy policy)
  LUnallocated(Policy policy, int fixed_index)
  LUnallocated(Policy policy, Lifetime lifetime)
  static LUnallocated * cast(LOperand *op)
  LUnallocated * CopyUnconstrained(Zone *zone)
  bool HasAnyPolicy() const
  bool HasFixedPolicy() const
  bool HasRegisterPolicy() const
  bool HasSameAsInputPolicy() const
  void set_policy(Policy policy)
  int virtual_register() const
  void set_virtual_register(unsigned id)
  static const int kPolicyShift
  static const int kPolicyWidth
  static const int kLifetimeShift
  static const int kLifetimeWidth
  static const int kVirtualRegisterShift
  static const int kVirtualRegisterWidth
  static const int kFixedIndexShift
  static const int kFixedIndexWidth
  static const int kMaxVirtualRegisters
  static const int kMinFixedIndex
  static const int kMaxFixedIndex
  STATIC_ASSERT(kFixedIndexWidth > 5)

LMoveOperands:
  LMoveOperands(LOperand *source, LOperand *destination)
  LOperand * source() const
  void set_source(LOperand *operand)
  LOperand * destination() const
  void set_destination(LOperand *operand)
  bool Blocks(LOperand *operand) const
  bool IsEliminated() const

Cached operand classes (each also has a static TearDownCache()):
  static LConstantOperand * Create(int index, Zone *zone)
  static LConstantOperand * cast(LOperand *op)
  static LArgument * cast(LOperand *op)
  static LStackSlot * Create(int index, Zone *zone)
  static LStackSlot * cast(LOperand *op)
  static LDoubleStackSlot * Create(int index, Zone *zone)
  static LDoubleStackSlot * cast(LOperand *op)
  static LRegister * Create(int index, Zone *zone)
  static LRegister * cast(LOperand *op)
  static LDoubleRegister * Create(int index, Zone *zone)
  static LDoubleRegister * cast(LOperand *op)
  static void TearDownCache()

LParallelMove:
  LParallelMove(Zone *zone)
  void AddMove(LOperand *from, LOperand *to, Zone *zone)
  const ZoneList< LMoveOperands > * move_operands() const
  void PrintDataTo(StringStream *stream) const

LPointerMap:
  LPointerMap(int position, Zone *zone)
  void RecordPointer(LOperand *op, Zone *zone)
  void RecordUntagged(LOperand *op, Zone *zone)
  void RemovePointer(LOperand *op)
  const ZoneList< LOperand * > * GetNormalizedOperands()
  int lithium_position() const
  void set_lithium_position(int pos)
  void PrintTo(StringStream *stream)

LEnvironment:
  LEnvironment(Handle< JSFunction > closure, FrameType frame_type, BailoutId ast_id, int parameter_count, int argument_count, int value_count, LEnvironment *outer, HEnterInlined *entry, Zone *zone)
  Handle< JSFunction > closure() const
  FrameType frame_type() const
  int arguments_stack_height() const
  int deoptimization_index() const
  int translation_index() const
  int parameter_count() const
  LEnvironment * outer() const
  const ZoneList< LOperand * > * values() const
  void AddValue(LOperand *operand, Representation representation, bool is_uint32)
  bool HasTaggedValueAt(int index) const
  bool HasUint32ValueAt(int index) const
  void Register(int deoptimization_index, int translation_index, int pc_offset)
  bool HasBeenRegistered() const
  LOperand ** spilled_registers() const
  LOperand ** spilled_double_registers() const
  void SetSpilledRegisters(LOperand **registers, LOperand **double_registers)
  void PrintTo(StringStream *stream)

Environment iterators:
  ShallowIterator(LEnvironment *env)
  DeepIterator(LEnvironment *env)
LChunk:
  LChunk(CompilationInfo *info, HGraph *graph)
  static LChunk * NewChunk(HGraph *graph)
  void AddInstruction(LInstruction *instruction, HBasicBlock *block)
  LConstantOperand * DefineConstantOperand(HConstant *constant)
  HConstant * LookupConstant(LConstantOperand *operand) const
  Representation LookupLiteralRepresentation(LConstantOperand *operand) const
  int GetParameterStackSlot(int index) const
  int ParameterAt(int index)
  LGap * GetGapAt(int index) const
  bool IsGapAt(int index) const
  int NearestGapPos(int index) const
  void AddGapMove(int index, LOperand *from, LOperand *to)
  LLabel * GetLabel(int block_id) const
  Label * GetAssemblyLabel(int block_id) const
  int LookupDestination(int block_id) const
  int spill_slot_count() const
  CompilationInfo * info() const
  const ZoneList< LInstruction * > * instructions() const
  const ZoneList< LPointerMap * > * pointer_maps() const
  const ZoneList< Handle< JSFunction > > * inlined_closures() const
  void AddInlinedClosure(Handle< JSFunction > closure)

Related macros and cross-referenced declarations:
  #define LITHIUM_OPERAND_LIST(V)
  #define LITHIUM_OPERAND_PREDICATE(name, type)
  #define ASSERT(condition)
  bool Contains(int i) const  (BitVector)
  void Add(const T &element, AllocationPolicy allocator = AllocationPolicy())  (ZoneList)
  bool is_uint32(int64_t x)
  int ElementsKindToShiftSize(ElementsKind elements_kind)