#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "mips/lithium-mips.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "deoptimizer.h"
#include "safepoint-table.h"
#include "scopes.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
 
class LCodeGen BASE_EMBEDDED {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : zone_(info->zone()),
        chunk_(static_cast<LPlatformChunk*>(chunk)),
        masm_(assembler),
        info_(info),
        current_instruction_(-1),
        instructions_(chunk->instructions()),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        status_(UNUSED),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        last_lazy_deopt_pc_(0),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }
  // Simple accessors.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info_->isolate(); }
  Factory* factory() const { return isolate()->factory(); }
  Heap* heap() const { return isolate()->heap(); }
  Zone* zone() const { return zone_; }
 
  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagI(LInstruction* instr,
                            LOperand* value,
                            IntegerSignedness signedness);
 
  void DoDeferredRandom(LRandom* instr);
 
  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);
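  // Note: a gap position may carry several parallel moves; the actual move
  // scheduling and cycle breaking is handled by the LGapResolver member
  // (resolver_, declared below), not here.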
 
  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int additional_index,
                                 int additional_offset);
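  // Rough sketch of the address arithmetic this helper encapsulates
  // (inferred from the parameter names, not verified against the .cc):
  //   key_is_constant:  base + (constant_key << element_size) + additional_offset
  //   otherwise:        base + (key << shift_size) + additional_offset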
 
  void WriteTranslation(LEnvironment* environment,
                        Translation* translation,
                        int* arguments_index,
                        int* arguments_count);
 
  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
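  // For illustration, the list macro expands to one declaration per concrete
  // Lithium instruction, along the lines of (example names only):
  //   void DoAddI(LAddI* node);
  //   void DoBranch(LBranch* node);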
 private:
  enum Status {
    UNUSED,
    GENERATING,
    DONE,
    ABORTED
  };

  bool is_unused() const { return status_ == UNUSED; }
  bool is_generating() const { return status_ == GENERATING; }
  bool is_done() const { return status_ == DONE; }
  bool is_aborted() const { return status_ == ABORTED; }
 
  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk_->graph(); }
 
  int GetNextEmittedBlock(int block);
  LInstruction* GetNextInstruction();
 
  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);
 
  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
  int GetParameterCount() const { return scope()->num_parameters(); }
 
  void Abort(const char* reason);
  void Comment(const char* format, ...);
 
  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
 
  // Code generation passes.  Returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateBody();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();
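  // These passes are presumably run in the order listed above, so that
  // deferred slow paths and the deopt jump table land after the main
  // instruction stream, with the safepoint table emitted last.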
 
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };
 
  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);
 
  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);
 
  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr);
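  // Illustrative call site (hypothetical; actual uses live in the .cc):
  //   CallRuntime(Runtime::kStackGuard, 0, instr);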
 
  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind,
                         A1State a1_state);
 
  void LoadHeapObject(Register result, Handle<HeapObject> object);
 
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);
 
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
 
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
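  // Illustrative use (hypothetical operands; `scratch` stands in for
  // whatever register holds the value under test):
  //   DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));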
 
  void AddToTranslation(Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int arguments_index,
                        int arguments_count);
 
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();
 
  void EmitIntegerMathAbs(LUnaryMathOperation* instr);
  void DoMathAbs(LUnaryMathOperation* instr);
  void DoMathFloor(LUnaryMathOperation* instr);
  void DoMathRound(LUnaryMathOperation* instr);
  void DoMathSqrt(LUnaryMathOperation* instr);
  void DoMathPowHalf(LUnaryMathOperation* instr);
  void DoMathLog(LUnaryMathOperation* instr);
  void DoMathTan(LUnaryMathOperation* instr);
  void DoMathCos(LUnaryMathOperation* instr);
  void DoMathSin(LUnaryMathOperation* instr);
 
  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
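  // The WithRegisters/WithRegistersAndDoubles variants presumably pair with
  // code emitted inside a PushSafepointRegistersScope of the matching
  // Safepoint::Kind (see the scope class below).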
 
  void RecordPosition(int position);

  void EmitGoto(int block);
 
  void EmitBranch(int left_block,
                  int right_block,
                  Condition cc,
                  Register src1,
                  const Operand& src2);
  void EmitBranchF(int left_block,
                   int right_block,
                   Condition cc,
                   FPURegister src1,
                   FPURegister src2);
 
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool deoptimize_on_undefined,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env);
  void DeoptIfTaggedButNotSmi(LEnvironment* environment,
                              HValue* value,
                              LOperand* operand);
  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register& cmp1,
                         Operand& cmp2);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string);
 
  void EmitIsConstructCall(Register temp1, Register temp2);
 
  void EmitLoadFieldOrConstantFunction(Register result,
                                       Register object,
                                       Handle<Map> type,
                                       Handle<String> name,
                                       LEnvironment* env);
 
  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset);
 
  struct JumpTableEntry {
    explicit inline JumpTableEntry(Address entry)
        : label(),
          address(entry) { }
    Label label;
    Address address;
  };
 
  void EnsureSpaceForLazyDeopt();
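  // Presumably pads the instruction stream so that the distance from
  // last_lazy_deopt_pc_ to the current pc is large enough for the lazy
  // deoptimization patcher to overwrite a call sequence safely.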
 
  Zone* zone_;
  LPlatformChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;

  int current_instruction_;
  const ZoneList<LInstruction*>* instructions_;
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  Status status_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int last_lazy_deopt_pc_;

  // Builder that keeps track of safepoints in the code.  The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;
 
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
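  // Illustrative use (hypothetical; real call sites are in the .cc):
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     // ... emit a call that may trigger a GC ...
  //     RecordSafepointWithRegisters(pointers, 0, Safepoint::kNoLazyDeopt);
  //   }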
 
  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};
 
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
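// Illustrative subclass shape (hypothetical names; the concrete deferred
// classes are defined in lithium-codegen-mips.cc):
//
//   class DeferredExample: public LDeferredCode {
//    public:
//     DeferredExample(LCodeGen* codegen, LInstruction* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() { /* emit the slow path via masm() */ }
//     virtual LInstruction* instr() { return instr_; }
//    private:
//     LInstruction* instr_;
//   };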
 
} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_