28 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_
29 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_
42 class SafepointGenerator;
47 : zone_(info->zone()),
52 current_instruction_(-1),
54 deoptimizations_(4, info->zone()),
55 deopt_jump_table_(4, info->zone()),
56 deoptimization_literals_(8, info->zone()),
57 inlined_function_count_(0),
58 scope_(info->scope()),
60 translations_(info->zone()),
61 deferred_(8, info->zone()),
63 last_lazy_deopt_pc_(0),
64 safepoints_(info->zone()),
66 expected_safepoint_kind_(Safepoint::kSimple) {
67 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Accessor: the Heap belonging to this code generator's isolate.
// (The leading "76" is an extraction artifact — a source line number
// fused into the text — not part of the code.)
76 Heap*
heap()
const {
return isolate()->heap(); }
113 void DoDeferredBinaryOpStub(
LPointerMap* pointer_map,
122 IntegerSignedness signedness);
127 void DoDeferredRandom(
LRandom* instr);
139 void DoGap(
LGap* instr);
143 bool key_is_constant,
147 int additional_index,
148 int additional_offset);
152 Translation* translation,
153 int* arguments_index,
154 int* arguments_count);
157 #define DECLARE_DO(type) void Do##type(L##type* node);
// True while code generation has not yet started (status_ == UNUSED).
// (Leading "169" is an extraction artifact, not code.)
169 bool is_unused()
const {
return status_ == UNUSED; }
// True while code generation is in progress (status_ == GENERATING).
// (Leading "170" is an extraction artifact, not code.)
170 bool is_generating()
const {
return status_ == GENERATING; }
// True once code generation completed successfully (status_ == DONE).
// (Leading "171" is an extraction artifact, not code.)
171 bool is_done()
const {
return status_ ==
DONE; }
// True if code generation was abandoned (status_ == ABORTED).
// (Leading "172" is an extraction artifact, not code.)
172 bool is_aborted()
const {
return status_ == ABORTED; }
// Accessor: the LPlatformChunk being compiled (stored in chunk_).
// (Leading "178" is an extraction artifact, not code.)
178 LPlatformChunk* chunk()
const {
return chunk_; }
// Accessor: the function's Scope, cached from the CompilationInfo at
// construction (see scope_(info->scope()) in the initializer list).
// (Leading "179" is an extraction artifact, not code.)
179 Scope* scope()
const {
return scope_; }
// Accessor: the HGraph this chunk was lowered from (delegates to chunk_).
// (Leading "180" is an extraction artifact, not code.)
180 HGraph* graph()
const {
return chunk_->graph(); }
// Fixed scratch register for generated ARM code: always r9.
// NOTE(review): presumably r9 is reserved for codegen scratch on this
// platform — confirm against the ARM macro assembler's register usage.
// (Leading "182" is an extraction artifact, not code.)
182 Register scratch0() {
return r9; }
185 int GetNextEmittedBlock(
int block);
186 LInstruction* GetNextInstruction();
188 void EmitClassOfTest(Label* if_true,
190 Handle<String> class_name,
193 Register temporary2);
// Number of spill slots the frame needs, as recorded on the chunk.
// (Leading "195" is an extraction artifact, not code.)
195 int GetStackSlotCount()
const {
return chunk()->spill_slot_count(); }
// Number of declared parameters of the function being compiled,
// taken from its Scope.
// (Leading "196" is an extraction artifact, not code.)
196 int GetParameterCount()
const {
return scope()->num_parameters(); }
198 void Abort(
const char* reason);
199 void Comment(
const char* format, ...);
// Queues a deferred-code stub for emission after the main body;
// the list is zone-allocated (deferred_.Add(code, zone())).
// (Leading "201" is an extraction artifact, not code.)
201 void AddDeferredCode(LDeferredCode*
code) { deferred_.Add(code, zone()); }
205 bool GeneratePrologue();
207 bool GenerateDeferredCode();
208 bool GenerateDeoptJumpTable();
209 bool GenerateSafepointTable();
212 RECORD_SIMPLE_SAFEPOINT,
213 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
218 RelocInfo::Mode mode,
222 void CallCodeGeneric(
224 RelocInfo::Mode mode,
226 SafepointMode safepoint_mode,
229 void CallRuntime(
const Runtime::Function*
function,
231 LInstruction* instr);
235 LInstruction* instr) {
237 CallRuntime(
function, num_arguments, instr);
242 LInstruction* instr);
251 void CallKnownFunction(Handle<JSFunction>
function,
257 void LoadHeapObject(Register result, Handle<HeapObject>
object);
259 void RecordSafepointWithLazyDeopt(LInstruction* instr,
260 SafepointMode safepoint_mode);
262 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
263 Safepoint::DeoptMode mode);
264 void DeoptimizeIf(
Condition cc, LEnvironment* environment);
266 void AddToTranslation(Translation* translation,
271 int arguments_count);
272 void PopulateDeoptimizationData(Handle<Code> code);
273 int DefineDeoptimizationLiteral(Handle<Object> literal);
275 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
281 void EmitIntegerMathAbs(LUnaryMathOperation* instr);
282 void DoMathAbs(LUnaryMathOperation* instr);
283 void DoMathFloor(LUnaryMathOperation* instr);
284 void DoMathRound(LUnaryMathOperation* instr);
285 void DoMathSqrt(LUnaryMathOperation* instr);
286 void DoMathPowHalf(LUnaryMathOperation* instr);
287 void DoMathLog(LUnaryMathOperation* instr);
288 void DoMathTan(LUnaryMathOperation* instr);
289 void DoMathCos(LUnaryMathOperation* instr);
290 void DoMathSin(LUnaryMathOperation* instr);
293 void RecordSafepoint(LPointerMap* pointers,
294 Safepoint::Kind kind,
296 Safepoint::DeoptMode mode);
297 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
298 void RecordSafepoint(Safepoint::DeoptMode mode);
299 void RecordSafepointWithRegisters(LPointerMap* pointers,
301 Safepoint::DeoptMode mode);
302 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
304 Safepoint::DeoptMode mode);
305 void RecordPosition(
int position);
308 void EmitGoto(
int block);
309 void EmitBranch(
int left_block,
int right_block,
Condition cc);
310 void EmitNumberUntagD(Register input,
312 bool deoptimize_on_undefined,
313 bool deoptimize_on_minus_zero,
316 void DeoptIfTaggedButNotSmi(LEnvironment* environment,
323 Condition EmitTypeofIs(Label* true_label,
326 Handle<String> type_name);
333 Label* is_not_object,
341 Label* is_not_string);
345 void EmitIsConstructCall(Register temp1, Register temp2);
347 void EmitLoadFieldOrConstantFunction(Register result,
355 void EmitDeepCopy(Handle<JSObject>
object,
364 void EmitSignedIntegerDivisionByConstant(Register result,
369 LEnvironment* environment);
371 struct JumpTableEntry {
372 explicit inline JumpTableEntry(
Address entry)
379 void EnsureSpaceForLazyDeopt();
382 LPlatformChunk*
const chunk_;
383 MacroAssembler*
const masm_;
384 CompilationInfo*
const info_;
387 int current_instruction_;
388 const ZoneList<LInstruction*>* instructions_;
389 ZoneList<LEnvironment*> deoptimizations_;
390 ZoneList<JumpTableEntry> deopt_jump_table_;
391 ZoneList<Handle<Object> > deoptimization_literals_;
392 int inlined_function_count_;
395 TranslationBuffer translations_;
396 ZoneList<LDeferredCode*> deferred_;
398 int last_lazy_deopt_pc_;
402 SafepointTableBuilder safepoints_;
405 LGapResolver resolver_;
407 Safepoint::Kind expected_safepoint_kind_;
411 PushSafepointRegistersScope(LCodeGen* codegen,
412 Safepoint::Kind kind)
413 : codegen_(codegen) {
414 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
415 codegen_->expected_safepoint_kind_ = kind;
417 switch (codegen_->expected_safepoint_kind_) {
418 case Safepoint::kWithRegisters:
419 codegen_->masm_->PushSafepointRegisters();
421 case Safepoint::kWithRegistersAndDoubles:
422 codegen_->masm_->PushSafepointRegistersAndDoubles();
429 ~PushSafepointRegistersScope() {
430 Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
431 ASSERT((kind & Safepoint::kWithRegisters) != 0);
433 case Safepoint::kWithRegisters:
434 codegen_->masm_->PopSafepointRegisters();
436 case Safepoint::kWithRegistersAndDoubles:
437 codegen_->masm_->PopSafepointRegistersAndDoubles();
442 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
460 external_exit_(
NULL),
461 instruction_index_(codegen->current_instruction_) {
462 codegen->AddDeferredCode(
this);
// Jump target for leaving this deferred code: the caller-supplied
// external exit label if one was set, otherwise the internal exit_.
// (Leading "471" is an extraction artifact, not code.)
471 Label*
exit() {
return external_exit_ !=
NULL ? external_exit_ : &exit_; }
// Accessor: the LCodeGen this deferred code belongs to (codegen_).
// (Leading "475" is an extraction artifact, not code.)
475 LCodeGen*
codegen()
const {
return codegen_; }
482 Label* external_exit_;
483 int instruction_index_;
488 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_
void SetExit(Label *exit)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 instructions(ARM only)") DEFINE_bool(enable_vfp2
MacroAssembler * masm() const
LDeferredCode(LCodeGen *codegen)
static const Function * FunctionForId(FunctionId id)
#define ASSERT(condition)
LCodeGen * codegen() const
MacroAssembler * masm() const
int instruction_index() const
bool is_uint32(int64_t x)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
DwVfpRegister DoubleRegister
#define kScratchDoubleReg
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
LCodeGen(LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
Isolate * isolate() const
virtual LInstruction * instr()=0
virtual void Generate()=0
CompilationInfo * info() const
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
Register ToRegister(int num)
Factory * factory() const