28 #ifndef V8_HYDROGEN_INSTRUCTIONS_H_
29 #define V8_HYDROGEN_INSTRUCTIONS_H_
51 class HInferRepresentationPhase;
53 class HLoopInformation;
54 class HStoreNamedField;
// X-macro list of the abstract Hydrogen instruction classes. V(Name) is
// applied elsewhere in this file (e.g. by the DECLARE_PREDICATE macros) to
// generate per-type Is##Name() predicates and checked casts.
// NOTE(review): this excerpt is sampled -- entries between the visible
// original line numbers are elided.
59 #define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V) \
60 V(ArithmeticBinaryOperation) \
62 V(BitwiseBinaryOperation) \
63 V(ControlInstruction) \
// X-macro list of every concrete Hydrogen instruction. Expanded to build
// the HValue::Opcode enum (via DECLARE_OPCODE) and the Is##type()
// predicates (via DECLARE_PREDICATE), among other boilerplate.
// NOTE(review): sampled excerpt -- many entries are elided between the
// visible original line numbers.
67 #define HYDROGEN_CONCRETE_INSTRUCTION_LIST(V) \
69 V(AccessArgumentsAt) \
73 V(ArgumentsElements) \
79 V(BoundsCheckBaseIndexInformation) \
81 V(CallWithDescriptor) \
91 V(CheckInstanceType) \
97 V(ClassOfTestAndBranch) \
98 V(CompareNumericAndBranch) \
99 V(CompareHoleAndBranch) \
101 V(CompareMinusZeroAndBranch) \
102 V(CompareObjectEqAndBranch) \
115 V(EnvironmentMarker) \
116 V(ForceRepresentation) \
120 V(GetCachedArrayIndex) \
122 V(HasCachedArrayIndexAndBranch) \
123 V(HasInstanceTypeAndBranch) \
124 V(InnerAllocatedObject) \
126 V(InstanceOfKnownGlobal) \
128 V(IsConstructCallAndBranch) \
129 V(IsObjectAndBranch) \
130 V(IsStringAndBranch) \
132 V(IsUndetectableAndBranch) \
135 V(LoadFieldByIndex) \
136 V(LoadFunctionPrototype) \
138 V(LoadGlobalGeneric) \
140 V(LoadKeyedGeneric) \
142 V(LoadNamedGeneric) \
157 V(SeqStringGetChar) \
158 V(SeqStringSetChar) \
164 V(StoreContextSlot) \
167 V(StoreKeyedGeneric) \
169 V(StoreNamedGeneric) \
171 V(StringCharCodeAt) \
172 V(StringCharFromCode) \
173 V(StringCompareAndBranch) \
176 V(ToFastProperties) \
177 V(TransitionElementsKind) \
178 V(TrapAllocationMemento) \
180 V(TypeofIsAndBranch) \
181 V(UnaryMathOperation) \
// X-macro lists of GVN (global value numbering) side-effect flags.
// "Tracked" vs "untracked" distinguishes how precisely GVN follows the
// corresponding dependency. NOTE(review): most entries are elided in this
// sampled excerpt.
186 #define GVN_TRACKED_FLAG_LIST(V) \
190 #define GVN_UNTRACKED_FLAG_LIST(V) \
194 V(BackingStoreFields) \
197 V(DoubleArrayElements) \
206 V(TypedArrayElements)
// Injected into each abstract H-instruction class: a final Is##type()
// override returning true, plus a checked downcast (ASSERT + cast).
209 #define DECLARE_ABSTRACT_INSTRUCTION(type) \
210 virtual bool Is##type() const V8_FINAL V8_OVERRIDE { return true; } \
211 static H##type* cast(HValue* value) { \
212 ASSERT(value->Is##type()); \
213 return reinterpret_cast<H##type*>(value); \
// Injected into each concrete H-instruction class: the Lithium lowering
// hook (CompileToLithium), a checked downcast, and the final opcode()
// override returning this type's HValue::k##type enum value.
217 #define DECLARE_CONCRETE_INSTRUCTION(type) \
218 virtual LInstruction* CompileToLithium( \
219 LChunkBuilder* builder) V8_FINAL V8_OVERRIDE; \
220 static H##type* cast(HValue* value) { \
221 ASSERT(value->Is##type()); \
222 return reinterpret_cast<H##type*>(value); \
224 virtual Opcode opcode() const V8_FINAL V8_OVERRIDE { \
225 return HValue::k##type; \
// Integer interval [lower_, upper_] with a separate "can be -0" flag,
// used by Hydrogen's range analysis. NOTE(review): interior lines of this
// class are elided in this excerpt; comments annotate only visible members.
232 class Range
V8_FINAL :
public ZoneObject {
238 can_be_minus_zero_(
false) { }
244 can_be_minus_zero_(
false) { }
248 Range*
next()
const {
return next_; }
// Copies of this range with one bound widened to the int32 extreme.
250 return new(zone) Range(
kMinInt, upper_);
253 return new(zone) Range(lower_,
kMaxInt);
256 Range* result =
new(zone) Range(lower_, upper_);
257 result->set_can_be_minus_zero(CanBeMinusZero());
// True iff 0 is contained in [lower_, upper_].
263 bool CanBeZero()
const {
return upper_ >= 0 && lower_ <= 0; }
266 bool Includes(
int value)
const {
return lower_ <= value && upper_ >= value; }
// "Maximal" range: spans all of int32 and may also be -0.
268 return lower_ ==
kMinInt && upper_ ==
kMaxInt && CanBeMinusZero();
// In-place lattice operations combining this range with |other|
// (declared here; defined out of line).
287 void Intersect(Range* other);
288 void Union(Range* other);
289 void CombinedMax(Range* other);
290 void CombinedMin(Range* other);
292 void AddConstant(
int32_t value);
// Tracked separately from the bounds because -0 == 0 numerically.
303 bool can_be_minus_zero_;
// Bitmask-encoded type lattice. The named factory methods below build the
// common type constants. NOTE(review): interior lines (including most enum
// values) are elided in this excerpt.
309 static HType
None() {
return HType(kNone); }
310 static HType
Tagged() {
return HType(kTagged); }
313 static HType
Smi() {
return HType(kSmi); }
315 static HType
String() {
return HType(kString); }
316 static HType
Boolean() {
return HType(kBoolean); }
318 static HType
JSArray() {
return HType(kJSArray); }
319 static HType
JSObject() {
return HType(kJSObject); }
// Combine = bitwise intersection of the two masks.
323 return HType(static_cast<Type>(type_ & other.type_));
327 return type_ == other.type_;
// Subtype-style test: combining with |other| must leave |other| unchanged.
331 return Combine(other).Equals(other);
// The Is* predicates test mask subsumption: (type_ & kX) == kX.
335 return ((type_ & kTaggedPrimitive) == kTaggedPrimitive);
339 return ((type_ & kTaggedNumber) == kTaggedNumber);
343 return ((type_ & kSmi) == kSmi);
347 return ((type_ & kHeapNumber) == kHeapNumber);
351 return ((type_ & kString) == kString);
355 return IsTaggedPrimitive() || IsSmi() || IsHeapNumber() ||
356 IsBoolean() || IsJSArray();
360 return ((type_ & kBoolean) == kBoolean);
364 return ((type_ & kNonPrimitive) == kNonPrimitive);
368 return ((type_ & kJSArray) == kJSArray);
372 return ((type_ & kJSObject) == kJSObject);
376 return IsHeapNumber() || IsString() || IsBoolean() || IsNonPrimitive();
381 case kTaggedPrimitive:
399 const char* ToString();
// Enum values are bit patterns; composite types appear to share bits with
// their supertypes -- most entries elided here, so confirm against the
// full header before relying on specific values.
405 kTaggedPrimitive = 0x5,
411 kNonPrimitive = 0x101,
419 explicit HType(
Type t) : type_(t) { }
428 : tail_(tail), value_(value), index_(index) {
433 int index()
const {
return index_; }
// Helper X-macros: DECLARE_FLAG enumerates GVN flag names; COUNT_FLAG
// expands to "+ 1" per entry so a flag list can be summed into a count.
484 #define DECLARE_FLAG(Type) k##Type,
488 #define COUNT_FLAG(Type) + 1
// Checked int -> GVNFlag conversion.
496 static inline GVNFlag GVNFlagFromInt(
int i) {
499 return static_cast<GVNFlag>(i);
511 bool Apply(
HValue* other_base,
int other_offset,
int other_scale = 0) {
514 offset_ = other_offset;
515 scale_ = other_scale;
520 offset_ += other_offset;
521 scale_ = other_scale;
530 swap(&base_, other_base);
531 swap(&offset_, other_offset);
532 swap(&scale_, other_scale);
536 template <
class T>
void swap(
T* a,
T* b) {
566 bool IsUnknown()
const {
return value_ == RelocInfo::kNoPosition; }
570 if (FLAG_hydrogen_track_positions) {
579 if (FLAG_hydrogen_track_positions) {
584 int raw()
const {
return value_; }
// Generates the opcode enum entries (k##type,) from the instruction list.
661 #define DECLARE_OPCODE(type) k##type,
664 #undef DECLARE_OPCODE
// Concrete form: Is##type() compares against the instruction's opcode.
669 #define DECLARE_PREDICATE(type) \
670 bool Is##type() const { return opcode() == k##type; }
672 #undef DECLARE_PREDICATE
// Base-class form: every Is##type() is virtual and defaults to false, so
// abstract subclasses can override it (see DECLARE_ABSTRACT_INSTRUCTION).
676 #define DECLARE_PREDICATE(type) \
677 virtual bool Is##type() const { return false; }
679 #undef DECLARE_PREDICATE
729 if (t.IsHeapObject())
return r;
926 virtual void Verify() = 0;
990 #define ADD_FLAG(Type) result.Add(k##Type);
1000 result.
Remove(kOsrEntries);
1008 result.
Remove(kNewSpacePromotion);
1009 result.
Remove(kElementsKind);
1010 result.
Remove(kElementsPointer);
1036 virtual bool IsDeletable()
const {
return false; }
1038 DISALLOW_COPY_AND_ASSIGN(
HValue);
// Boilerplate factories: DECLARE_INSTRUCTION_FACTORY_Pn(I, P1..Pn) emits a
// static New(Zone*, HValue* context, P1..Pn) that zone-allocates I(p1..pn)
// and ignores |context|; the WITH_CONTEXT variants forward |context| as the
// constructor's first argument. NOTE(review): each macro's closing lines
// are elided in this sampled excerpt.
1042 #define DECLARE_INSTRUCTION_FACTORY_P0(I) \
1043 static I* New(Zone* zone, HValue* context) { \
1044 return new(zone) I(); \
1047 #define DECLARE_INSTRUCTION_FACTORY_P1(I, P1) \
1048 static I* New(Zone* zone, HValue* context, P1 p1) { \
1049 return new(zone) I(p1); \
1052 #define DECLARE_INSTRUCTION_FACTORY_P2(I, P1, P2) \
1053 static I* New(Zone* zone, HValue* context, P1 p1, P2 p2) { \
1054 return new(zone) I(p1, p2); \
1057 #define DECLARE_INSTRUCTION_FACTORY_P3(I, P1, P2, P3) \
1058 static I* New(Zone* zone, HValue* context, P1 p1, P2 p2, P3 p3) { \
1059 return new(zone) I(p1, p2, p3); \
1062 #define DECLARE_INSTRUCTION_FACTORY_P4(I, P1, P2, P3, P4) \
1063 static I* New(Zone* zone, \
1069 return new(zone) I(p1, p2, p3, p4); \
1072 #define DECLARE_INSTRUCTION_FACTORY_P5(I, P1, P2, P3, P4, P5) \
1073 static I* New(Zone* zone, \
1080 return new(zone) I(p1, p2, p3, p4, p5); \
1083 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P0(I) \
1084 static I* New(Zone* zone, HValue* context) { \
1085 return new(zone) I(context); \
1088 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P1(I, P1) \
1089 static I* New(Zone* zone, HValue* context, P1 p1) { \
1090 return new(zone) I(context, p1); \
1093 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P2(I, P1, P2) \
1094 static I* New(Zone* zone, HValue* context, P1 p1, P2 p2) { \
1095 return new(zone) I(context, p1, p2); \
1098 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P3(I, P1, P2, P3) \
1099 static I* New(Zone* zone, HValue* context, P1 p1, P2 p2, P3 p3) { \
1100 return new(zone) I(context, p1, p2, p3); \
1103 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P4(I, P1, P2, P3, P4) \
1104 static I* New(Zone* zone, \
1110 return new(zone) I(context, p1, p2, p3, p4); \
1113 #define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P5(I, P1, P2, P3, P4, P5) \
1114 static I* New(Zone* zone, \
1121 return new(zone) I(context, p1, p2, p3, p4, p5); \
// Source-position bookkeeping for an instruction. data_ is a tagged
// union: either a position value tagged into the low bit (see
// kPositionTag below), or a pointer to an HSourcePosition array whose
// slot 0 holds the instruction's own position and whose slots from
// kFirstOperandPosIndex on hold per-operand positions.
1137 if (has_operand_positions()) {
1138 return operand_positions()[kInstructionPosIndex];
1144 if (has_operand_positions()) {
1145 operand_positions()[kInstructionPosIndex] = pos;
1147 data_ = TagPosition(pos.
raw());
1152 if (has_operand_positions()) {
1156 const int length = kFirstOperandPosIndex + operand_count;
1159 for (
int i = 0; i < length; i++) {
1164 data_ =
reinterpret_cast<intptr_t
>(positions);
1167 ASSERT(has_operand_positions());
1171 if (!has_operand_positions()) {
1174 return *operand_position_slot(idx);
1178 *operand_position_slot(idx) = pos;
// Layout of the out-of-line position array.
1182 static const intptr_t kInstructionPosIndex = 0;
1183 static const intptr_t kFirstOperandPosIndex = 1;
1186 ASSERT(has_operand_positions());
1187 return &(operand_positions()[kFirstOperandPosIndex + idx]);
// data_ holds the array pointer exactly when it is NOT a tagged position.
1190 bool has_operand_positions()
const {
1191 return !IsTaggedPosition(data_);
1194 HSourcePosition* operand_positions()
const {
1195 ASSERT(has_operand_positions());
1196 return reinterpret_cast<HSourcePosition*
>(data_);
// Low-bit tagging: positions are shifted left one bit with the tag bit
// set, so tagged values are always odd and never alias an aligned pointer.
1199 static const intptr_t kPositionTag = 1;
1200 static const intptr_t kPositionShift = 1;
1201 static bool IsTaggedPosition(intptr_t val) {
1202 return (val & kPositionTag) != 0;
1204 static intptr_t UntagPosition(intptr_t val) {
1205 ASSERT(IsTaggedPosition(val));
1206 return val >> kPositionShift;
1208 static intptr_t TagPosition(intptr_t val) {
1209 const intptr_t result = (val << kPositionShift) | kPositionTag;
1210 ASSERT(UntagPosition(result) == val);
1232 instr->InsertBefore(
this);
1239 instr->InsertAfter(
this);
1285 position_(RelocInfo::kNoPosition) {
1297 void PrintMnemonicTo(StringStream* stream);
1301 HPositionInfo position_;
1372 template<
int S,
int V>
1378 successors_[i] =
block;
1396 class HBlockEntry
V8_FINAL :
public HTemplateInstruction<0> {
1406 class HDummyUse
V8_FINAL :
public HTemplateInstruction<1> {
1410 SetOperandAt(0, value);
1430 class HDebugBreak
V8_FINAL :
public HTemplateInstruction<0> {
1442 class HGoto
V8_FINAL :
public HTemplateControlInstruction<1, 0> {
1445 SetSuccessorAt(0, target);
1449 *block = FirstSuccessor();
1463 class HDeoptimize
V8_FINAL :
public HTemplateControlInstruction<1, 0> {
1469 HBasicBlock* unreachable_continuation) {
1470 return new(zone) HDeoptimize(reason, type, unreachable_continuation);
1482 const char*
reason()
const {
return reason_; }
1488 explicit HDeoptimize(const
char* reason,
1490 HBasicBlock* unreachable_continuation)
1491 : reason_(reason), type_(type) {
1492 SetSuccessorAt(0, unreachable_continuation);
1495 const char* reason_;
1516 class HBranch
V8_FINAL :
public HUnaryControlInstruction {
1520 ToBooleanStub::Types);
1522 ToBooleanStub::Types,
1523 HBasicBlock*, HBasicBlock*);
1530 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
1533 return expected_input_types_;
1541 HBasicBlock* true_target =
NULL,
1542 HBasicBlock* false_target = NULL)
1544 expected_input_types_(expected_input_types) {
1545 SetFlag(kAllowUndefinedAsNaN);
1548 ToBooleanStub::Types expected_input_types_;
1552 class HCompareMap
V8_FINAL :
public HUnaryControlInstruction {
1556 HBasicBlock*, HBasicBlock*);
1559 if (known_successor_index() != kNoKnownSuccessorIndex) {
1560 *block = SuccessorAt(known_successor_index());
1569 static const int kNoKnownSuccessorIndex = -1;
1572 known_successor_index_ = known_successor_index;
1575 Unique<Map>
map()
const {
return map_; }
1584 virtual
int RedefinedOperandIndex() {
return 0; }
1587 HCompareMap(
HValue* value,
1589 HBasicBlock* true_target =
NULL,
1590 HBasicBlock* false_target =
NULL)
1592 known_successor_index_(kNoKnownSuccessorIndex), map_(Unique<
Map>(map)) {
1597 int known_successor_index_;
1602 class HContext
V8_FINAL :
public HTemplateInstruction<0> {
1605 return new(zone) HContext();
1623 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
1627 class HReturn
V8_FINAL :
public HTemplateControlInstruction<0, 3> {
1648 SetOperandAt(0, value);
1649 SetOperandAt(1, context);
1650 SetOperandAt(2, parameter_count);
1655 class HAbnormalExit
V8_FINAL :
public HTemplateControlInstruction<0, 0> {
1685 class HUseConst
V8_FINAL :
public HUnaryOperation {
1700 class HForceRepresentation
V8_FINAL :
public HTemplateInstruction<1> {
1702 static HInstruction* New(Zone* zone, HValue* context, HValue* value,
1703 Representation required_representation);
1708 return representation();
1717 SetOperandAt(0, value);
1718 set_representation(required_representation);
1723 class HChange
V8_FINAL :
public HUnaryOperation {
1727 bool is_truncating_to_smi,
1728 bool is_truncating_to_int32)
1733 set_representation(to);
1735 SetFlag(kCanOverflow);
1736 if (is_truncating_to_smi) {
1737 SetFlag(kTruncatingToSmi);
1738 SetFlag(kTruncatingToInt32);
1740 if (is_truncating_to_int32) SetFlag(kTruncatingToInt32);
1742 set_type(HType::Smi());
1744 set_type(HType::TaggedNumber());
1745 if (to.
IsTagged()) SetChangesFlag(kNewSpacePromotion);
1750 return CheckUsesForFlag(kAllowUndefinedAsNaN);
1753 virtual HType CalculateInferredType()
V8_OVERRIDE;
1759 return CheckFlag(kBailoutOnMinusZero);
1776 return !from().IsTagged() || value()->
type().IsSmi();
1781 class HClampToUint8
V8_FINAL :
public HUnaryOperation {
1795 explicit HClampToUint8(
HValue* value)
1798 SetFlag(kAllowUndefinedAsNaN);
1802 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
1806 class HDoubleBits
V8_FINAL :
public HUnaryOperation {
1821 return other->IsDoubleBits() && HDoubleBits::cast(other)->bits() == bits();
1825 HDoubleBits(
HValue* value, Bits bits)
1831 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
1837 class HConstructDouble
V8_FINAL :
public HTemplateInstruction<2> {
1857 SetOperandAt(0, hi);
1858 SetOperandAt(1, lo);
1861 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
1871 class HSimulate
V8_FINAL :
public HInstruction {
1878 pop_count_(pop_count),
1880 assigned_indexes_(2, zone),
1882 removable_(removable),
1883 done_with_replay_(
false) {}
1898 ASSERT(HasAssignedIndexAt(index));
1899 return assigned_indexes_[index];
1902 return assigned_indexes_[index] != kNoIndex;
1905 AddValue(index, value);
1908 AddValue(kNoIndex, value);
1911 for (
int i = 0; i < assigned_indexes_.length(); ++i) {
1912 if (assigned_indexes_[i] == environment_index)
return i;
1918 return values_[index];
1930 void ReplayEnvironment(HEnvironment* env);
1942 values_[index] = value;
1946 static const int kNoIndex = -1;
1947 void AddValue(
int index,
HValue* value) {
1948 assigned_indexes_.Add(index, zone_);
1950 values_.Add(
NULL, zone_);
1953 SetOperandAt(values_.length() - 1, value);
1955 bool HasValueForIndex(
int index) {
1956 for (
int i = 0; i < assigned_indexes_.length(); ++i) {
1957 if (assigned_indexes_[i] == index)
return true;
1963 ZoneList<HValue*> values_;
1964 ZoneList<int> assigned_indexes_;
1967 bool done_with_replay_ : 1;
1970 Handle<JSFunction> closure_;
1975 class HEnvironmentMarker
V8_FINAL :
public HTemplateInstruction<1> {
1985 next_simulate_ = simulate;
1996 ASSERT(closure_.is_null());
2006 HEnvironmentMarker(Kind kind,
int index)
2007 : kind_(kind), index_(index), next_simulate_(
NULL) { }
2011 HSimulate* next_simulate_;
2014 Handle<JSFunction> closure_;
2019 class HStackCheck
V8_FINAL :
public HTemplateInstruction<1> {
2038 DeleteAndReplaceWith(
NULL);
2048 HStackCheck(
HValue* context,
Type type) : type_(type) {
2049 SetOperandAt(0, context);
2050 SetChangesFlag(kNewSpacePromotion);
2065 class HArgumentsObject;
2068 class HEnterInlined
V8_FINAL :
public HTemplateInstruction<0> {
2073 int arguments_count,
2074 FunctionLiteral*
function,
2077 HArgumentsObject* arguments_object) {
2078 return new(zone) HEnterInlined(closure, arguments_count,
function,
2079 inlining_kind, arguments_var,
2080 arguments_object, zone);
2083 void RegisterReturnTarget(HBasicBlock* return_target,
Zone* zone);
2092 FunctionLiteral*
function()
const {
return function_; }
2106 int arguments_count,
2107 FunctionLiteral* function,
2110 HArgumentsObject* arguments_object,
2112 : closure_(closure),
2113 arguments_count_(arguments_count),
2114 arguments_pushed_(
false),
2115 function_(function),
2116 inlining_kind_(inlining_kind),
2117 arguments_var_(arguments_var),
2118 arguments_object_(arguments_object),
2119 return_targets_(2, zone) {
2123 int arguments_count_;
2124 bool arguments_pushed_;
2125 FunctionLiteral* function_;
2127 Variable* arguments_var_;
2128 HArgumentsObject* arguments_object_;
2129 ZoneList<HBasicBlock*> return_targets_;
2133 class HLeaveInlined
V8_FINAL :
public HTemplateInstruction<0> {
2138 drop_count_(drop_count) { }
2145 return entry_->arguments_pushed() ? -drop_count_ : 0;
2151 HEnterInlined* entry_;
2170 explicit HPushArgument(
HValue* value) : HUnaryOperation(value) {
2176 class HThisFunction
V8_FINAL :
public HTemplateInstruction<0> {
2195 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
2199 class HDeclareGlobals
V8_FINAL :
public HUnaryOperation {
2216 HDeclareGlobals(
HValue* context,
2219 : HUnaryOperation(context),
2223 SetAllSideEffects();
2235 explicit HCall<V>(
int argument_count) : argument_count_(argument_count) {
2237 this->SetAllSideEffects();
2241 return HType::Tagged();
2245 return argument_count_;
2249 return -argument_count();
2253 int argument_count_;
2260 :
HCall<1>(argument_count) {
2261 SetOperandAt(0, value);
2278 :
HCall<2>(argument_count) {
2279 SetOperandAt(0, first);
2280 SetOperandAt(1, second);
2295 class HCallJSFunction
V8_FINAL :
public HCall<1> {
2297 static HCallJSFunction* New(Zone* zone,
2301 bool pass_argument_count);
2316 return has_stack_check_;
2323 HCallJSFunction(
HValue* function,
2325 bool pass_argument_count,
2326 bool has_stack_check)
2327 :
HCall<1>(argument_count),
2328 pass_argument_count_(pass_argument_count),
2329 has_stack_check_(has_stack_check) {
2330 SetOperandAt(0,
function);
2333 bool pass_argument_count_;
2334 bool has_stack_check_;
2338 class HCallWithDescriptor
V8_FINAL :
public HInstruction {
2346 HCallWithDescriptor* res =
2347 new(zone) HCallWithDescriptor(target, argument_count,
2348 descriptor, operands, zone);
2354 return values_[index];
2362 int par_index = index - 1;
2363 ASSERT(par_index < descriptor_->environment_length());
2364 return descriptor_->GetParameterRepresentation(par_index);
2371 return HType::Tagged();
2375 return argument_count_;
2379 return -argument_count_;
2387 return OperandAt(0);
2394 HCallWithDescriptor(
HValue* target,
2399 : descriptor_(descriptor),
2400 values_(descriptor->environment_length() + 1, zone) {
2401 argument_count_ = argument_count;
2402 AddOperand(target, zone);
2403 for (
int i = 0; i < operands.
length(); i++) {
2404 AddOperand(operands[i], zone);
2407 this->SetAllSideEffects();
2410 void AddOperand(HValue* v, Zone* zone) {
2411 values_.Add(
NULL, zone);
2412 SetOperandAt(values_.length() - 1, v);
2415 void InternalSetOperandAt(
int index,
2417 values_[index] = value;
2420 const CallInterfaceDescriptor* descriptor_;
2421 ZoneList<HValue*> values_;
2422 int argument_count_;
2426 class HInvokeFunction
V8_FINAL :
public HBinaryCall {
2435 known_function_(known_function) {
2436 formal_parameter_count_ = known_function.
is_null()
2437 ? 0 : known_function->shared()->formal_parameter_count();
2438 has_stack_check_ = !known_function.
is_null() &&
2439 (known_function->code()->kind() == Code::FUNCTION ||
2440 known_function->code()->kind() == Code::OPTIMIZED_FUNCTION);
2447 int argument_count) {
2448 return new(zone) HInvokeFunction(context,
function,
2449 known_function, argument_count);
2458 return has_stack_check_;
2464 HInvokeFunction(
HValue* context,
HValue* function,
int argument_count)
2466 has_stack_check_(
false) {
2470 int formal_parameter_count_;
2471 bool has_stack_check_;
2475 class HCallFunction
V8_FINAL :
public HBinaryCall {
2487 virtual
int argument_delta() const
V8_OVERRIDE {
return -argument_count(); }
2490 HCallFunction(
HValue* context,
2500 class HCallNew
V8_FINAL :
public HBinaryCall {
2510 HCallNew(
HValue* context,
HValue* constructor,
int argument_count)
2511 :
HBinaryCall(context, constructor, argument_count) {}
2515 class HCallNewArray
V8_FINAL :
public HBinaryCall {
2532 HCallNewArray(
HValue* context,
HValue* constructor,
int argument_count,
2534 :
HBinaryCall(context, constructor, argument_count),
2535 elements_kind_(elements_kind) {}
2541 class HCallRuntime
V8_FINAL :
public HCall<1> {
2545 const Runtime::Function*,
2548 virtual void PrintDataTo(StringStream* stream)
V8_OVERRIDE;
2555 save_doubles_ = save_doubles;
2565 HCallRuntime(
HValue* context,
2569 :
HCall<1>(argument_count), c_function_(c_function), name_(name),
2571 SetOperandAt(0, context);
2574 const Runtime::Function* c_function_;
2580 class HMapEnumLength
V8_FINAL :
public HUnaryOperation {
2594 explicit HMapEnumLength(
HValue* value)
2595 : HUnaryOperation(value, HType::
Smi()) {
2598 SetDependsOnFlag(kMaps);
2601 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
2605 class HUnaryMathOperation
V8_FINAL :
public HTemplateInstruction<2> {
2607 static HInstruction* New(Zone* zone,
2630 return representation();
2646 const char* OpName()
const;
2652 HUnaryMathOperation* b = HUnaryMathOperation::cast(other);
2653 return op_ == b->op();
2659 SetOperandAt(0, context);
2660 SetOperandAt(1, value);
2669 SetFlag(kFlexibleRepresentation);
2672 SetChangesFlag(kNewSpacePromotion);
2684 SetFlag(kAllowUndefinedAsNaN);
2687 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
2689 HValue* SimplifiedDividendForMathFloorOfDiv(HDiv* hdiv);
2690 HValue* SimplifiedDivisorForMathFloorOfDiv(HDiv* hdiv);
2696 class HLoadRoot
V8_FINAL :
public HTemplateInstruction<0> {
2711 HLoadRoot* b = HLoadRoot::cast(other);
2712 return index_ == b->index_;
2721 SetDependsOnFlag(kCalls);
2724 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
2730 class HCheckMaps
V8_FINAL :
public HTemplateInstruction<2> {
2732 static HCheckMaps* New(Zone* zone, HValue* context, HValue* value,
2733 Handle<Map> map, CompilationInfo*
info,
2734 HValue* typecheck =
NULL);
2736 HValue* value, SmallMapList* maps,
2738 HCheckMaps* check_map =
new(zone) HCheckMaps(value, zone, typecheck);
2739 for (
int i = 0; i < maps->length(); i++) {
2740 check_map->Add(maps->at(i), zone);
2751 virtual bool HandleSideEffectDominator(
GVNFlag side_effect,
2763 for (
int i = 0; i < maps->size(); i++) {
2764 map_set_.Add(maps->at(i), zone);
2769 return has_migration_target_;
2776 return this->map_set_.Equals(&HCheckMaps::cast(other)->map_set_);
2783 map_set_.Add(Unique<Map>(map), zone);
2784 SetDependsOnFlag(kMaps);
2785 SetDependsOnFlag(kElementsKind);
2787 if (!has_migration_target_ && map->is_migration_target()) {
2788 has_migration_target_ =
true;
2789 SetChangesFlag(kNewSpacePromotion);
2794 HCheckMaps(HValue* value, Zone *zone, HValue* typecheck)
2795 : HTemplateInstruction<2>(value->type()),
2797 SetOperandAt(0, value);
2799 SetOperandAt(1, typecheck !=
NULL ? typecheck : value);
2802 SetFlag(kTrackSideEffectDominators);
2806 bool has_migration_target_;
2807 UniqueSet<Map> map_set_;
2811 class HCheckValue
V8_FINAL :
public HUnaryOperation {
2820 Unique<JSFunction> target = Unique<JSFunction>::CreateUninitialized(func);
2821 HCheckValue*
check =
new(zone) HCheckValue(value, target, in_new_space);
2825 HValue* value, Unique<HeapObject> target,
2826 bool object_in_new_space) {
2827 return new(zone) HCheckValue(value, target, object_in_new_space);
2831 object_ = Unique<HeapObject>(object_.handle());
2845 Unique<HeapObject>
object()
const {
return object_; }
2852 HCheckValue* b = HCheckValue::cast(other);
2853 return object_ == b->object_;
2857 HCheckValue(
HValue* value, Unique<HeapObject>
object,
2858 bool object_in_new_space)
2859 : HUnaryOperation(value, value->type()),
2861 object_in_new_space_(object_in_new_space) {
2866 Unique<HeapObject> object_;
2867 bool object_in_new_space_;
2871 class HCheckInstanceType
V8_FINAL :
public HUnaryOperation {
2878 LAST_INTERVAL_CHECK = IS_JS_ARRAY
2891 bool is_interval_check()
const {
return check_ <= LAST_INTERVAL_CHECK; }
2893 void GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag);
2902 HCheckInstanceType* b = HCheckInstanceType::cast(other);
2903 return check_ == b->check_;
2909 const char* GetCheckName();
2912 : HUnaryOperation(value), check_(check) {
2921 class HCheckSmi
V8_FINAL :
public HUnaryOperation {
2930 HType value_type = value()->
type();
2931 if (value_type.IsSmi()) {
2943 explicit HCheckSmi(
HValue* value) : HUnaryOperation(value, HType::
Smi()) {
2950 class HCheckHeapObject
V8_FINAL :
public HUnaryOperation {
2964 return value()->
type().IsHeapObject() ?
NULL :
this;
2973 explicit HCheckHeapObject(
HValue* value)
2974 : HUnaryOperation(value, HType::NonPrimitive()) {
2981 class InductionVariableData;
2991 : updated_variable(
NULL), limit(
NULL),
2992 limit_is_upper(
false), limit_is_included(
false) {}
3002 class InductionVariableData
V8_FINAL :
public ZoneObject {
3006 HBoundsCheck*
check() {
return check_; }
3011 return upper_limit_;
3014 upper_limit_ = upper_limit;
3022 int32_t upper_limit = kNoLimit)
3023 : check_(check), next_(next), upper_limit_(upper_limit),
3024 processed_(
false) {}
3027 HBoundsCheck* check_;
3039 void AddCheck(HBoundsCheck*
check,
int32_t upper_limit = kNoLimit);
3040 void CloseCurrentBlock();
3043 : length_(length), next_(next), checks_(
NULL),
3044 first_check_in_block_(
NULL),
3046 added_constant_(
NULL),
3047 current_and_mask_in_block_(0),
3048 current_or_mask_in_block_(0) {}
3056 HBoundsCheck* first_check_in_block() {
return first_check_in_block_; }
3057 HBitwise* added_index() {
return added_index_; }
3058 void set_added_index(HBitwise* index) { added_index_ = index; }
3059 HConstant* added_constant() {
return added_constant_; }
3060 void set_added_constant(HConstant* constant) { added_constant_ = constant; }
3061 int32_t current_and_mask_in_block() {
return current_and_mask_in_block_; }
3062 int32_t current_or_mask_in_block() {
return current_or_mask_in_block_; }
3063 int32_t current_upper_limit() {
return current_upper_limit_; }
3066 ChecksRelatedToLength* next_;
3067 InductionVariableCheck* checks_;
3069 HBoundsCheck* first_check_in_block_;
3070 HBitwise* added_index_;
3071 HConstant* added_constant_;
3072 int32_t current_and_mask_in_block_;
3073 int32_t current_or_mask_in_block_;
3087 token == Token::GTE || token == Token::LTE;
3090 return token == Token::LTE || token == Token::LT || token == Token::NE;
3095 token(
Token::ILLEGAL),
3097 other_target(
NULL) {}
3102 static InductionVariableData* ExaminePhi(HPhi* phi);
3103 static void ComputeLimitFromPredecessorBlock(
3106 static bool ComputeInductionVariableLimit(
3117 : base(
NULL), and_mask(0), or_mask(0), context(
NULL) {}
3119 static void DecomposeBitwise(
HValue* value,
3120 BitwiseDecompositionResult* result);
3122 void AddCheck(HBoundsCheck*
check,
int32_t upper_limit = kNoLimit);
3125 HBasicBlock* current_branch,
3126 HBasicBlock* other_branch);
3141 return additional_upper_limit_is_included_;
3145 return additional_lower_limit_is_included_;
3149 if (base()->IsInteger32Constant() && base()->GetInteger32Constant() >= 0) {
3152 if (additional_lower_limit() !=
NULL &&
3153 additional_lower_limit()->IsInteger32Constant() &&
3154 additional_lower_limit()->GetInteger32Constant() >= 0) {
3165 template <
class T>
void swap(
T* a,
T* b) {
3171 InductionVariableData(HPhi* phi, HValue* base,
int32_t increment)
3172 : phi_(phi), base_(IgnoreOsrValue(base)), increment_(increment),
3173 limit_(
NULL), limit_included_(
false), limit_validity_(
NULL),
3174 induction_exit_block_(
NULL), induction_exit_target_(
NULL),
3176 additional_upper_limit_(
NULL),
3177 additional_upper_limit_is_included_(
false),
3178 additional_lower_limit_(
NULL),
3179 additional_lower_limit_is_included_(
false) {}
3181 static int32_t ComputeIncrement(HPhi* phi, HValue* phi_operand);
3183 static HValue* IgnoreOsrValue(HValue* v);
3184 static InductionVariableData* GetInductionVariableData(HValue* v);
3190 bool limit_included_;
3191 HBasicBlock* limit_validity_;
3192 HBasicBlock* induction_exit_block_;
3193 HBasicBlock* induction_exit_target_;
3194 ChecksRelatedToLength* checks_;
3195 HValue* additional_upper_limit_;
3196 bool additional_upper_limit_is_included_;
3197 HValue* additional_lower_limit_;
3198 bool additional_lower_limit_is_included_;
3202 class HPhi
V8_FINAL :
public HValue {
3206 merged_index_(merged_index),
3208 induction_variable_data_(
NULL) {
3210 non_phi_uses_[i] = 0;
3211 indirect_uses_[i] = 0;
3213 ASSERT(merged_index >= 0 || merged_index == kInvalidMergedIndex);
3214 SetFlag(kFlexibleRepresentation);
3215 SetFlag(kAllowUndefinedAsNaN);
3221 virtual
void InferRepresentation(
3224 return representation();
3227 return representation();
3229 virtual HType CalculateInferredType()
V8_OVERRIDE;
3232 return inputs_[index];
3234 HValue* GetRedundantReplacement();
3235 void AddInput(
HValue* value);
3243 int merged_index()
const {
return merged_index_; }
3246 return induction_variable_data_;
3249 return induction_variable_data_ !=
NULL;
3252 return IsInductionVariable() &&
3253 induction_variable_data_->limit() !=
NULL;
3257 induction_variable_data_ = InductionVariableData::ExaminePhi(
this);
3266 void InitRealUses(
int id);
3267 void AddNonPhiUsesFrom(HPhi* other);
3268 void AddIndirectUsesTo(
int* use_count);
3298 return reinterpret_cast<HPhi*
>(value);
3302 void SimplifyConstantInputs();
3305 static const int kInvalidMergedIndex = -1;
3310 inputs_[index] = value;
3320 InductionVariableData* induction_variable_data_;
3323 virtual bool IsDeletable() const
V8_OVERRIDE {
return !IsReceiver(); }
3334 return values_[index];
3348 values_[index] = value;
3359 return new(zone) HArgumentsObject(count, zone);
3368 values_.Add(
NULL, zone);
3369 SetOperandAt(values_.length() - 1, argument);
3375 HArgumentsObject(
int count,
Zone* zone)
3378 SetFlag(kIsArguments);
3385 class HCapturedObject
V8_FINAL :
public HDematerializedObject {
3390 values_.AddBlock(
NULL, length, zone);
3397 int length()
const {
return values_.length(); }
3405 ASSERT(store->IsStoreNamedField());
3410 void ReplayEnvironment(HEnvironment* env);
3426 class HConstant
V8_FINAL :
public HTemplateInstruction<0> {
3439 return instruction->
Append(HConstant::New(
3440 zone, context, value, representation));
3448 return instruction->
Prepend(HConstant::New(
3449 zone, context, value, representation));
3453 Unique<Object> unique,
3454 bool is_not_in_new_space,
3456 return instruction->
Prepend(
new(zone) HConstant(
3458 is_not_in_new_space,
false,
false, kUnknownInstanceType));
3462 if (object_.handle().is_null()) {
3465 object_ = Unique<Object>::CreateUninitialized(
3469 ASSERT(has_int32_value_ || !object_.handle()->IsSmi());
3470 return object_.handle();
3475 return constant_object->IsHeapObject() &&
3480 return has_double_value_ &&
3481 (BitCast<int64_t>(double_value_) == BitCast<int64_t>(-0.0) ||
3487 return is_not_in_new_space_;
3490 bool ImmortalImmovable()
const;
3511 Maybe<HConstant*> CopyToTruncatedInt32(
Zone* zone);
3512 Maybe<HConstant*> CopyToTruncatedNumber(
Zone* zone);
3513 bool HasInteger32Value()
const {
return has_int32_value_; }
3515 ASSERT(HasInteger32Value());
3516 return int32_value_;
3521 ASSERT(HasDoubleValue());
3522 return double_value_;
3528 return object_.IsKnownGlobal(isolate()->heap()->the_hole_value());
3532 ASSERT(HasNumberValue());
3536 return int32_value_;
3539 if (has_double_value_ || has_int32_value_)
return false;
3540 ASSERT(!object_.handle().is_null());
3544 ASSERT(HasStringValue());
3548 return HasStringValue() && StringShape(instance_type_).IsInternalized();
3552 return has_external_reference_value_;
3555 return external_reference_value_;
// Hash derives from whichever payload this constant carries: the raw
// int32 value, the double's bit pattern (so NaNs/-0 hash consistently),
// the external reference's address, or -- as the fallback -- the
// Unique<Object> handle's hashcode.
3564 if (has_int32_value_) {
3565 return static_cast<intptr_t
>(int32_value_);
3566 }
else if (has_double_value_) {
3567 return static_cast<intptr_t
>(BitCast<int64_t>(double_value_));
3568 }
else if (has_external_reference_value_) {
3569 return reinterpret_cast<intptr_t
>(external_reference_value_.address());
3571 ASSERT(!object_.handle().is_null());
3572 return object_.Hashcode();
3577 if (!has_double_value_ && !has_external_reference_value_) {
3578 ASSERT(!object_.handle().is_null());
3579 object_ = Unique<Object>(object_.handle());
3588 return object_.IsInitialized() && object_ == other;
3592 HConstant* other_constant = HConstant::cast(other);
3593 if (has_int32_value_) {
3594 return other_constant->has_int32_value_ &&
3595 int32_value_ == other_constant->int32_value_;
3596 }
else if (has_double_value_) {
3597 return other_constant->has_double_value_ &&
3598 BitCast<int64_t>(double_value_) ==
3599 BitCast<int64_t>(other_constant->double_value_);
3600 }
else if (has_external_reference_value_) {
3601 return other_constant->has_external_reference_value_ &&
3602 external_reference_value_ ==
3603 other_constant->external_reference_value_;
3605 if (other_constant->has_int32_value_ ||
3606 other_constant->has_double_value_ ||
3607 other_constant->has_external_reference_value_) {
3610 ASSERT(!object_.handle().is_null());
3611 return other_constant->object_ == object_;
3622 virtual Range* InferRange(Zone* zone)
V8_OVERRIDE;
3625 friend class HGraph;
3629 bool is_not_in_new_space =
true,
3631 HConstant(
double value,
3633 bool is_not_in_new_space = true,
3635 HConstant(Unique<
Object> unique,
3638 bool is_not_in_new_space,
3640 bool is_undetectable,
3643 explicit HConstant(ExternalReference reference);
3647 virtual
bool IsDeletable() const V8_OVERRIDE {
return true; }
3653 Unique<Object> object_;
3660 bool has_smi_value_ : 1;
3661 bool has_int32_value_ : 1;
3662 bool has_double_value_ : 1;
3663 bool has_external_reference_value_ : 1;
3664 bool is_not_in_new_space_ : 1;
3665 bool boolean_value_ : 1;
3666 bool is_undetectable_: 1;
3668 double double_value_;
3669 ExternalReference external_reference_value_;
3679 HType type = HType::Tagged())
3683 SetOperandAt(0, context);
3684 SetOperandAt(1, left);
3685 SetOperandAt(2, right);
3696 if (!IsCommutative())
return false;
3700 if (left()->IsConstant())
return true;
3701 if (right()->IsConstant())
return false;
3710 return AreOperandsBetterSwitched() ? right() : left();
3714 return AreOperandsBetterSwitched() ? left() : right();
3718 ASSERT(index >= 1 && index <= 2);
3719 observed_input_representation_[index - 1] = rep;
3723 observed_output_representation_ = observed;
3728 return observed_input_representation_[index - 1];
3739 virtual void InferRepresentation(
3745 virtual
bool IsCommutative()
const {
return false; }
3751 return representation();
3757 set_operand_position(zone, 1, left_pos);
3758 set_operand_position(zone, 2, right_pos);
3762 if (!right()->IsInteger32Constant())
return false;
3770 bool IgnoreObservedOutputRepresentation(
Representation current_rep);
3793 bool known_function()
const {
return known_function_; }
3799 known_function_ =
function->IsConstant() &&
3800 HConstant::cast(
function)->handle(function->isolate())->IsJSFunction();
3802 SetOperandAt(0, receiver);
3803 SetOperandAt(1,
function);
3807 bool known_function_;
3811 class HApplyArguments
V8_FINAL :
public HTemplateInstruction<4> {
3831 HApplyArguments(
HValue* function,
3836 SetOperandAt(0,
function);
3837 SetOperandAt(1, receiver);
3838 SetOperandAt(2, length);
3839 SetOperandAt(3, elements);
3840 SetAllSideEffects();
3845 class HArgumentsElements
V8_FINAL :
public HTemplateInstruction<0> {
3861 explicit HArgumentsElements(
bool from_inlined) : from_inlined_(from_inlined) {
3868 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
3874 class HArgumentsLength
V8_FINAL :
public HUnaryOperation {
3888 explicit HArgumentsLength(
HValue* value) : HUnaryOperation(value) {
3893 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
3897 class HAccessArgumentsAt
V8_FINAL :
public HTemplateInstruction<3> {
3901 virtual void PrintDataTo(StringStream* stream)
V8_OVERRIDE;
3920 SetOperandAt(0, arguments);
3921 SetOperandAt(1, length);
3922 SetOperandAt(2, index);
3925 virtual bool DataEquals(HValue* other)
V8_OVERRIDE {
return true; }
3929 class HBoundsCheckBaseIndexInformation;
3932 class HBoundsCheck
V8_FINAL :
public HTemplateInstruction<2> {
3943 void ApplyIndexChange();
3947 DecompositionResult decomposition;
3948 if (index()->TryDecompose(&decomposition)) {
3949 base_ = decomposition.base();
3950 offset_ = decomposition.offset();
3951 scale_ = decomposition.scale();
3962 return representation();
3966 virtual void InferRepresentation(
3976 return skip_check();
3982 friend class HBoundsCheckBaseIndexInformation;
3986 virtual
bool DataEquals(
HValue* other) V8_OVERRIDE {
return true; }
3999 : skip_check_(
false),
4000 base_(
NULL), offset_(0), scale_(0),
4001 allow_equality_(
false) {
4002 SetOperandAt(0, index);
4003 SetOperandAt(1, length);
4004 SetFlag(kFlexibleRepresentation);
4009 return skip_check() && !FLAG_debug_code;
4014 class HBoundsCheckBaseIndexInformation
V8_FINAL
4015 :
public HTemplateInstruction<2> {
4018 DecompositionResult decomposition;
4019 if (check->index()->TryDecompose(&decomposition)) {
4020 SetOperandAt(0, decomposition.base());
4021 SetOperandAt(1, check);
4033 return representation();
4046 HType type = HType::Tagged())
4048 SetFlag(kFlexibleRepresentation);
4049 SetFlag(kTruncatingToInt32);
4050 SetFlag(kAllowUndefinedAsNaN);
4051 SetAllSideEffects();
4055 if (to.IsTagged() &&
4057 SetAllSideEffects();
4060 ClearAllSideEffects();
4063 if (to.IsTagged()) SetChangesFlag(kNewSpacePromotion);
4088 virtual
bool IsDeletable() const
V8_OVERRIDE {
return true; }
4092 class HMathFloorOfDiv
V8_FINAL :
public HBinaryOperation {
4108 SetFlag(kCanOverflow);
4109 SetFlag(kCanBeDivByZero);
4110 SetFlag(kLeftCanBeMinInt);
4111 SetFlag(kLeftCanBeNegative);
4112 SetFlag(kLeftCanBePositive);
4113 SetFlag(kAllowUndefinedAsNaN);
4116 virtual Range* InferRange(Zone* zone)
V8_OVERRIDE;
4118 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
4126 SetAllSideEffects();
4127 SetFlag(kFlexibleRepresentation);
4128 SetFlag(kAllowUndefinedAsNaN);
4132 if (to.IsTagged() &&
4134 SetAllSideEffects();
4137 ClearAllSideEffects();
4140 if (to.IsTagged()) SetChangesFlag(kNewSpacePromotion);
4146 virtual
bool IsDeletable() const
V8_OVERRIDE {
return true; }
4150 class HCompareGeneric
V8_FINAL :
public HBinaryOperation {
4167 HCompareGeneric(
HValue* context,
4175 SetAllSideEffects();
4188 HBasicBlock*, HBasicBlock*);
4196 observed_input_representation_[0] = left;
4197 observed_input_representation_[1] = right;
4200 virtual void InferRepresentation(
4204 return representation();
4207 return observed_input_representation_[index];
4214 set_operand_position(zone, 0, left_pos);
4215 set_operand_position(zone, 1, right_pos);
4224 HBasicBlock* true_target =
NULL,
4225 HBasicBlock* false_target = NULL)
4227 SetFlag(kFlexibleRepresentation);
4229 SetOperandAt(0, left);
4230 SetOperandAt(1, right);
4231 SetSuccessorAt(0, true_target);
4232 SetSuccessorAt(1, false_target);
4235 Representation observed_input_representation_[2];
4240 class HCompareHoleAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4244 HBasicBlock*, HBasicBlock*);
4246 virtual void InferRepresentation(
4250 return representation();
4256 HCompareHoleAndBranch(
HValue* value,
4257 HBasicBlock* true_target =
NULL,
4258 HBasicBlock* false_target = NULL)
4260 SetFlag(kFlexibleRepresentation);
4261 SetFlag(kAllowUndefinedAsNaN);
4266 class HCompareMinusZeroAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4270 virtual void InferRepresentation(
4274 return representation();
4277 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4282 explicit HCompareMinusZeroAndBranch(
HValue* value)
4292 HBasicBlock*, HBasicBlock*);
4294 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4296 static const int kNoKnownSuccessorIndex = -1;
4299 known_successor_index_ = known_successor_index;
4320 HBasicBlock* true_target =
NULL,
4321 HBasicBlock* false_target = NULL)
4322 : known_successor_index_(kNoKnownSuccessorIndex) {
4323 ASSERT(!left->IsConstant() ||
4324 (!HConstant::cast(left)->HasInteger32Value() ||
4325 HConstant::cast(left)->HasSmiValue()));
4326 ASSERT(!right->IsConstant() ||
4327 (!HConstant::cast(right)->HasInteger32Value() ||
4328 HConstant::cast(right)->HasSmiValue()));
4329 SetOperandAt(0, left);
4330 SetOperandAt(1, right);
4331 SetSuccessorAt(0, true_target);
4332 SetSuccessorAt(1, false_target);
4335 int known_successor_index_;
4339 class HIsObjectAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4343 HBasicBlock*, HBasicBlock*);
4349 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4354 HIsObjectAndBranch(
HValue* value,
4355 HBasicBlock* true_target = NULL,
4356 HBasicBlock* false_target = NULL)
4361 class HIsStringAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4365 HBasicBlock*, HBasicBlock*);
4371 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4376 virtual
int RedefinedOperandIndex() {
return 0; }
4379 HIsStringAndBranch(
HValue* value,
4380 HBasicBlock* true_target = NULL,
4381 HBasicBlock* false_target = NULL)
4386 class HIsSmiAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4390 HBasicBlock*, HBasicBlock*);
4403 HIsSmiAndBranch(
HValue* value,
4404 HBasicBlock* true_target = NULL,
4405 HBasicBlock* false_target = NULL)
4412 class HIsUndetectableAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4416 HBasicBlock*, HBasicBlock*);
4422 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4427 HIsUndetectableAndBranch(
HValue* value,
4428 HBasicBlock* true_target = NULL,
4429 HBasicBlock* false_target = NULL)
4465 SetOperandAt(0, context);
4466 SetOperandAt(1, left);
4467 SetOperandAt(2, right);
4469 SetChangesFlag(kNewSpacePromotion);
4490 class HHasInstanceTypeAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4506 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4514 : HUnaryControlInstruction(value, NULL, NULL), from_(from), to_(to) {
4523 class HHasCachedArrayIndexAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4533 explicit HHasCachedArrayIndexAndBranch(
HValue* value)
4538 class HGetCachedArrayIndex
V8_FINAL :
public HUnaryOperation {
4552 explicit HGetCachedArrayIndex(
HValue* value) : HUnaryOperation(value) {
4557 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
4561 class HClassOfTestAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4579 class_name_(class_name) { }
4585 class HTypeofIsAndBranch
V8_FINAL :
public HUnaryControlInstruction {
4598 virtual bool KnownSuccessorBlock(HBasicBlock** block)
V8_OVERRIDE;
4601 type_literal_ = Unique<String>(type_literal_.handle());
4607 type_literal_(Unique<
String>::CreateUninitialized(type_literal)) { }
4609 Unique<String> type_literal_;
4613 class HInstanceOf
V8_FINAL :
public HBinaryOperation {
4629 SetAllSideEffects();
4634 class HInstanceOfKnownGlobal
V8_FINAL :
public HTemplateInstruction<2> {
4638 Handle<JSFunction>);
4651 HInstanceOfKnownGlobal(
HValue* context,
4654 : HTemplateInstruction<2>(HType::Boolean()), function_(right) {
4655 SetOperandAt(0, context);
4656 SetOperandAt(1, left);
4658 SetAllSideEffects();
4665 class HPower
V8_FINAL :
public HTemplateInstruction<2> {
4667 static HInstruction* New(Zone* zone,
4681 return RequiredInputRepresentation(index);
4691 SetOperandAt(0, left);
4692 SetOperandAt(1, right);
4695 SetChangesFlag(kNewSpacePromotion);
4704 class HAdd
V8_FINAL :
public HArithmeticBinaryOperation {
4706 static HInstruction* New(Zone* zone,
4715 return !representation().IsTagged() && !representation().IsExternal();
4720 virtual
bool TryDecompose(DecompositionResult* decomposition)
V8_OVERRIDE {
4721 if (left()->IsInteger32Constant()) {
4722 decomposition->Apply(right(), left()->GetInteger32Constant());
4724 }
else if (right()->IsInteger32Constant()) {
4725 decomposition->Apply(left(), right()->GetInteger32Constant());
4733 if (to.IsTagged() &&
4736 SetAllSideEffects();
4739 ClearAllSideEffects();
4742 if (to.IsTagged()) {
4743 SetChangesFlag(kNewSpacePromotion);
4744 ClearFlag(kAllowUndefinedAsNaN);
4755 virtual
bool DataEquals(
HValue* other) V8_OVERRIDE {
return true; }
4762 SetFlag(kCanOverflow);
4767 class HSub
V8_FINAL :
public HArithmeticBinaryOperation {
4769 static HInstruction* New(Zone* zone,
4776 virtual
bool TryDecompose(DecompositionResult* decomposition)
V8_OVERRIDE {
4777 if (right()->IsInteger32Constant()) {
4778 decomposition->Apply(left(), -right()->GetInteger32Constant());
4795 SetFlag(kCanOverflow);
4800 class HMul
V8_FINAL :
public HArithmeticBinaryOperation {
4802 static HInstruction* New(Zone* zone,
4811 HInstruction* instr = HMul::New(zone, context, left, right);
4812 if (!instr->IsMul())
return instr;
4813 HMul* mul = HMul::cast(instr);
4824 return !representation().IsTagged();
4845 SetFlag(kCanOverflow);
4850 class HMod
V8_FINAL :
public HArithmeticBinaryOperation {
4852 static HInstruction* New(Zone* zone,
4877 SetFlag(kCanBeDivByZero);
4878 SetFlag(kCanOverflow);
4879 SetFlag(kLeftCanBeNegative);
4884 class HDiv
V8_FINAL :
public HArithmeticBinaryOperation {
4886 static HInstruction* New(Zone* zone,
4910 SetFlag(kCanBeDivByZero);
4911 SetFlag(kCanOverflow);
4916 class HMathMinMax
V8_FINAL :
public HArithmeticBinaryOperation {
4927 return RequiredInputRepresentation(index);
4930 virtual void InferRepresentation(
4951 return other->IsMathMinMax() &&
4952 HMathMinMax::cast(other)->operation_ == operation_;
4962 Operation operation_;
4966 class HBitwise
V8_FINAL :
public HBitwiseBinaryOperation {
4968 static HInstruction* New(Zone* zone,
4985 virtual
bool DataEquals(
HValue* other) V8_OVERRIDE {
4986 return op() == HBitwise::cast(other)->op();
4992 HBitwise(
HValue* context,
4998 ASSERT(op == Token::BIT_AND || op == Token::BIT_OR || op == Token::BIT_XOR);
5001 if (op == Token::BIT_AND &&
5002 ((left->IsConstant() &&
5004 HConstant::cast(left)->Integer32Value() >= 0) ||
5005 (right->IsConstant() &&
5007 HConstant::cast(right)->Integer32Value() >= 0))) {
5008 SetFlag(kTruncatingToSmi);
5009 SetFlag(kTruncatingToInt32);
5012 }
else if (op == Token::BIT_OR &&
5013 ((left->IsConstant() &&
5015 HConstant::cast(left)->Integer32Value() < 0) ||
5016 (right->IsConstant() &&
5018 HConstant::cast(right)->Integer32Value() < 0))) {
5019 SetFlag(kTruncatingToSmi);
5020 SetFlag(kTruncatingToInt32);
5028 class HShl
V8_FINAL :
public HBitwiseBinaryOperation {
5030 static HInstruction* New(Zone* zone,
5035 virtual Range* InferRange(Zone* zone)
V8_OVERRIDE;
5040 if (new_rep.IsSmi() &&
5059 class HShr
V8_FINAL :
public HBitwiseBinaryOperation {
5061 static HInstruction* New(Zone* zone,
5067 if (right()->IsInteger32Constant()) {
5068 if (decomposition->Apply(left(), 0, right()->GetInteger32Constant())) {
5098 class HSar
V8_FINAL :
public HBitwiseBinaryOperation {
5100 static HInstruction* New(Zone* zone,
5106 if (right()->IsInteger32Constant()) {
5107 if (decomposition->Apply(left(), 0, right()->GetInteger32Constant())) {
5137 class HRor
V8_FINAL :
public HBitwiseBinaryOperation {
5143 return new(zone) HRor(context, left, right);
5166 class HOsrEntry
V8_FINAL :
public HTemplateInstruction<0> {
5179 explicit HOsrEntry(
BailoutId ast_id) : ast_id_(ast_id) {
5180 SetChangesFlag(kOsrEntries);
5181 SetChangesFlag(kNewSpacePromotion);
5188 class HParameter
V8_FINAL :
public HTemplateInstruction<0> {
5200 unsigned index()
const {
return index_; }
5212 explicit HParameter(
unsigned index,
5213 ParameterKind kind = STACK_PARAMETER)
5219 explicit HParameter(
unsigned index,
5224 set_representation(r);
5228 ParameterKind kind_;
5232 class HCallStub
V8_FINAL :
public HUnaryCall {
5244 HCallStub(
HValue* context, CodeStub::Major major_key,
int argument_count)
5246 major_key_(major_key) {
5249 CodeStub::Major major_key_;
5253 class HUnknownOSRValue
V8_FINAL :
public HTemplateInstruction<0> {
5257 virtual void PrintDataTo(StringStream* stream);
5270 return incoming_value_->KnownOptimalRepresentation();
5276 HUnknownOSRValue(HEnvironment* environment,
int index)
5277 : environment_(environment),
5279 incoming_value_(NULL) {
5283 HEnvironment* environment_;
5285 HPhi* incoming_value_;
5289 class HLoadGlobalCell
V8_FINAL :
public HTemplateInstruction<0> {
5294 Unique<Cell>
cell()
const {
return cell_; }
5295 bool RequiresHoleCheck()
const;
5300 return cell_.Hashcode();
5304 cell_ = Unique<Cell>(cell_.handle());
5315 return cell_ == HLoadGlobalCell::cast(other)->cell_;
5319 HLoadGlobalCell(
Handle<Cell> cell, PropertyDetails details)
5320 : cell_(Unique<
Cell>::CreateUninitialized(cell)), details_(details) {
5323 SetDependsOnFlag(kGlobalVars);
5326 virtual bool IsDeletable() const
V8_OVERRIDE {
return !RequiresHoleCheck(); }
5329 PropertyDetails details_;
5333 class HLoadGlobalGeneric
V8_FINAL :
public HTemplateInstruction<2> {
5336 Handle<Object>,
bool);
5352 HLoadGlobalGeneric(
HValue* context,
5357 for_typeof_(for_typeof) {
5358 SetOperandAt(0, context);
5359 SetOperandAt(1, global_object);
5361 SetAllSideEffects();
5369 class HAllocate
V8_FINAL :
public HTemplateInstruction<2> {
5373 return ComputeFlags(
TENURED, type1) == ComputeFlags(
TENURED, type2) &&
5385 return new(zone) HAllocate(context, size, type, pretenure_flag,
5386 instance_type, allocation_site);
5404 return known_initial_map_;
5408 known_initial_map_ = known_initial_map;
5412 return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
5416 return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
5420 return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
5424 return (flags_ & ALLOCATE_DOUBLE_ALIGNED) != 0;
5428 return (flags_ & PREFILL_WITH_FILLER) != 0;
5432 flags_ =
static_cast<HAllocate::Flags
>(flags_ | PREFILL_WITH_FILLER);
5436 return (flags_ & CLEAR_NEXT_MAP_WORD) != 0;
5440 flags_ =
static_cast<HAllocate::Flags
>(flags_ | ALLOCATE_DOUBLE_ALIGNED);
5443 virtual bool HandleSideEffectDominator(
GVNFlag side_effect,
5452 ALLOCATE_IN_NEW_SPACE = 1 << 0,
5453 ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
5454 ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
5455 ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
5456 PREFILL_WITH_FILLER = 1 << 4,
5457 CLEAR_NEXT_MAP_WORD = 1 << 5
5460 HAllocate(HValue* context,
5467 : HTemplateInstruction<2>(type),
5468 flags_(ComputeFlags(pretenure_flag, instance_type)),
5469 dominating_allocate_(NULL),
5470 filler_free_space_size_(NULL) {
5471 SetOperandAt(0, context);
5472 SetOperandAt(1, size);
5474 SetFlag(kTrackSideEffectDominators);
5475 SetChangesFlag(kNewSpacePromotion);
5476 SetDependsOnFlag(kNewSpacePromotion);
5478 if (FLAG_trace_pretenuring) {
5479 PrintF(
"HAllocate with AllocationSite %p %s\n",
5480 allocation_site.is_null()
5481 ?
static_cast<void*
>(
NULL)
5482 : static_cast<void*>(*allocation_site),
5483 pretenure_flag ==
TENURED ?
"tenured" :
"not tenured");
5491 ? ALLOCATE_IN_OLD_POINTER_SPACE : ALLOCATE_IN_OLD_DATA_SPACE)
5492 : ALLOCATE_IN_NEW_SPACE;
5494 flags =
static_cast<Flags
>(flags | ALLOCATE_DOUBLE_ALIGNED);
5501 if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
5502 flags =
static_cast<Flags
>(flags | PREFILL_WITH_FILLER);
5506 flags =
static_cast<Flags
>(flags | CLEAR_NEXT_MAP_WORD);
5511 void UpdateClearNextMapWord(
bool clear_next_map_word) {
5512 flags_ =
static_cast<Flags
>(clear_next_map_word
5513 ? flags_ | CLEAR_NEXT_MAP_WORD
5514 : flags_ & ~CLEAR_NEXT_MAP_WORD);
5517 void UpdateSize(HValue* size) {
5518 SetOperandAt(1, size);
5521 HAllocate* GetFoldableDominator(HAllocate* dominator);
5523 void UpdateFreeSpaceFiller(
int32_t filler_size);
5525 void CreateFreeSpaceFiller(
int32_t filler_size);
5527 bool IsFoldable(HAllocate* allocate) {
5528 return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
5529 (IsOldDataSpaceAllocation() && allocate->IsOldDataSpaceAllocation()) ||
5530 (IsOldPointerSpaceAllocation() &&
5531 allocate->IsOldPointerSpaceAllocation());
5534 void ClearNextMapWord(
int offset);
5537 Handle<Map> known_initial_map_;
5538 HAllocate* dominating_allocate_;
5539 HStoreNamedField* filler_free_space_size_;
5543 class HStoreCodeEntry
V8_FINAL:
public HTemplateInstruction<2> {
5549 return new(zone) HStoreCodeEntry(
function, code);
5563 SetOperandAt(0,
function);
5564 SetOperandAt(1, code);
5569 class HInnerAllocatedObject
V8_FINAL :
public HTemplateInstruction<2> {
5575 HType type = HType::Tagged()) {
5576 return new(zone) HInnerAllocatedObject(value, offset, type);
5591 HInnerAllocatedObject(
HValue* value,
5593 HType type = HType::Tagged())
5594 : HTemplateInstruction<2>(type) {
5595 ASSERT(value->IsAllocate());
5596 SetOperandAt(0, value);
5597 SetOperandAt(1, offset);
5605 return !value->
type().IsBoolean()
5606 && !value->
type().IsSmi()
5607 && !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable());
5613 HValue* new_space_dominator) {
5614 while (object->IsInnerAllocatedObject()) {
5615 object = HInnerAllocatedObject::cast(
object)->base_object();
5617 if (object->IsConstant() && HConstant::cast(
object)->IsCell()) {
5620 if (object->IsConstant() &&
5621 HConstant::cast(
object)->HasExternalReferenceValue()) {
5625 if (
object != new_space_dominator)
return true;
5626 if (object->IsAllocate()) {
5629 if (HAllocate::cast(
object)->IsNewSpaceAllocation()) {
5634 while (value->IsInnerAllocatedObject()) {
5635 value = HInnerAllocatedObject::cast(value)->base_object();
5637 return object != value;
5643 class HStoreGlobalCell
V8_FINAL :
public HUnaryOperation {
5648 Unique<PropertyCell>
cell()
const {
return cell_; }
5650 return !details_.IsDontDelete() || details_.IsReadOnly();
5657 cell_ = Unique<PropertyCell>(cell_.handle());
5668 HStoreGlobalCell(
HValue* value,
5670 PropertyDetails details)
5671 : HUnaryOperation(value),
5672 cell_(Unique<
PropertyCell>::CreateUninitialized(cell)),
5674 SetChangesFlag(kGlobalVars);
5677 Unique<PropertyCell> cell_;
5678 PropertyDetails details_;
5682 class HLoadContextSlot
V8_FINAL :
public HUnaryOperation {
5694 kCheckReturnUndefined
5698 : HUnaryOperation(context), slot_index_(var->index()) {
5700 switch (var->
mode()) {
5703 mode_ = kCheckDeoptimize;
5706 mode_ = kCheckReturnUndefined;
5713 SetDependsOnFlag(kContextSlots);
5720 return mode_ == kCheckDeoptimize;
5724 return mode_ != kNoCheck;
5737 HLoadContextSlot* b = HLoadContextSlot::cast(other);
5738 return (slot_index() == b->slot_index());
5742 virtual bool IsDeletable() const
V8_OVERRIDE {
return !RequiresHoleCheck(); }
5749 class HStoreContextSlot
V8_FINAL :
public HTemplateInstruction<2> {
5761 kCheckIgnoreAssignment
5777 return mode_ == kCheckDeoptimize;
5781 return mode_ != kNoCheck;
5793 HStoreContextSlot(
HValue* context,
int slot_index, Mode
mode,
HValue* value)
5794 : slot_index_(slot_index), mode_(mode) {
5795 SetOperandAt(0, context);
5796 SetOperandAt(1, value);
5797 SetChangesFlag(kContextSlots);
5810 return portion() != kBackingStore && portion() != kExternalMemory;
5814 return portion() == kExternalMemory;
5818 return portion() == kStringLengths;
5822 return OffsetField::decode(value_);
5834 return ImmutableField::decode(value_);
5840 return ExistingInobjectPropertyField::decode(value_);
5844 return HObjectAccess(portion(), offset(), representation,
name(),
5845 immutable(), existing_inobject_property());
5849 return HObjectAccess(
5854 return HObjectAccess(kDouble,
5860 return HObjectAccess(kDouble,
5878 return HObjectAccess(
5885 static HObjectAccess ForAllocationSiteOffset(
int offset);
5893 return HObjectAccess(
5900 return HObjectAccess(kInobject,
5907 return HObjectAccess(
5942 return HObjectAccess(kInobject,
5955 return HObjectAccess(kInobject,
5961 return HObjectAccess(kInobject,
5984 static HObjectAccess ForFixedArrayHeader(
int offset);
5991 static HObjectAccess ForMapAndOffset(
Handle<Map> map,
int offset,
6003 static HObjectAccess ForJSArrayOffset(
int offset);
6005 static HObjectAccess ForContextSlot(
int index);
6008 static HObjectAccess ForBackingStoreOffset(
int offset,
6016 static HObjectAccess ForCellPayload(
Isolate* isolate);
6019 return HObjectAccess::ForObservableJSObjectOffset(
6024 return HObjectAccess::ForObservableJSObjectOffset(
6029 return HObjectAccess::ForObservableJSObjectOffset(
6034 return HObjectAccess::ForObservableJSObjectOffset(
6039 return HObjectAccess::ForObservableJSObjectOffset(
6044 return HObjectAccess::ForObservableJSObjectOffset(
6049 return HObjectAccess::ForObservableJSObjectOffset(
6054 return HObjectAccess::ForObservableJSObjectOffset(
6064 inline bool Equals(HObjectAccess that)
const {
6065 return value_ == that.value_;
6084 HObjectAccess() : value_(0) {}
6086 HObjectAccess(Portion portion,
int offset,
6089 bool immutable =
false,
6090 bool existing_inobject_property =
true)
6091 : value_(PortionField::encode(portion) |
6092 RepresentationField::encode(representation.kind()) |
6093 ImmutableField::encode(immutable ? 1 : 0) |
6094 ExistingInobjectPropertyField::encode(
6095 existing_inobject_property ? 1 : 0) |
6096 OffsetField::encode(offset)),
6099 ASSERT(this->offset() == offset);
6100 ASSERT(this->portion() == portion);
6101 ASSERT(this->immutable() == immutable);
6102 ASSERT(this->existing_inobject_property() == existing_inobject_property);
6103 ASSERT(RepresentationField::decode(value_) == representation.kind());
6104 ASSERT(!this->existing_inobject_property() || IsInobject());
6107 class PortionField :
public BitField<Portion, 0, 3> {};
6108 class RepresentationField :
public BitField<Representation::Kind, 3, 4> {};
6109 class ImmutableField :
public BitField<bool, 7, 1> {};
6110 class ExistingInobjectPropertyField :
public BitField<bool, 8, 1> {};
6111 class OffsetField :
public BitField<int, 9, 23> {};
6114 Handle<String> name_;
6116 friend class HLoadNamedField;
6117 friend class HStoreNamedField;
6118 friend class SideEffectsTracker;
6120 inline Portion portion()
const {
6121 return PortionField::decode(value_);
6126 class HLoadNamedField
V8_FINAL :
public HTemplateInstruction<2> {
6134 return OperandAt(1);
6137 HObjectAccess
access()
const {
return access_; }
6139 return access_.representation();
6144 return !access().IsInobject() || access().offset() >=
size;
6147 if (index == 0 && access().IsExternalMemory()) {
6160 HLoadNamedField* b = HLoadNamedField::cast(other);
6161 return access_.Equals(b->access_);
6165 HLoadNamedField(
HValue*
object,
6167 HObjectAccess access) : access_(access) {
6169 SetOperandAt(0,
object);
6170 SetOperandAt(1, dependency != NULL ? dependency :
object);
6178 }
else if (representation.
IsSmi()) {
6179 set_type(HType::Smi());
6180 if (SmiValuesAre32Bits()) {
6183 set_representation(representation);
6185 }
else if (representation.
IsDouble() ||
6188 set_representation(representation);
6190 set_type(HType::NonPrimitive());
6195 access.SetGVNFlags(
this,
LOAD);
6198 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
6200 HObjectAccess access_;
6204 class HLoadNamedGeneric
V8_FINAL :
public HTemplateInstruction<2> {
6224 SetOperandAt(0, context);
6225 SetOperandAt(1,
object);
6227 SetAllSideEffects();
6234 class HLoadFunctionPrototype
V8_FINAL :
public HUnaryOperation {
6250 explicit HLoadFunctionPrototype(
HValue*
function)
6251 : HUnaryOperation(function) {
6254 SetDependsOnFlag(kCalls);
6260 virtual HValue* GetKey() = 0;
6261 virtual void SetKey(
HValue* key) = 0;
6262 virtual void SetIndexOffset(uint32_t index_offset) = 0;
6263 virtual int MaxIndexOffsetBits() = 0;
6264 virtual bool IsDehoisted() = 0;
6265 virtual void SetDehoisted(
bool is_dehoisted) = 0;
6282 :
public HTemplateInstruction<3>,
public ArrayInstructionInterface {
6296 return is_external() || is_fixed_typed_array();
6302 return OperandAt(2);
6307 bit_field_ = IndexOffsetField::update(bit_field_, index_offset);
6310 return kBitsForIndexOffset;
6316 bit_field_ = IsDehoistedField::update(bit_field_, is_dehoisted);
6319 return ElementsKindField::decode(bit_field_);
6322 return HoleModeField::decode(bit_field_);
6336 OperandAt(1)->representation());
6342 return RequiredInputRepresentation(index);
6347 bool UsesMustHandleHole()
const;
6348 bool AllUsesCanTreatHoleAsNaN()
const;
6349 bool RequiresHoleCheck()
const;
6357 if (!other->IsLoadKeyed())
return false;
6358 HLoadKeyed* other_load = HLoadKeyed::cast(other);
6360 if (IsDehoisted() && index_offset() != other_load->index_offset())
6362 return elements_kind() == other_load->elements_kind();
6372 bit_field_ = ElementsKindField::encode(elements_kind) |
6373 HoleModeField::encode(
mode);
6375 SetOperandAt(0, obj);
6376 SetOperandAt(1, key);
6377 SetOperandAt(2, dependency != NULL ? dependency : obj);
6379 if (!is_typed_elements()) {
6389 set_type(HType::Smi());
6390 if (SmiValuesAre32Bits() && !RequiresHoleCheck()) {
6399 SetDependsOnFlag(kArrayElements);
6402 SetDependsOnFlag(kDoubleArrayElements);
6414 if (is_external()) {
6415 SetDependsOnFlag(kExternalMemory);
6416 }
else if (is_fixed_typed_array()) {
6417 SetDependsOnFlag(kTypedArrayElements);
6422 SetDependsOnFlag(kCalls);
6429 return !RequiresHoleCheck();
6433 enum LoadKeyedBits {
6434 kBitsForElementsKind = 5,
6435 kBitsForHoleMode = 1,
6436 kBitsForIndexOffset = 25,
6437 kBitsForIsDehoisted = 1,
6439 kStartElementsKind = 0,
6440 kStartHoleMode = kStartElementsKind + kBitsForElementsKind,
6441 kStartIndexOffset = kStartHoleMode + kBitsForHoleMode,
6442 kStartIsDehoisted = kStartIndexOffset + kBitsForIndexOffset
6446 kBitsForIsDehoisted) <=
sizeof(uint32_t)*8);
6448 class ElementsKindField:
6449 public BitField<ElementsKind, kStartElementsKind, kBitsForElementsKind>
6451 class HoleModeField:
6452 public BitField<LoadKeyedHoleMode, kStartHoleMode, kBitsForHoleMode>
6454 class IndexOffsetField:
6455 public BitField<uint32_t, kStartIndexOffset, kBitsForIndexOffset>
6457 class IsDehoistedField:
6458 public BitField<bool, kStartIsDehoisted, kBitsForIsDehoisted>
6460 uint32_t bit_field_;
6464 class HLoadKeyedGeneric
V8_FINAL :
public HTemplateInstruction<3> {
6486 SetOperandAt(0, obj);
6487 SetOperandAt(1, key);
6488 SetOperandAt(2, context);
6489 SetAllSideEffects();
6505 class HStoreNamedField
V8_FINAL :
public HTemplateInstruction<3> {
6508 HObjectAccess, HValue*);
6518 return !access().IsInobject() || access().offset() >=
size;
6521 if (index == 0 && access().IsExternalMemory()) {
6524 }
else if (index == 1) {
6525 if (field_representation().IsInteger8() ||
6526 field_representation().IsUInteger8() ||
6527 field_representation().IsInteger16() ||
6528 field_representation().IsUInteger16() ||
6529 field_representation().IsInteger32()) {
6531 }
else if (field_representation().IsDouble()) {
6532 return field_representation();
6533 }
else if (field_representation().IsSmi()) {
6537 return field_representation();
6538 }
else if (field_representation().IsExternal()) {
6546 ASSERT(side_effect == kNewSpacePromotion);
6547 if (!FLAG_use_write_barrier_elimination)
return false;
6548 new_space_dominator_ = dominator;
6562 HObjectAccess
access()
const {
return access_; }
6568 if (has_transition()) {
6570 HConstant::cast(transition())->
handle(Isolate::Current()));
6577 ASSERT(!has_transition());
6579 if (map->CanBeDeprecated()) {
6582 SetOperandAt(2, map_constant);
6583 has_transition_ =
true;
6587 ASSERT(!field_representation().IsDouble() || !has_transition());
6588 if (IsSkipWriteBarrier())
return false;
6589 if (field_representation().IsDouble())
return false;
6590 if (field_representation().IsSmi())
return false;
6591 if (field_representation().IsInteger32())
return false;
6592 if (field_representation().IsExternal())
return false;
6595 new_space_dominator());
6599 if (IsSkipWriteBarrier())
return false;
6601 new_space_dominator());
6605 return access_.representation();
6609 SetOperandAt(1, value);
6613 HStoreNamedField(
HValue* obj,
6614 HObjectAccess access,
6618 new_space_dominator_(NULL),
6620 has_transition_(
false),
6621 store_mode_(store_mode) {
6624 ASSERT(!access.IsInobject() || access.existing_inobject_property() ||
6625 obj->IsAllocate() || obj->IsInnerAllocatedObject());
6626 SetOperandAt(0, obj);
6627 SetOperandAt(1, val);
6628 SetOperandAt(2, obj);
6629 access.SetGVNFlags(
this,
STORE);
6632 HObjectAccess access_;
6633 HValue* new_space_dominator_;
6635 bool has_transition_ : 1;
6640 class HStoreNamedGeneric
V8_FINAL :
public HTemplateInstruction<3> {
6643 Handle<String>, HValue*,
6660 HStoreNamedGeneric(
HValue* context,
6666 strict_mode_(strict_mode) {
6667 SetOperandAt(0,
object);
6668 SetOperandAt(1, value);
6669 SetOperandAt(2, context);
6670 SetAllSideEffects();
6679 :
public HTemplateInstruction<3>,
public ArrayInstructionInterface {
6695 }
else if (index == 1) {
6697 OperandAt(1)->representation());
6711 return is_external() || is_fixed_typed_array()
6725 return is_external() || is_fixed_typed_array();
6729 if (index < 2)
return RequiredInputRepresentation(index);
6730 if (IsUninitialized()) {
6742 if (is_typed_elements()) {
6768 is_uninitialized_ = is_uninitialized;
6772 return value()->IsConstant() && HConstant::cast(value())->IsTheHole();
6777 ASSERT(side_effect == kNewSpacePromotion);
6778 new_space_dominator_ = dominator;
6785 if (value_is_smi()) {
6790 new_space_dominator());
6794 bool NeedsCanonicalization();
6804 : elements_kind_(elements_kind),
6806 is_dehoisted_(
false),
6807 is_uninitialized_(false),
6808 store_mode_(store_mode),
6809 new_space_dominator_(NULL) {
6810 SetOperandAt(0, obj);
6811 SetOperandAt(1, key);
6812 SetOperandAt(2, val);
6818 SetFlag(kTrackSideEffectDominators);
6819 SetDependsOnFlag(kNewSpacePromotion);
6821 if (is_external()) {
6822 SetChangesFlag(kExternalMemory);
6823 SetFlag(kAllowUndefinedAsNaN);
6825 SetChangesFlag(kDoubleArrayElements);
6827 SetChangesFlag(kArrayElements);
6828 }
else if (is_fixed_typed_array()) {
6829 SetChangesFlag(kTypedArrayElements);
6830 SetFlag(kAllowUndefinedAsNaN);
6832 SetChangesFlag(kArrayElements);
6840 SetFlag(kTruncatingToInt32);
6845 uint32_t index_offset_;
6846 bool is_dehoisted_ : 1;
6847 bool is_uninitialized_ : 1;
6849 HValue* new_space_dominator_;
6853 class HStoreKeyedGeneric
V8_FINAL :
public HTemplateInstruction<4> {
6874 HStoreKeyedGeneric(
HValue* context,
6879 : strict_mode_(strict_mode) {
6880 SetOperandAt(0,
object);
6881 SetOperandAt(1, key);
6882 SetOperandAt(2, value);
6883 SetOperandAt(3, context);
6884 SetAllSideEffects();
6891 class HTransitionElementsKind
V8_FINAL :
public HTemplateInstruction<2> {
6893 inline static HTransitionElementsKind*
New(
Zone* zone,
6898 return new(zone) HTransitionElementsKind(context,
object,
6899 original_map, transitioned_map);
6919 HTransitionElementsKind* instr = HTransitionElementsKind::cast(other);
6920 return original_map_ == instr->original_map_ &&
6921 transitioned_map_ == instr->transitioned_map_;
6927 HTransitionElementsKind(
HValue* context,
6931 : original_map_(Unique<
Map>(original_map)),
6932 transitioned_map_(Unique<
Map>(transitioned_map)),
6933 from_kind_(original_map->elements_kind()),
6934 to_kind_(transitioned_map->elements_kind()) {
6935 SetOperandAt(0,
object);
6936 SetOperandAt(1, context);
6938 SetChangesFlag(kElementsKind);
6940 SetChangesFlag(kElementsPointer);
6941 SetChangesFlag(kNewSpacePromotion);
6946 Unique<Map> original_map_;
6947 Unique<Map> transitioned_map_;
6953 class HStringAdd
V8_FINAL :
public HBinaryOperation {
6955 static HInstruction* New(Zone* zone,
6961 Handle<AllocationSite> allocation_site =
6962 Handle<AllocationSite>::null());
6977 return flags_ == HStringAdd::cast(other)->flags_ &&
6978 pretenure_flag_ == HStringAdd::cast(other)->pretenure_flag_;
6982 HStringAdd(
HValue* context,
6989 flags_(flags), pretenure_flag_(pretenure_flag) {
6992 SetDependsOnFlag(kMaps);
6993 SetChangesFlag(kNewSpacePromotion);
6994 if (FLAG_trace_pretenuring) {
6995 PrintF(
"HStringAdd with AllocationSite %p %s\n",
6997 ?
static_cast<void*
>(
NULL)
6998 : static_cast<void*>(*allocation_site),
6999 pretenure_flag ==
TENURED ?
"tenured" :
"not tenured");
7004 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7011 class HStringCharCodeAt
V8_FINAL :
public HTemplateInstruction<3> {
7039 SetOperandAt(0, context);
7040 SetOperandAt(1,
string);
7041 SetOperandAt(2, index);
7044 SetDependsOnFlag(kMaps);
7045 SetDependsOnFlag(kStringChars);
7046 SetChangesFlag(kNewSpacePromotion);
7050 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7054 class HStringCharFromCode
V8_FINAL :
public HTemplateInstruction<2> {
7056 static HInstruction* New(Zone* zone,
7074 HStringCharFromCode(
HValue* context,
HValue* char_code)
7075 : HTemplateInstruction<2>(HType::
String()) {
7076 SetOperandAt(0, context);
7077 SetOperandAt(1, char_code);
7080 SetChangesFlag(kNewSpacePromotion);
7084 return !value()->ToNumberCanBeObserved();
7093 : literal_index_(index), depth_(depth), allocation_site_mode_(
mode) {
7098 : literal_index_(index), depth_(depth),
7106 return allocation_site_mode_;
7118 class HRegExpLiteral
V8_FINAL :
public HMaterializedLiteral<1> {
7138 HRegExpLiteral(
HValue* context,
7144 literals_(literals),
7147 SetOperandAt(0, context);
7148 SetAllSideEffects();
7149 set_type(HType::JSObject());
7158 class HFunctionLiteral
V8_FINAL :
public HTemplateInstruction<1> {
7161 Handle<SharedFunctionInfo>,
7178 HFunctionLiteral(
HValue* context,
7181 : HTemplateInstruction<1>(HType::
JSObject()),
7182 shared_info_(shared),
7183 pretenure_(pretenure),
7184 has_no_literals_(shared->num_literals() == 0),
7185 is_generator_(shared->is_generator()),
7186 strict_mode_(shared->strict_mode()) {
7187 SetOperandAt(0, context);
7189 SetChangesFlag(kNewSpacePromotion);
7192 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7194 Handle<SharedFunctionInfo> shared_info_;
7195 bool pretenure_ : 1;
7196 bool has_no_literals_ : 1;
7197 bool is_generator_ : 1;
7202 class HTypeof
V8_FINAL :
public HTemplateInstruction<2> {
7219 SetOperandAt(0, context);
7220 SetOperandAt(1, value);
7224 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7228 class HTrapAllocationMemento
V8_FINAL :
public HTemplateInstruction<1> {
7241 explicit HTrapAllocationMemento(
HValue* obj) {
7242 SetOperandAt(0, obj);
7247 class HToFastProperties
V8_FINAL :
public HUnaryOperation {
7258 explicit HToFastProperties(
HValue* value) : HUnaryOperation(value) {
7260 SetChangesFlag(kNewSpacePromotion);
7265 ASSERT(value->IsCallRuntime());
7268 ASSERT(function->function_id == Runtime::kHiddenCreateObjectLiteral);
7272 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7276 class HDateField
V8_FINAL :
public HUnaryOperation {
7290 : HUnaryOperation(date), index_(index) {
7298 class HSeqStringGetChar
V8_FINAL :
public HTemplateInstruction<2> {
7300 static HInstruction* New(Zone* zone,
7319 return encoding() == HSeqStringGetChar::cast(other)->encoding();
7334 HValue* index) : encoding_(encoding) {
7335 SetOperandAt(0,
string);
7336 SetOperandAt(1, index);
7339 SetDependsOnFlag(kStringChars);
7342 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7348 class HSeqStringSetChar
V8_FINAL :
public HTemplateInstruction<4> {
7352 HValue*, HValue*, HValue*);
7368 HSeqStringSetChar(
HValue* context,
7369 String::Encoding encoding,
7372 HValue* value) : encoding_(encoding) {
7373 SetOperandAt(0, context);
7374 SetOperandAt(1,
string);
7375 SetOperandAt(2, index);
7376 SetOperandAt(3, value);
7378 SetChangesFlag(kStringChars);
7385 class HCheckMapValue
V8_FINAL :
public HTemplateInstruction<2> {
7396 return HType::Tagged();
7405 virtual
int RedefinedOperandIndex() {
return 0; }
7412 HCheckMapValue(
HValue* value,
7414 SetOperandAt(0, value);
7415 SetOperandAt(1, map);
7418 SetDependsOnFlag(kMaps);
7419 SetDependsOnFlag(kElementsKind);
7424 class HForInPrepareMap
V8_FINAL :
public HTemplateInstruction<2> {
7438 return HType::Tagged();
7444 HForInPrepareMap(
HValue* context,
7446 SetOperandAt(0, context);
7447 SetOperandAt(1,
object);
7449 SetAllSideEffects();
7454 class HForInCacheArray
V8_FINAL :
public HTemplateInstruction<2> {
7467 return index_cache_;
7471 index_cache_ = index_cache;
7477 return HType::Tagged();
7483 HForInCacheArray(
HValue* enumerable,
7485 int idx) : idx_(idx) {
7486 SetOperandAt(0, enumerable);
7487 SetOperandAt(1, keys);
7492 HForInCacheArray* index_cache_;
7496 class HLoadFieldByIndex
V8_FINAL :
public HTemplateInstruction<2> {
7500 SetOperandAt(0,
object);
7501 SetOperandAt(1, index);
7515 return HType::Tagged();
7521 virtual bool IsDeletable() const
V8_OVERRIDE {
return true; }
7525 #undef DECLARE_INSTRUCTION
7526 #undef DECLARE_CONCRETE_INSTRUCTION
7530 #endif // V8_HYDROGEN_INSTRUCTIONS_H_
static HObjectAccess ForFunctionContextPointer()
StoreFieldOrKeyedMode store_mode() const
HSourcePosition position() const
bool HasObservableSideEffects() const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
bool HasMonomorphicJSObjectType()
bool UpdateInferredType()
bool CanTruncateToInt32() const
static HObjectAccess ForCounter()
virtual bool IsPurelyInformativeDefinition() V8_OVERRIDE
void StackUpon(Range *other)
virtual bool HasEscapingOperandAt(int index) V8_OVERRIDE
static const int kHashFieldOffset
void SetDependsOnFlag(GVNFlag f)
void SetChangesFlag(GVNFlag f)
static HContext * New(Zone *zone)
HPhi(int merged_index, Zone *zone)
void SetOperandPositions(Zone *zone, HSourcePosition left_pos, HSourcePosition right_pos)
static Representation UInteger8()
virtual Opcode opcode() const V8_OVERRIDE
static HInvokeFunction * New(Zone *zone, HValue *context, HValue *function, Handle< JSFunction > known_function, int argument_count)
HValue * BetterRightOperand()
void set_incoming_value(HPhi *value)
static HSourcePosition Unknown()
virtual Representation observed_input_representation(int index) V8_OVERRIDE
static const int kCodeOffset
static bool is_the_hole_nan(double value)
InductionVariableLimitUpdate()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(profile_deserialization
bool IsExternalArrayElementsKind(ElementsKind kind)
Handle< String > pattern()
bool IsHoleyElementsKind(ElementsKind kind)
static const int kCodeEntryOffset
static HObjectAccess ForStringHashField()
#define DECLARE_INSTRUCTION_FACTORY_P2(I, P1, P2)
virtual Representation RequiredInputRepresentation(int index) V8_FINAL V8_OVERRIDE
virtual void RepresentationChanged(Representation to) V8_OVERRIDE
void AddPushedValue(HValue *value)
Handle< String > name() const
#define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P4(I, P1, P2, P3, P4)
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
bool IsUInteger16() const
static const int kPrototypeOrInitialMapOffset
virtual intptr_t Hashcode() V8_OVERRIDE
#define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P3(I, P1, P2, P3)
GVNFlagSet depends_on_flags_
static const int kValueOffset
const CallInterfaceDescriptor * descriptor() const
virtual void FinalizeUniqueness() V8_OVERRIDE
bool HasExternalReferenceValue() const
bool LowerLimitIsNonNegativeConstant()
ToBooleanStub::Types expected_input_types() const
bool IsExternalMemory() const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
static Representation Smi()
void CopyFlag(Flag f, HValue *other)
void PrintF(const char *format,...)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Heap::RootListIndex index() const
virtual void initialize_output_representation(Representation observed)
bool InNewSpace(Object *object)
virtual bool DataEquals(HValue *other)
static const int kMaxUtf16CodeUnit
Deoptimizer::BailoutType type()
virtual HValue * Canonicalize()
static HType NonPrimitive()
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
static HObjectAccess ForConsStringSecond()
static HDeoptimize * New(Zone *zone, HValue *context, const char *reason, Deoptimizer::BailoutType type, HBasicBlock *unreachable_continuation)
HUseListNode * RemoveUse(HValue *value, int index)
bool is_generator() const
HInstruction * previous() const
int formal_parameter_count() const
HSuccessorIterator(HControlInstruction *instr)
bool IsFastObjectElementsKind(ElementsKind kind)
virtual bool KnownSuccessorBlock(HBasicBlock **block)
bool HasNumberValue() const
Token::Value token() const
bool CheckChangesFlag(GVNFlag f) const
virtual void InternalSetOperandAt(int index, HValue *value) V8_FINAL V8_OVERRIDE
StrictMode strict_mode() const
void ClearDependsOnFlag(GVNFlag f)
void set_ast_id(BailoutId id)
void set_arguments_pushed()
static const int kOptimizedCodeMapOffset
const ZoneList< HValue * > * arguments_values() const
virtual int OperandCount() V8_FINAL V8_OVERRIDE
static HObjectAccess ForAllocationMementoSite()
int double_non_phi_uses() const
Handle< JSFunction > known_function()
HBasicBlock * block() const
static bool IsCompareOp(Value op)
static Handle< T > cast(Handle< S > that)
int environment_length() const
HEnvironment * environment()
virtual Representation observed_input_representation(int index) V8_OVERRIDE
static Representation Integer32()
double DoubleValue() const
static HObjectAccess ForHeapNumberValue()
void set_observed_input_representation(Representation left, Representation right)
bool object_in_new_space() const
Handle< Object > name() const
virtual Representation observed_input_representation(int index) V8_OVERRIDE
HLoadFieldByIndex(HValue *object, HValue *index)
HUnaryOperation(HValue *value, HType type=HType::Tagged())
virtual LInstruction * CompileToLithium(LChunkBuilder *builder)=0
virtual int OperandCount() V8_FINAL V8_OVERRIDE
bool HasDependency() const
HUseListNode(HValue *value, int index, HUseListNode *tail)
Range * CopyClearUpper(Zone *zone) const
Representation representation_
TypeImpl< ZoneTypeConfig > Type
static HObjectAccess ForAllocationSiteList()
bool Includes(int value) const
HArgumentsObject * arguments_object()
HValue * new_space_dominator() const
virtual void AssumeRepresentation(Representation r)
Range * Copy(Zone *zone) const
virtual void PrintDataTo(StringStream *stream) V8_OVERRIDE
static const int kExternalPointerOffset
HBasicBlock * FirstSuccessor()
bool EqualsInteger32Constant(int32_t value)
static HObjectAccess ForArrayLength(ElementsKind elements_kind)
static const int kNoRedefinedOperand
void set_position(int position)
void set_index_cache(HForInCacheArray *index_cache)
bool RequiresHoleCheck() const
Isolate * isolate() const
int tagged_non_phi_uses() const
bool Apply(HValue *other_base, int other_offset, int other_scale=0)
virtual void InternalSetOperandAt(int index, HValue *value) V8_OVERRIDE
virtual Representation observed_input_representation(int index) V8_OVERRIDE
static const int kByteLengthOffset
virtual HBasicBlock * SuccessorAt(int i)=0
bool has_transition() const
ParameterKind kind() const
void set_known_initial_map(Handle< Map > known_initial_map)
unibrow::Mapping< unibrow::Ecma262Canonicalize > Canonicalize
void SwapValues(HValue **other_base, int *other_offset, int *other_scale)
static HInstruction * New(Zone *zone, HValue *context, HValue *left, HValue *right)
static HEnterInlined * New(Zone *zone, HValue *context, Handle< JSFunction > closure, int arguments_count, FunctionLiteral *function, InliningKind inlining_kind, Variable *arguments_var, HArgumentsObject *arguments_object)
#define ASSERT(condition)
bool ToStringCanBeObserved() const
bool IsHeapObject() const
bool is_typed_elements() const
bool CanBeNegative() const
Unique< Map > original_map()
virtual void RepresentationChanged(Representation to) V8_OVERRIDE
LimitFromPredecessorBlock()
virtual void InferRepresentation(HInferRepresentationPhase *h_infer)
HChange(HValue *value, Representation to, bool is_truncating_to_smi, bool is_truncating_to_int32)
static const int kInstanceSizeOffset
AllocationSiteMode allocation_site_mode() const
bool IsUndetectable() const
virtual Representation KnownOptimalRepresentation() V8_OVERRIDE
HObjectAccess WithRepresentation(Representation representation)
virtual void PrintDataTo(StringStream *stream)
bool IsTaggedPrimitive() const
virtual Representation KnownOptimalRepresentation()
HValue * parameter_count()
void SetTransition(HConstant *map_constant, CompilationInfo *info)
Range * CopyClearLower(Zone *zone) const
static HObjectAccess ForOptimizedCodeMap()
Handle< Object > NewNumber(double value, PretenureFlag pretenure=NOT_TENURED)
static Representation Double()
static HObjectAccess ForNextFunctionLinkPointer()
virtual Representation RequiredInputRepresentation(int index) V8_FINAL V8_OVERRIDE
LoadKeyedHoleMode hole_mode() const
bool ContainsAnyOf(const EnumSet &set) const
void set_observed_input_representation(int index, Representation rep)
#define DECLARE_FLAG(Type)
void AddDependantsToWorklist(HInferRepresentationPhase *h_infer)
bool IsStringLength() const
bool IsFastElementsKind(ElementsKind kind)
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
bool IsHeapNumber() const
bool arguments_pushed() const
String::Encoding encoding()
bool IsNonPrimitive() const
bool HasDoubleValue() const
Representation representation() const
static const int kContextOffset
MinusZeroMode GetMinusZeroMode()
static HCheckValue * New(Zone *zone, HValue *context, HValue *value, Unique< HeapObject > target, bool object_in_new_space)
bool IsInformativeDefinition()
void set_operand_position(int idx, HSourcePosition pos)
static HObjectAccess ForExternalArrayExternalPointer()
Representation representation() const
Unique< Object > GetUnique() const
GVNFlagSet DependsOnFlags() const
virtual int OperandCount() V8_OVERRIDE
void DetectInductionVariable()
bool HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const
void set_representation(Representation r)
virtual int argument_count() const
Variable * arguments_var()
#define DECLARE_OPCODE(type)
void set_next_simulate(HSimulate *simulate)
InductionVariableCheck(HBoundsCheck *check, InductionVariableCheck *next, int32_t upper_limit=kNoLimit)
#define DECLARE_INSTRUCTION_FACTORY_P0(I)
void AddAssignedValue(int index, HValue *value)
virtual bool HasEscapingOperandAt(int index)
bool DetectCompoundIndex()
HBinaryCall(HValue *first, HValue *second, int argument_count)
HBasicBlock * induction_exit_block()
GVNFlagSet ObservableChangesFlags() const
InductionVariableData * induction_variable_data()
HBinaryOperation(HValue *context, HValue *left, HValue *right, HType type=HType::Tagged())
#define DECLARE_ABSTRACT_INSTRUCTION(type)
void set_known_successor_index(int known_successor_index)
Unique< Map > transitioned_map()
static const int kFirstOffset
Range(int32_t lower, int32_t upper)
virtual HValue * OperandAt(int index) const V8_FINAL V8_OVERRIDE
static HObjectAccess ForCodeOffset()
static const int kWeakFirstViewOffset
const ZoneList< HValue * > * values() const
virtual void UpdateRepresentation(Representation new_rep, HInferRepresentationPhase *h_infer, const char *reason) V8_OVERRIDE
#define HYDROGEN_CONCRETE_INSTRUCTION_LIST(V)
static HCallWithDescriptor * New(Zone *zone, HValue *context, HValue *target, int argument_count, const CallInterfaceDescriptor *descriptor, Vector< HValue * > &operands)
virtual Representation RequiredInputRepresentation(int index) V8_FINAL V8_OVERRIDE
Handle< String > StringValue() const
CallFunctionFlags function_flags() const
virtual HType CalculateInferredType()
virtual Range * InferRange(Zone *zone) V8_OVERRIDE
static const int kLiteralsOffset
virtual bool EmitAtUses()
virtual bool HandleSideEffectDominator(GVNFlag side_effect, HValue *dominator)
void AddNewRange(Range *r, Zone *zone)
void set_can_be_minus_zero(bool b)
virtual int OperandCount()=0
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_bool(enable_unaligned_accesses
virtual int RedefinedOperandIndex() V8_OVERRIDE
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
bool Equals(HValue *other)
HSourcePosition operand_position(int idx) const
#define DECLARE_INSTRUCTION_FACTORY_P3(I, P1, P2, P3)
virtual Representation RequiredInputRepresentation(int index)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
int32_t GetInteger32Constant()
void PrintRangeTo(StringStream *stream)
static HObjectAccess ForGlobalObjectNativeContext()
static const int kLengthOffset
virtual HType CalculateInferredType() V8_FINAL V8_OVERRIDE
static HObjectAccess ForPropertyCellValue()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
bool AreOperandsBetterSwitched()
static HArgumentsObject * New(Zone *zone, HValue *context, int count)
static Representation FromKind(Kind kind)
static GVNFlagSet AllSideEffectsFlagSet()
VariableMode mode() const
static const int kValueOffset
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
HValue(HType type=HType::Tagged())
void PrintChangesTo(StringStream *stream)
virtual bool HasOutOfBoundsAccess(int size)
void set_upper_limit(int32_t upper_limit)
void MakePrefillWithFiller()
static HConstant * CreateAndInsertBefore(Zone *zone, Unique< Object > unique, bool is_not_in_new_space, HInstruction *instruction)
bool has_position() const
virtual void RepresentationChanged(Representation to)
bool additional_upper_limit_is_included()
virtual void DeleteFromGraph() V8_OVERRIDE
bool HasMap(Handle< Map > map)
static HUnaryOperation * cast(HValue *value)
static HStoreCodeEntry * New(Zone *zone, HValue *context, HValue *function, HValue *code)
static uint32_t update(uint32_tprevious, T value)
HDematerializedObject(int count, Zone *zone)
bool CheckDependsOnFlag(GVNFlag f) const
void ReuseSideEffectsFromStore(HInstruction *store)
bool value_is_smi() const
virtual void PrintDataTo(StringStream *stream) V8_OVERRIDE
static HObjectAccess ForMap()
bool HasInternalizedStringValue() const
bool deoptimize_on_minus_zero() const
static const int kMinValue
HArithmeticBinaryOperation(HValue *context, HValue *left, HValue *right)
static const int kBufferOffset
virtual int argument_delta() const V8_OVERRIDE
void check(i::Vector< const uint8_t > string)
HValue * RedefinedOperand()
bool HasStringValue() const
bool StoringValueNeedsWriteBarrier(HValue *value)
void set_map_set(UniqueSet< Map > *maps, Zone *zone)
Representation RepresentationFromUses()
#define DECLARE_PREDICATE(type)
Representation field_representation() const
bool IsDefinedAfter(HBasicBlock *other) const
int known_successor_index() const
static HObjectAccess ForFixedArrayLength()
static HObjectAccess ForJSArrayBufferViewByteLength()
static HObjectAccess ForJSTypedArrayLength()
void ComputeInitialRange(Zone *zone)
bool ReceiverObjectNeedsWriteBarrier(HValue *object, HValue *value, HValue *new_space_dominator)
bool Equals(const Representation &other) const
virtual Representation RepresentationFromInputs() V8_OVERRIDE
bool IsOldPointerSpaceAllocation() const
bool IsSmiOrInteger32() const
static GVNFlagSet AllObservableSideEffectsFlagSet()
#define GVN_TRACKED_FLAG_LIST(V)
static HCheckValue * New(Zone *zone, HValue *context, HValue *value, Handle< JSFunction > func)
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
bool has_migration_target() const
virtual void UpdateRepresentation(Representation new_rep, HInferRepresentationPhase *h_infer, const char *reason) V8_OVERRIDE
bool IsMostGeneric() const
const char * reason() const
static const int kBackingStoreOffset
HSimulate(BailoutId ast_id, int pop_count, Zone *zone, RemovableSimulate removable)
static HObjectAccess ForSharedFunctionInfoPointer()
void RemoveLastAddedRange()
virtual void PrintDataTo(StringStream *stream) V8_OVERRIDE
bool HasMergedIndex() const
HValue * additional_lower_limit()
virtual Handle< Map > GetMonomorphicJSObjectMap()
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static HInstruction * NewImul(Zone *zone, HValue *context, HValue *left, HValue *right)
HBoundsCheckBaseIndexInformation(HBoundsCheck *check)
ElementsKind elements_kind() const
static HObjectAccess ForObservableJSObjectOffset(int offset, Representation representation=Representation::Tagged())
bool MustPrefillWithFiller() const
GVNFlagSet ChangesFlags() const
HValue * OperandAt(int i) const V8_OVERRIDE
void set_operand_position(Zone *zone, int index, HSourcePosition pos)
static HType TaggedNumber()
ExternalReference ExternalReferenceValue() const
bool IsSubtypeOf(const HType &other)
HBoundsCheck * bounds_check()
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
virtual void PrintTo(StringStream *stream) V8_OVERRIDE
ContainedInLattice Combine(ContainedInLattice a, ContainedInLattice b)
static const int kPropertiesOffset
HInstruction * next() const
HValue * BetterLeftOperand()
void Intersect(const EnumSet &set)
virtual HSourcePosition operand_position(int index) const
bool IsContextSlot() const
virtual void initialize_output_representation(Representation observed)
bool Equals(HObjectAccess that) const
bool existing_inobject_property() const
void ensure_storage_for_operand_positions(Zone *zone, int operand_count)
StringAddFlags flags() const
#define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P2(I, P1, P2)
bool IsFastSmiElementsKind(ElementsKind kind)
static const int kByteOffsetOffset
void set_known_successor_index(int known_successor_index)
bool IsInSmiRange() const
void set_type(HType new_type)
bool is_candidate_for_removal()
static const int kNextFunctionLinkOffset
static const int kElementsOffset
Unique< Map > map() const
virtual Handle< Map > GetMonomorphicJSObjectMap()
virtual int MaxIndexOffsetBits()
bool pass_argument_count() const
static HObjectAccess ForJSArrayBufferViewBuffer()
static bool IsEqualityOp(Value op)
static Representation External()
static const int kLengthOffset
HBasicBlock * SuccessorAt(int i) V8_OVERRIDE
void set_save_doubles(SaveFPRegsMode save_doubles)
virtual HType CalculateInferredType() V8_OVERRIDE
Representation RepresentationFromUseRequirements()
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
HTemplateInstruction(HType type=HType::Tagged())
void DeleteAndReplaceWith(HValue *other)
int ElementsKindToShiftSize(ElementsKind elements_kind)
static HPhi * cast(HValue *value)
static HObjectAccess ForConsStringFirst()
#define GVN_UNTRACKED_FLAG_LIST(V)
BitwiseDecompositionResult()
int32_t NumberValueAsInteger32() const
void SetSuccessorAt(int i, HBasicBlock *block) V8_OVERRIDE
virtual intptr_t Hashcode()
bool BooleanValue() const
int ToOperandIndex(int environment_index)
static const int kLengthOffset
virtual HSourcePosition operand_position(int index) const V8_OVERRIDE
bool CanBePositive() const
virtual bool TryDecompose(DecompositionResult *decomposition)
#define T(name, string, precedence)
Handle< FixedArray > literals()
Unique< HeapObject > object() const
virtual int RedefinedOperandIndex()
virtual HValue * OperandAt(int index) const V8_OVERRIDE
InstanceType GetInstanceType() const
static bool CompatibleInstanceTypes(InstanceType type1, InstanceType type2)
virtual HValue * OperandAt(int i) const V8_FINAL V8_OVERRIDE
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
#define DECLARE_INSTRUCTION_FACTORY_P4(I, P1, P2, P3, P4)
HLeaveInlined(HEnterInlined *entry, int drop_count)
void PrintNameTo(StringStream *stream)
bool IsSkipWriteBarrier() const
Representation generalize(Representation other)
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
virtual void UpdateRepresentation(Representation new_rep, HInferRepresentationPhase *h_infer, const char *reason) V8_OVERRIDE
const int kElementsKindCount
bool MustClearNextMapWord() const
void InsertAfter(HInstruction *previous)
virtual int SuccessorCount()=0
virtual Representation RequiredInputRepresentation(int index) V8_FINAL V8_OVERRIDE
virtual Representation observed_input_representation(int index) V8_OVERRIDE
HLoadContextSlot(HValue *context, Variable *var)
bool NotInNewSpace() const
virtual Opcode opcode() const =0
virtual void DeleteFromGraph()=0
bool CannotBeEliminated() const
bool EqualsUnique(Unique< Object > other) const
virtual void SetSuccessorAt(int i, HBasicBlock *block)=0
static const int kMapOffset
static HObjectAccess ForHeapNumberValueHighestBits()
void UpdateValue(HValue *value)
Handle< FixedArray > pairs() const
virtual Representation RequiredInputRepresentation(int index)=0
virtual HValue * OperandAt(int index) const V8_FINAL V8_OVERRIDE
bool IsTaggedNumber() const
virtual HSourcePosition position() const
void InternalSetOperandAt(int i, HValue *value) V8_OVERRIDE
static HObjectAccess ForPropertiesPointer()
virtual HSourcePosition position() const V8_OVERRIDE
virtual bool IsPurelyInformativeDefinition()
virtual void FinalizeUniqueness()
static const int kLengthOffset
static HType TaggedPrimitive()
void ChangeRepresentation(Representation r)
virtual int argument_count() const
static const int kSecondOffset
static HConstant * CreateAndInsertBefore(Zone *zone, HValue *context, int32_t value, Representation representation, HInstruction *instruction)
bool NeedsWriteBarrierForMap()
InductionVariableData * variable
bool HasAssignedIndexAt(int index) const
Handle< T > handle(T *t, Isolate *isolate)
static HType HeapNumber()
bool Contains(E element) const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
virtual bool HandleSideEffectDominator(GVNFlag side_effect, HValue *dominator) V8_OVERRIDE
void set_allow_equality(bool v)
static HObjectAccess ForJSArrayBufferWeakFirstView()
bool IsLimitedInductionVariable()
void SetOperandAt(int index, HValue *value)
GVNFlagSet changes_flags_
HBasicBlock * SecondSuccessor()
PretenureFlag pretenure_flag() const
virtual int argument_delta() const V8_OVERRIDE
bool IsNewSpaceAllocation() const
virtual Range * InferRange(Zone *zone)
void SetDehoisted(bool is_dehoisted)
Representation GetInputRepresentation() const
bool CheckFlag(Flag f) const
bool ToStringOrToNumberCanBeObserved(Representation representation)
static HObjectAccess ForStringLength()
void set_position(HSourcePosition pos)
int int32_indirect_uses() const
CodeStub::Major major_key()
HUnaryCall(HValue *value, int argument_count)
#define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V)
#define DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P1(I, P1)
static HObjectAccess ForMapInstanceType()
void set_position(HSourcePosition position)
bool IsOldDataSpaceAllocation() const
int GetAssignedIndexAt(int index) const
ZoneList< HValue * > values_
virtual Representation RepresentationFromInputs()
void InsertBefore(HInstruction *next)
static HInnerAllocatedObject * New(Zone *zone, HValue *context, HValue *value, HValue *offset, HType type=HType::Tagged())
bool from_inlined() const
virtual HValue * Canonicalize() V8_OVERRIDE
int arguments_count() const
int double_indirect_uses() const
InductionVariableData * updated_variable
bool Equals(const HType &other) const
GVNFlagSet SideEffectFlags() const
static Handle< String > null()
int tagged_indirect_uses() const
int smi_non_phi_uses() const
bool CanReplaceWithDummyUses()
int32_t Integer32Value() const
static Representation KeyedAccessIndexRequirement(Representation r)
void set_inlining_id(int inlining_id)
bool CanBeMinusZero() const
virtual ~ArrayInstructionInterface()
Unique< Cell > cell() const
static const int kAllocationSiteOffset
#define ASSERT_EQ(v1, v2)
HObjectAccess access() const
bool IsDoubleOrFloatElementsKind(ElementsKind kind)
virtual bool HasOutOfBoundsAccess(int size) V8_OVERRIDE
void SetBlock(HBasicBlock *block)
HBasicBlock * limit_validity()
bool IsInductionVariable()
bool CanBeZero(HValue *right)
Handle< String > class_name() const
#define DECLARE_CONCRETE_INSTRUCTION(type)
virtual void InternalSetOperandAt(int i, HValue *value) V8_FINAL V8_OVERRIDE
virtual Representation observed_input_representation(int index)
virtual HValue * OperandAt(int index) const =0
static const int kWeakNextOffset
void SetUninitialized(bool is_uninitialized)
int SuccessorCount() V8_OVERRIDE
bool additional_lower_limit_is_included()
bool is_backwards_branch()
bool CheckUsesForFlag(Flag f) const
bool IsSpecialDouble() const
bool MustAllocateDoubleAligned() const
static HObjectAccess ForJSArrayBufferViewByteOffset()
bool IsHeapObject() const
HSourcePosition(const HSourcePosition &other)
bool has_no_literals() const
void SetIndexOffset(uint32_t index_offset)
static HObjectAccess ForElementsPointer()
int smi_indirect_uses() const
static HObjectAccess ForJSArrayBufferViewWeakNext()
InductionVariableCheck * next()
virtual bool HasStackCheck()
virtual int argument_delta() const
bool is_fixed_typed_array() const
virtual bool HasEscapingOperandAt(int index) V8_FINAL V8_OVERRIDE
const char * Mnemonic() const
HUnaryControlInstruction(HValue *value, HBasicBlock *true_target, HBasicBlock *false_target)
virtual void RepresentationChanged(Representation to) V8_OVERRIDE
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric literals(0o77, 0b11)") DEFINE_bool(harmony_strings
int known_successor_index() const
void ClearChangesFlag(GVNFlag f)
static T decode(uint32_tvalue)
bool ToNumberCanBeObserved() const
String::Encoding encoding() const
EnumSet< GVNFlag, int32_t > GVNFlagSet
bool IsInteger32Constant()
HValue * transition() const
ZoneList< HBasicBlock * > * return_targets()
HValue * additional_upper_limit()
static Representation None()
virtual void PrintTo(StringStream *stream)=0
virtual void InternalSetOperandAt(int index, HValue *value)=0
static const int kMaxLength
bool can_convert_undefined_to_nan()
static HObjectAccess ForLiteralsPointer()
Token::Value token() const
void set_tail(HUseListNode *list)
Unique< Map > first_map() const
void SetOperandPositions(Zone *zone, HSourcePosition left_pos, HSourcePosition right_pos)
UniqueSet< Map > map_set() const
static Representation Tagged()
static const int kNativeContextOffset
Handle< Map > transition_map() const
Handle< JSFunction > closure() const
static HObjectAccess ForMapInstanceSize()
Handle< Object > handle(Isolate *isolate)
void PrintTypeTo(StringStream *stream)
virtual bool HasStackCheck() V8_FINAL V8_OVERRIDE
virtual void UpdateRepresentation(Representation new_rep, HInferRepresentationPhase *h_infer, const char *reason)
virtual bool TryDecompose(DecompositionResult *decomposition) V8_OVERRIDE
virtual bool IsCommutative() const V8_OVERRIDE
HInvokeFunction(HValue *context, HValue *function, Handle< JSFunction > known_function, int argument_count)
static HObjectAccess ForCodeEntryPointer()
HInstruction(HType type=HType::Tagged())
static const int kSharedFunctionInfoOffset
HBasicBlock * induction_exit_target()
virtual int OperandCount() V8_FINAL V8_OVERRIDE
STATIC_ASSERT(kLastFlag< kBitsPerInt)
Isolate * isolate() const
static const int kNoNumber
void RegisterUse(int index, HValue *new_value)
HBasicBlock * other_target
static HObjectAccess ForCellValue()
static bool CanTrack(InstanceType type)
bool HasSideEffects() const
static const int kMaxValue
HValue * map_value() const
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
#define DECLARE_INSTRUCTION_FACTORY_P5(I, P1, P2, P3, P4, P5)
static HValue * cast(HValue *value)
void AddArgument(HValue *argument, Zone *zone)
void ReplaceAllUsesWith(HValue *other)
virtual int RedefinedOperandIndex()
HBitwiseBinaryOperation(HValue *context, HValue *left, HValue *right, HType type=HType::Tagged())
bool HasMultipleUses() const
void ClearAllSideEffects()
Token::Value token() const
int literal_index() const
bool HasBooleanValue() const
HSimulate * next_simulate()
static const int32_t kMaxOneByteCharCode
static HConstant * CreateAndInsertAfter(Zone *zone, HValue *context, int32_t value, Representation representation, HInstruction *instruction)
HForInCacheArray * index_cache()
static HAllocate * New(Zone *zone, HValue *context, HValue *size, HType type, PretenureFlag pretenure_flag, InstanceType instance_type, Handle< AllocationSite > allocation_site=Handle< AllocationSite >::null())
int int32_non_phi_uses() const
SaveFPRegsMode save_doubles() const
static HCheckMaps * New(Zone *zone, HValue *context, HValue *value, SmallMapList *maps, HValue *typecheck=NULL)
static HTransitionElementsKind * New(Zone *zone, HValue *context, HValue *object, Handle< Map > original_map, Handle< Map > transitioned_map)
static HObjectAccess ForHeapNumberValueLowestBits()
Unique< PropertyCell > cell() const
static AllocationSpace TargetSpaceId(InstanceType type)
virtual bool KnownSuccessorBlock(HBasicBlock **block) V8_OVERRIDE
virtual bool DataEquals(HValue *other) V8_OVERRIDE
static HObjectAccess ForPrototypeOrInitialMap()
Handle< String > type_literal()
bool IsFastDoubleElementsKind(ElementsKind kind)
static GVNFlagSet AllFlagSet()
HCapturedObject(int length, int id, Zone *zone)
virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE
#define DECLARE_INSTRUCTION_FACTORY_P1(I, P1)
HType Combine(HType other)
static const int kInstanceTypeOffset
HUseIterator uses() const
bool IsConstantHoleStore()
ChecksRelatedToLength * checks()
InliningKind inlining_kind() const
static HObjectAccess ForJSArrayBufferBackingStore()
HGoto(HBasicBlock *target)
int OperandCount() V8_OVERRIDE
Representation to() const