#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM64
#include "arm64/lithium-arm64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error Unsupported target architecture.
#endif
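// DEFINE_COMPILE expands to the CompileToLithium() implementation for every
// concrete Hydrogen instruction, dispatching to the matching Do##type()
// method on the architecture-specific LChunkBuilder selected above.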
#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE


Isolate* HValue::isolate() const {
  ASSERT(block() != NULL);
  return block()->isolate();
}
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += 1;
  }
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}
void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  // ... (clamp result to the Smi or Integer32 range, setting *overflow)
  return static_cast<int32_t>(result);
}
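// The helpers below perform the arithmetic in 64 bits so that a 32-bit
// overflow can be detected after the fact and clamped by
// ConvertAndSetOverflow().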
static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}
void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}
void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}


void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
void Range::KeepOrder() {
  if (lower_ > upper_) {
    int32_t tmp = lower_;
    lower_ = upper_;
    upper_ = tmp;
  }
}


#ifdef DEBUG
void Range::Verify() const {
  ASSERT(lower_ <= upper_);
}
#endif


bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  lower_ = Min(Min(v1, v2), Min(v3, v4));
  upper_ = Max(Max(v1, v2), Max(v3, v4));
  return may_overflow;
}
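// Multiplication can flip the sign of either bound, so the new range is the
// min/max over all four corner products computed above.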
const char* HType::ToString() {
  switch (type_) {
    case kNone: return "none";
    case kTagged: return "tagged";
    case kTaggedPrimitive: return "primitive";
    case kTaggedNumber: return "number";
    case kSmi: return "smi";
    case kHeapNumber: return "heap-number";
    case kString: return "string";
    case kBoolean: return "boolean";
    case kNonPrimitive: return "non-primitive";
    case kJSArray: return "array";
    case kJSObject: return "object";
  }
  UNREACHABLE();
  return "unreachable";
}
HType HType::TypeFromValue(Handle<Object> value) {
  HType result = HType::Tagged();
  if (value->IsSmi()) {
    result = HType::Smi();
  } else if (value->IsHeapNumber()) {
    result = HType::HeapNumber();
  } else if (value->IsString()) {
    result = HType::String();
  } else if (value->IsBoolean()) {
    result = HType::Boolean();
  } else if (value->IsJSObject()) {
    result = HType::JSObject();
  } else if (value->IsJSArray()) {
    result = HType::JSArray();
  }
  return result;
}
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
  return block()->block_id() > other->block_id();
}


HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}
bool HValue::CheckUsesForFlag(Flag f) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
  }
  return true;
}


bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) {
      *value = it.value();
      return false;
    }
  }
  return true;
}


bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;
  }
  return return_value;
}
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}


void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();
    value_ = current_->value();
    index_ = current_->index();
  }
}


int HValue::UseCount() const {
  int count = 0;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
  return count;
}
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }
    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    // ... (replace the node with a fresh copy and zap the old one)
  }
#endif
  return current;
}
bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  // ... (flags, operand count and operand ids must match as well)
  bool result = DataEquals(other);
  ASSERT(!result || Hashcode() == other->Hashcode());
  return result;
}


intptr_t HValue::Hashcode() {
  intptr_t result = opcode();
  int count = OperandCount();
  for (int i = 0; i < count; ++i) {
    result = result * 19 + OperandAt(i)->id() + (result >> 7);
  }
  return result;
}
const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}


bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}
bool HValue::IsInteger32Constant() {
  return IsConstant() && HConstant::cast(this)->HasInteger32Value();
}


int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}
void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip and remove not listed items.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}


void HValue::SetBlock(HBasicBlock* block) {
  ASSERT(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}
void HValue::PrintTypeTo(StringStream* stream) {
  if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
  stream->Add(" type:%s", type().ToString());
}


void HValue::PrintRangeTo(StringStream* stream) {
  if (range() == NULL || range()->IsMostGeneric()) return;
  // Note: The c1visualizer syntax for locals allows only a sequence of the
  // following characters: A-Za-z0-9_-|:
  stream->Add(" range:%d_%d%s",
              range()->lower(),
              range()->upper(),
              range()->CanBeMinusZero() ? "_m0" : "");
}
void HValue::PrintChangesTo(StringStream* stream) {
  GVNFlagSet changes_flags = ChangesFlags();
  if (changes_flags.IsEmpty()) return;
  stream->Add(" changes[");
  if (changes_flags == AllSideEffectsFlagSet()) {
    stream->Add("*");
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                      \
    if (changes_flags.Contains(k##Type)) {  \
      if (add_comma) stream->Add(",");      \
      add_comma = true;                     \
      stream->Add(#Type);                   \
    }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  stream->Add("]");
}
bool HValue::UpdateInferredType() {
  HType type = CalculateInferredType();
  bool result = (!type.Equals(type_));
  type_ = type;
  return result;
}


void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}
void HSourcePosition::PrintTo(FILE* out) {
  if (FLAG_hydrogen_track_positions) {
    PrintF(out, "<%d:%d>", inlining_id(), position());
  } else {
    PrintF(out, "<0:%d>", raw());
  }
}


void HInstruction::PrintTo(StringStream* stream) {
  PrintMnemonicTo(stream);
  PrintDataTo(stream);
  PrintRangeTo(stream);
  PrintChangesTo(stream);
  PrintTypeTo(stream);
  if (CheckFlag(kHasNoObservableSideEffects)) {
    stream->Add(" [noOSE]");
  }
  if (CheckFlag(kIsDead)) {
    stream->Add(" [dead]");
  }
}


void HInstruction::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < OperandCount(); ++i) {
    if (i > 0) stream->Add(" ");
    OperandAt(i)->PrintNameTo(stream);
  }
}


void HInstruction::PrintMnemonicTo(StringStream* stream) {
  stream->Add("%s ", Mnemonic());
}
void HInstruction::Unlink() {
  ASSERT(IsLinked());
  ASSERT(!IsControlInstruction());  // Must never move control instructions.
  ASSERT(!IsBlockEntry());  // Doesn't make sense to delete these.
  ASSERT(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    ASSERT(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}
void HInstruction::InsertBefore(HInstruction* next) {
  ASSERT(!IsLinked());
  ASSERT(!next->IsBlockEntry());
  ASSERT(!IsControlInstruction());
  ASSERT(!next->block()->IsStartBlock());
  ASSERT(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
}
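// Inserting after an instruction with observable side effects must skip the
// HSimulate that materializes its environment, so the deopt data stays
// attached to the side-effecting instruction.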
void HInstruction::InsertAfter(HInstruction* previous) {
  ASSERT(!IsLinked());
  ASSERT(!previous->IsControlInstruction());
  ASSERT(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after
  // finishing it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    ASSERT(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    ASSERT(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
}
#ifdef DEBUG
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        ASSERT(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      ASSERT(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    ASSERT(next()->IsSimulate());
  }

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      ASSERT(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif
static bool HasPrimitiveRepresentation(HValue* instr) {
  return instr->representation().IsInteger32() ||
      instr->representation().IsDouble();
}
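// CanDeoptimize() white-lists opcodes: the first group below never
// deoptimizes; the arithmetic/compare group after it avoids deoptimization
// only when the instruction and all of its inputs have primitive
// representations.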
bool HInstruction::CanDeoptimize() {
  switch (opcode()) {
    case HValue::kAccessArgumentsAt:
    case HValue::kApplyArguments:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCapturedObject:
    case HValue::kClampToUint8:
    case HValue::kConstant:
    case HValue::kContext:
    case HValue::kDateField:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kFunctionLiteral:
    case HValue::kGetCachedArrayIndex:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kInstanceOfKnownGlobal:
    case HValue::kInvokeFunction:
    case HValue::kLeaveInlined:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadGlobalCell:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathFloorOfDiv:
    case HValue::kMathMinMax:
    case HValue::kOsrEntry:
    case HValue::kParameter:
    case HValue::kPushArgument:
    case HValue::kSeqStringGetChar:
    case HValue::kSeqStringSetChar:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreContextSlot:
    case HValue::kStoreGlobalCell:
    case HValue::kStoreKeyed:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kUseConst:
    case HValue::kWrapReceiver:
      return false;
    case HValue::kForceRepresentation:
    case HValue::kAdd:
    case HValue::kBitwise:
    case HValue::kChange:
    case HValue::kCompareGeneric:
      // These instructions might deoptimize if they are not primitive.
      if (!HasPrimitiveRepresentation(this)) return true;
      for (int i = 0; i < OperandCount(); i++) {
        HValue* input = OperandAt(i);
        if (!HasPrimitiveRepresentation(input)) return true;
      }
      return false;
    default:
      return true;
  }
}
void HDummyUse::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
  stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index());
}
void HCallJSFunction::PrintDataTo(StringStream* stream) {
  function()->PrintNameTo(stream);
  stream->Add(" ");
  stream->Add("#%d", argument_count());
}


HCallJSFunction* HCallJSFunction::New(
    Zone* zone,
    HValue* context,
    HValue* function,
    int argument_count,
    bool pass_argument_count) {
  bool has_stack_check = false;
  if (function->IsConstant()) {
    HConstant* fun_const = HConstant::cast(function);
    Handle<JSFunction> jsfun =
        Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
    has_stack_check = !jsfun.is_null() &&
        (jsfun->code()->kind() == Code::FUNCTION ||
         jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
  }

  return new(zone) HCallJSFunction(
      function, argument_count, pass_argument_count,
      has_stack_check);
}
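// ApplyIndexChange() rewrites the bounds check to operate on the decomposed
// base index, materializing the combined offset as an HAdd and the combined
// scale as an HSar immediately before the check.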
void HBoundsCheck::ApplyIndexChange() {
  if (skip_check()) return;

  DecompositionResult decomposition;
  bool index_is_decomposable = index()->TryDecompose(&decomposition);
  if (index_is_decomposable) {
    ASSERT(decomposition.base() == base());
    if (decomposition.offset() == offset() &&
        decomposition.scale() == scale()) return;
  } else {
    return;
  }

  ReplaceAllUsesWith(index());

  HValue* current_index = decomposition.base();
  int actual_offset = decomposition.offset() + offset();
  int actual_scale = decomposition.scale() + scale();

  Zone* zone = block()->graph()->zone();
  HValue* context = block()->graph()->GetInvalidContext();
  if (actual_offset != 0) {
    HConstant* add_offset = HConstant::New(zone, context, actual_offset);
    add_offset->InsertBefore(this);
    HInstruction* add = HAdd::New(zone, context,
                                  current_index, add_offset);
    add->InsertBefore(this);
    add->AssumeRepresentation(index()->representation());
    add->ClearFlag(kCanOverflow);
    current_index = add;
  }

  if (actual_scale != 0) {
    HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
    sar_scale->InsertBefore(this);
    HInstruction* sar = HSar::New(zone, context,
                                  current_index, sar_scale);
    sar->InsertBefore(this);
    sar->AssumeRepresentation(index()->representation());
    current_index = sar;
  }

  SetOperandAt(0, current_index);
  base_ = NULL;
  offset_ = 0;
  scale_ = 0;
}
void HBoundsCheck::PrintDataTo(StringStream* stream) {
  index()->PrintNameTo(stream);
  stream->Add(" ");
  length()->PrintNameTo(stream);
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    stream->Add(" base: ((");
    if (base() != index()) {
      index()->PrintNameTo(stream);
    } else {
      stream->Add("index");
    }
    stream->Add(" + %d) >> %d)", offset(), scale());
  }
  if (skip_check()) {
    stream->Add(" [DISABLED]");
  }
}
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}
Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}
void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
  stream->Add("base: ");
  base_index()->PrintNameTo(stream);
  stream->Add(", check: ");
  base_index()->PrintNameTo(stream);
}


void HCallWithDescriptor::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < OperandCount(); i++) {
    OperandAt(i)->PrintNameTo(stream);
    stream->Add(" ");
  }
  stream->Add("#%d", argument_count());
}
void HCallNewArray::PrintDataTo(StringStream* stream) {
  stream->Add(ElementsKindToString(elements_kind()));
  stream->Add(" ");
  HBinaryCall::PrintDataTo(stream);
}


void HCallRuntime::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
  if (save_doubles() == kSaveFPRegs) {
    stream->Add("[save doubles] ");
  }
  stream->Add("#%d", argument_count());
}
void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("class_of_test(");
  value()->PrintNameTo(stream);
  stream->Add(", \"%o\")", *class_name());
}


void HWrapReceiver::PrintDataTo(StringStream* stream) {
  receiver()->PrintNameTo(stream);
  stream->Add(" ");
  function()->PrintNameTo(stream);
}


void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintNameTo(stream);
  stream->Add("[");
  index()->PrintNameTo(stream);
  stream->Add("], length ");
  length()->PrintNameTo(stream);
}
void HControlInstruction::PrintDataTo(StringStream* stream) {
  stream->Add(" goto (");
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
    first_block = false;
  }
  stream->Add(" )");
}


void HReturn::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" (pop ");
  parameter_count()->PrintNameTo(stream);
  stream->Add(" values)");
}
Representation HBranch::observed_input_representation(int index) {
  static const ToBooleanStub::Types tagged_types(
      ToBooleanStub::NULL_TYPE | ToBooleanStub::SPEC_OBJECT |
      ToBooleanStub::STRING | ToBooleanStub::SYMBOL);
  if (expected_input_types_.ContainsAnyOf(tagged_types)) {
    return Representation::Tagged();
  }
  // ... (otherwise pick Double, Smi or None based on the expected types)
  return Representation::None();
}


bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
  HValue* value = this->value();
  if (value->EmitAtUses()) {
    ASSERT(value->IsConstant());
    ASSERT(!value->representation().IsDouble());
    *block = HConstant::cast(value)->BooleanValue()
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
void HCompareMap::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" (%p)", *map().handle());
  HControlInstruction::PrintDataTo(stream);
  if (known_successor_index() == 0) {
    stream->Add(" [true]");
  } else if (known_successor_index() == 1) {
    stream->Add(" [false]");
  }
}
const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor: return "floor";
    case kMathRound: return "round";
    case kMathAbs: return "abs";
    case kMathLog: return "log";
    case kMathExp: return "exp";
    case kMathSqrt: return "sqrt";
    case kMathPowHalf: return "pow-half";
    case kMathClz32: return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}
Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, so clamp to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}
void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
  const char* name = OpName();
  stream->Add("%s ", name);
  value()->PrintNameTo(stream);
}


void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  // ... (print the instance-type interval being tested)
  HControlInstruction::PrintDataTo(stream);
}
void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" == %o", *type_literal_.handle());
  HControlInstruction::PrintDataTo(stream);
}
static String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->IsUndetectable()) return heap->undefined_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      if (unique.IsKnownGlobal(heap->null_value())) {
        return FLAG_harmony_typeof ? heap->null_string()
                                   : heap->object_string();
      }
      ASSERT(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case JS_FUNCTION_TYPE:
    case JS_FUNCTION_PROXY_TYPE:
      return heap->function_string();
    default:
      return heap->object_string();
  }
}
bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    String* type_string = TypeOfString(constant, isolate());
    bool same_type = type_literal_.IsKnownGlobal(type_string);
    *block = same_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  } else if (value()->representation().IsSpecialization()) {
    bool number_type =
        type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
    *block = number_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
void HCheckMapValue::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
}


void HForInPrepareMap::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
}


void HForInCacheArray::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
  stream->Add("[%d]", idx_);
}


void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(" ");
  index()->PrintNameTo(stream);
}
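// The helpers below recognize the ~x idiom implemented as (x ^ -1), so that
// a double negation ~~x used for ToInt32 coercion can be folded away.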
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}


static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
      (MatchLeftIsOnes(b->left(), b->right(), negated) ||
       MatchLeftIsOnes(b->right(), b->left(), negated));
}


static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
      MatchNegationViaXor(negated, arg);
}
HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}
Representation HAdd::RepresentationFromInputs() {
  Representation left_rep = left()->representation();
  if (left_rep.IsExternal()) {
    return Representation::External();
  }
  return HArithmeticBinaryOperation::RepresentationFromInputs();
}


Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      return Representation::Integer32();
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}


static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
      arg2->EqualsInteger32Constant(identity);
}
HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return right();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return left();
  }
  return this;
}


HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}


HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}


bool HMul::MulMinusOne() {
  if (left()->EqualsInteger32Constant(-1) ||
      right()->EqualsInteger32Constant(-1)) {
    return true;
  }
  return false;
}


HValue* HDiv::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  return this;
}


HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ? value() : this;
}
HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSObject()) {
    return receiver();
  }
  return this;
}


void HTypeof::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}
HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
       HValue* value, Representation representation) {
  if (FLAG_fold_constants && value->IsConstant()) {
    HConstant* c = HConstant::cast(value);
    if (c->HasNumberValue()) {
      double double_res = c->DoubleValue();
      if (representation.CanContainDouble(double_res)) {
        return HConstant::New(zone, context,
                              static_cast<int32_t>(double_res),
                              representation);
      }
    }
  }
  return new(zone) HForceRepresentation(value, representation);
}


void HForceRepresentation::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", representation().Mnemonic());
  value()->PrintNameTo(stream);
}
void HChange::PrintDataTo(StringStream* stream) {
  HUnaryOperation::PrintDataTo(stream);
  stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());

  if (CanTruncateToInt32()) stream->Add(" truncating-int32");
  if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
  if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
}
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && value()->IsDiv() && value()->UseCount() == 1) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->zone(), context(), left, right));
  }
  return this;
}
HValue* HCheckInstanceType::Canonicalize() {
  if (check_ == IS_STRING && value()->type().IsString()) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}


void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  ASSERT(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}


void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  ASSERT(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}
bool HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
                                           HValue* dominator) {
  ASSERT(side_effect == kMaps);
  // A this-unused map check can be folded into a dominating store that
  // transitions the object to one of the checked maps.
  if (HasNoUses() && dominator->IsStoreNamedField()) {
    HStoreNamedField* store = HStoreNamedField::cast(dominator);
    if (!store->has_transition() || store->object() != value()) return false;
    HConstant* transition = HConstant::cast(store->transition());
    if (map_set_.Contains(Unique<Map>::cast(transition->GetUnique()))) {
      DeleteAndReplaceWith(NULL);
      return true;
    }
  }
  return false;
}
void HCheckMaps::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" [%p", *map_set_.at(0).handle());
  for (int i = 1; i < map_set_.size(); ++i) {
    stream->Add(",%p", *map_set_.at(i).handle());
  }
  stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
}


void HCheckValue::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  object().handle()->ShortPrint(stream);
}


HValue* HCheckValue::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
}
const char* HCheckInstanceType::GetCheckName() {
  switch (check_) {
    case IS_SPEC_OBJECT: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}


void HCheckInstanceType::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", GetCheckName());
  HUnaryOperation::PrintDataTo(stream);
}


void HCallStub::PrintDataTo(StringStream* stream) {
  stream->Add("%s ",
              CodeStub::MajorName(major_key_, false));
  HUnaryCall::PrintDataTo(stream);
}
void HUnknownOSRValue::PrintDataTo(StringStream* stream) {
  const char* type = "expression";
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  stream->Add("%s @ %d", type, index_);
}


void HInstanceOf::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  stream->Add(" ");
  context()->PrintNameTo(stream);
}
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
  }
  return result;
}
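// HChange::InferRange() narrows the result type to Smi when an Integer32
// input provably fits the Smi range, which also lets the change drop its
// overflow and new-space-promotion side effects.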
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxValue]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}
Range* HConstant::InferRange(Zone* zone) {
  if (has_int32_value_) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}


HSourcePosition HPhi::position() const {
  return block()->first()->position();
}
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
          (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back; handle multiplication by -1
      // specially.
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
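// For modulus, the magnitude of the result is bounded by the right operand
// and the sign follows the left operand, which is what the range computed
// below encodes.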
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
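// A phi is a candidate induction variable when it lives in a loop, has
// exactly two operands, and one of them increments or decrements the phi
// itself by a constant.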
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}
void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  ASSERT(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  ChecksRelatedToLength* length_checks = checks();
  while (length_checks != NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}
void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
  if (checks() != NULL) {
    InductionVariableCheck* c = checks();
    HBasicBlock* current_block = c->check()->block();
    while (c != NULL && c->check()->block() == current_block) {
      c->set_upper_limit(current_upper_limit_);
      c = c->next();
    }
  }
}
void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  ASSERT(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();

  Zone* zone = index_base->block()->graph()->zone();
  set_added_constant(HConstant::New(zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
                                            added_constant());
    ASSERT(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  ASSERT(added_index()->op() == token);

  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->UseCount() == 0) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}
void InductionVariableData::ChecksRelatedToLength::AddCheck(
    HBoundsCheck* check,
    int32_t upper_limit) {
  BitwiseDecompositionResult decomposition;
  InductionVariableData::DecomposeBitwise(check->index(), &decomposition);

  if (first_check_in_block() == NULL ||
      first_check_in_block()->block() != check->block()) {
    CloseCurrentBlock();

    first_check_in_block_ = check;
    set_added_index(NULL);
    set_added_constant(NULL);
    current_and_mask_in_block_ = decomposition.and_mask;
    current_or_mask_in_block_ = decomposition.or_mask;
    current_upper_limit_ = upper_limit;

    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
    return;
  }

  if (upper_limit > current_upper_limit()) {
    current_upper_limit_ = upper_limit;
  }

  if (decomposition.and_mask != 0 &&
      current_or_mask_in_block() == 0) {
    if (current_and_mask_in_block() == 0 ||
        decomposition.and_mask > current_and_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_AND,
                                decomposition.and_mask,
                                decomposition.base,
                                decomposition.context);
      current_and_mask_in_block_ = decomposition.and_mask;
    }
    check->set_skip_check();
  }
  if (current_and_mask_in_block() == 0) {
    if (decomposition.or_mask > current_or_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_OR,
                                decomposition.or_mask,
                                decomposition.base,
                                decomposition.context);
      current_or_mask_in_block_ = decomposition.or_mask;
    }
    check->set_skip_check();
  }

  if (!check->skip_check()) {
    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
  }
}
int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
                                                HValue* phi_operand) {
  if (!phi_operand->representation().IsInteger32()) return 0;

  if (phi_operand->IsAdd()) {
    HAdd* operation = HAdd::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return operation->right()->GetInteger32Constant();
    } else if (operation->right() == phi &&
               operation->left()->IsInteger32Constant()) {
      return operation->left()->GetInteger32Constant();
    }
  } else if (phi_operand->IsSub()) {
    HSub* operation = HSub::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return -operation->right()->GetInteger32Constant();
    }
  }

  return 0;
}
void InductionVariableData::UpdateAdditionalLimit(
    InductionVariableLimitUpdate* update) {
  ASSERT(update->updated_variable == this);
  if (update->limit_is_upper) {
    swap(&additional_upper_limit_, &update->limit);
    swap(&additional_upper_limit_is_included_, &update->limit_is_included);
  } else {
    swap(&additional_lower_limit_, &update->limit);
    swap(&additional_lower_limit_is_included_, &update->limit_is_included);
  }
}
int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
                                                 int32_t or_mask) {
  // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
  const int32_t MAX_LIMIT = 1 << 30;

  int32_t result = MAX_LIMIT;

  if (limit() != NULL &&
      limit()->IsInteger32Constant()) {
    int32_t limit_value = limit()->GetInteger32Constant();
    if (!limit_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  if (additional_upper_limit() != NULL &&
      additional_upper_limit()->IsInteger32Constant()) {
    int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
    if (!additional_upper_limit_is_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  if (and_mask > 0 && and_mask < MAX_LIMIT) {
    if (and_mask < result) result = and_mask;
    return result;
  }

  // Add the effect of the or_mask.
  result |= or_mask;

  return result >= MAX_LIMIT ? kNoLimit : result;
}
HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
  if (!v->IsPhi()) return v;
  HPhi* phi = HPhi::cast(v);
  if (phi->OperandCount() != 2) return v;
  if (phi->OperandAt(0)->block()->is_osr_entry()) {
    return phi->OperandAt(1);
  } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
    return phi->OperandAt(0);
  } else {
    return v;
  }
}


InductionVariableData* InductionVariableData::GetInductionVariableData(
    HValue* v) {
  v = IgnoreOsrValue(v);
  if (v->IsPhi()) {
    return HPhi::cast(v)->induction_variable_data();
  }
  return NULL;
}
bool InductionVariableData::CheckIfBranchIsLoopGuard(
    Token::Value token,
    HBasicBlock* current_branch,
    HBasicBlock* other_branch) {
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      current_branch->current_loop())) {
    return false;
  }

  if (phi()->block()->current_loop()->IsNestedInThisLoop(
      other_branch->current_loop())) {
    return false;
  }

  if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
    return true;
  }
  if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
    return true;
  }
  if (Token::IsInequalityOp(token) &&
      (increment() == 1 || increment() == -1)) {
    return true;
  }

  return false;
}
void InductionVariableData::ComputeLimitFromPredecessorBlock(
    HBasicBlock* block,
    LimitFromPredecessorBlock* result) {
  if (block->predecessors()->length() != 1) return;
  HBasicBlock* predecessor = block->predecessors()->at(0);
  HInstruction* end = predecessor->last();

  if (!end->IsCompareNumericAndBranch()) return;
  HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);

  Token::Value token = branch->token();
  if (!Token::IsArithmeticCompareOp(token)) return;

  HBasicBlock* other_target;
  if (block == branch->SuccessorAt(0)) {
    other_target = branch->SuccessorAt(1);
  } else {
    other_target = branch->SuccessorAt(0);
    token = Token::NegateCompareOp(token);
    ASSERT(block == branch->SuccessorAt(1));
  }

  InductionVariableData* data;

  data = GetInductionVariableData(branch->left());
  HValue* limit = branch->right();
  if (data == NULL) {
    data = GetInductionVariableData(branch->right());
    token = Token::ReverseCompareOp(token);
    limit = branch->left();
  }

  if (data != NULL) {
    result->variable = data;
    result->token = token;
    result->limit = limit;
    result->other_target = other_target;
  }
}
bool InductionVariableData::ComputeInductionVariableLimit(
    HBasicBlock* block,
    InductionVariableLimitUpdate* additional_limit) {
  LimitFromPredecessorBlock limit;
  ComputeLimitFromPredecessorBlock(block, &limit);
  if (!limit.LimitIsValid()) return false;

  if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
                                               block,
                                               limit.other_target)) {
    limit.variable->limit_ = limit.limit;
    limit.variable->limit_included_ = limit.LimitIsIncluded();
    limit.variable->limit_validity_ = block;
    limit.variable->induction_exit_block_ = block->predecessors()->at(0);
    limit.variable->induction_exit_target_ = limit.other_target;
    return false;
  } else {
    additional_limit->updated_variable = limit.variable;
    additional_limit->limit = limit.limit;
    additional_limit->limit_is_upper = limit.LimitIsUpper();
    additional_limit->limit_is_included = limit.LimitIsIncluded();
    return true;
  }
}
Range* HMathMinMax::InferRange(Zone* zone) {
  if (representation().IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (operation_ == kMathMax) {
      res->CombinedMax(b);
    } else {
      ASSERT(operation_ == kMathMin);
      res->CombinedMin(b);
    }
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
void HPhi::PrintTo(StringStream* stream) {
  stream->Add("[");
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    stream->Add(" ");
    value->PrintNameTo(stream);
    stream->Add(" ");
  }
  stream->Add(" uses:%d_%ds_%di_%dd_%dt",
              UseCount(),
              smi_non_phi_uses() + smi_indirect_uses(),
              int32_non_phi_uses() + int32_indirect_uses(),
              double_non_phi_uses() + double_indirect_uses(),
              tagged_non_phi_uses() + tagged_indirect_uses());
  PrintRangeTo(stream);
  PrintTypeTo(stream);
  stream->Add("]");
}
void HPhi::AddInput(HValue* value) {
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
  // Mark phis that may have 'arguments' directly or indirectly as an operand.
  if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
    SetFlag(kIsArguments);
  }
}


bool HPhi::HasRealUses() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (!it.value()->IsPhi()) return true;
  }
  return false;
}
HValue* HPhi::GetRedundantReplacement() {
  HValue* candidate = NULL;
  int count = OperandCount();
  int position = 0;
  while (position < count && candidate == NULL) {
    HValue* current = OperandAt(position++);
    if (current != this) candidate = current;
  }
  while (position < count) {
    HValue* current = OperandAt(position++);
    if (current != this && current != candidate) return NULL;
  }
  ASSERT(candidate != this);
  return candidate;
}


void HPhi::DeleteFromGraph() {
  ASSERT(block() != NULL);
  block()->RemovePhi(this);
  ASSERT(block() == NULL);
}
void HPhi::InitRealUses(int phi_id) {
  // Initialize real uses.
  phi_id_ = phi_id;

  // Compute a conservative approximation of truncating uses before inferring
  // representations. The proper, exact computation will be done later, when
  // inserting representation changes.
  SetFlag(kTruncatingToSmi);
  SetFlag(kTruncatingToInt32);
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* value = it.value();
    if (!value->IsPhi()) {
      Representation rep = value->observed_input_representation(it.index());
      non_phi_uses_[rep.kind()] += 1;
      if (FLAG_trace_representation) {
        PrintF("#%d Phi is used by real #%d %s as %s\n",
               id(), value->id(), value->Mnemonic(), rep.Mnemonic());
      }
      if (!value->IsSimulate()) {
        if (!value->CheckFlag(kTruncatingToSmi)) {
          ClearFlag(kTruncatingToSmi);
        }
        if (!value->CheckFlag(kTruncatingToInt32)) {
          ClearFlag(kTruncatingToInt32);
        }
      }
    }
  }
}
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
  if (FLAG_trace_representation) {
    PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
           id(), other->id(),
           other->non_phi_uses_[Representation::kSmi],
           other->non_phi_uses_[Representation::kInteger32],
           other->non_phi_uses_[Representation::kDouble],
           other->non_phi_uses_[Representation::kTagged]);
  }

  for (int i = 0; i < Representation::kNumRepresentations; i++) {
    indirect_uses_[i] += other->non_phi_uses_[i];
  }
}


void HPhi::AddIndirectUsesTo(int* dest) {
  for (int i = 0; i < Representation::kNumRepresentations; i++) {
    dest[i] += indirect_uses_[i];
  }
}
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index = from->GetAssignedIndexAt(i);
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        if (pop_count_ > 0) {
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}
void HSimulate::PrintDataTo(StringStream* stream) {
  stream->Add("id=%d", ast_id().ToInt());
  if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
  if (values_.length() > 0) {
    if (pop_count_ > 0) stream->Add(" /");
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
      } else {
        stream->Add(" push ");
      }
      values_[i]->PrintNameTo(stream);
      if (i > 0) stream->Add(",");
    }
  }
}
void HSimulate::ReplayEnvironment(HEnvironment* env) {
  if (done_with_replay_) return;
  ASSERT(env != NULL);
  env->set_ast_id(ast_id());
  env->Drop(pop_count());
  for (int i = values()->length() - 1; i >= 0; --i) {
    HValue* value = values()->at(i);
    if (HasAssignedIndexAt(i)) {
      env->Bind(GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }
  done_with_replay_ = true;
}
static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
                                    HCapturedObject* other) {
  for (int i = 0; i < values->length(); ++i) {
    HValue* value = values->at(i);
    if (value->IsCapturedObject()) {
      if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
        values->at(i) = other;
      } else {
        ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
      }
    }
  }
}


// Replay captured objects by replacing all captured objects with the
// same capture id in the current and all outer environments.
void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
  ASSERT(env != NULL);
  while (env != NULL) {
    ReplayEnvironmentNested(env->values(), this);
    env = env->outer();
  }
}
void HCapturedObject::PrintDataTo(StringStream* stream) {
  stream->Add("#%d ", capture_id());
  HDematerializedObject::PrintDataTo(stream);
}


void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  ASSERT(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}


void HEnterInlined::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name = function()->debug_name()->ToCString();
  stream->Add("%s, id=%d", name.get(), function()->id().ToInt());
}
static bool IsInteger32(double value) {
  double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
  return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
}
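// The HConstant constructors below eagerly cache every usable view of the
// value (smi, int32, double, external reference) so later representation
// changes never have to touch the heap object again.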
HConstant::HConstant(Handle<Object> handle, Representation r)
  : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
    object_(Unique<Object>::CreateUninitialized(handle)),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(false),
    is_not_in_new_space_(true),
    boolean_value_(handle->BooleanValue()),
    is_undetectable_(false),
    instance_type_(kUnknownInstanceType) {
  if (handle->IsHeapObject()) {
    Handle<HeapObject> heap_obj = Handle<HeapObject>::cast(handle);
    Heap* heap = heap_obj->GetHeap();
    is_not_in_new_space_ = !heap->InNewSpace(*handle);
    instance_type_ = heap_obj->map()->instance_type();
    is_undetectable_ = heap_obj->map()->is_undetectable();
  }
  if (handle->IsNumber()) {
    double n = handle->Number();
    has_int32_value_ = IsInteger32(n);
    int32_value_ = DoubleToInt32(n);
    has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
    double_value_ = n;
    has_double_value_ = true;
  }

  Initialize(r);
}
HConstant::HConstant(Unique<Object> unique,
                     Representation r,
                     HType type,
                     bool is_not_in_new_space,
                     bool boolean_value,
                     bool is_undetectable,
                     InstanceType instance_type)
  : HTemplateInstruction<0>(type),
    object_(unique),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(false),
    is_not_in_new_space_(is_not_in_new_space),
    boolean_value_(boolean_value),
    is_undetectable_(is_undetectable),
    instance_type_(instance_type) {
  ASSERT(!unique.handle().is_null());
  ASSERT(!type.IsTaggedNumber());
  Initialize(r);
}
HConstant::HConstant(int32_t integer_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
  : object_(object),
    has_smi_value_(Smi::IsValid(integer_value)),
    has_int32_value_(true),
    has_double_value_(true),
    has_external_reference_value_(false),
    is_not_in_new_space_(is_not_in_new_space),
    boolean_value_(integer_value != 0),
    is_undetectable_(false),
    int32_value_(integer_value),
    double_value_(FastI2D(integer_value)),
    instance_type_(kUnknownInstanceType) {
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = has_smi_value_ && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
HConstant::HConstant(double double_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
  : object_(object),
    has_int32_value_(IsInteger32(double_value)),
    has_double_value_(true),
    has_external_reference_value_(false),
    is_not_in_new_space_(is_not_in_new_space),
    boolean_value_(double_value != 0 && !std::isnan(double_value)),
    is_undetectable_(false),
    int32_value_(DoubleToInt32(double_value)),
    double_value_(double_value),
    instance_type_(kUnknownInstanceType) {
  has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = has_smi_value_ && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
HConstant::HConstant(ExternalReference reference)
  : HTemplateInstruction<0>(HType::None()),
    object_(Unique<Object>(Handle<Object>::null())),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(true),
    is_not_in_new_space_(true),
    boolean_value_(true),
    is_undetectable_(false),
    external_reference_value_(reference),
    instance_type_(kUnknownInstanceType) {
  Initialize(Representation::External());
}
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    if (has_smi_value_ && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (has_int32_value_) {
      r = Representation::Integer32();
    } else if (has_double_value_) {
      r = Representation::Double();
    } else if (has_external_reference_value_) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  set_representation(r);
  SetFlag(kUseGVN);
}
bool HConstant::ImmortalImmovable() const {
  if (has_int32_value_) {
    return false;
  }
  if (has_double_value_) {
    if (IsSpecialDouble()) {
      return true;
    }
    return false;
  }
  if (has_external_reference_value_) {
    return false;
  }

  ASSERT(!object_.handle().is_null());
  Heap* heap = isolate()->heap();
  ASSERT(!object_.IsKnownGlobal(heap->minus_zero_value()));
  ASSERT(!object_.IsKnownGlobal(heap->nan_value()));
  return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
      object_.IsKnownGlobal(heap->name()) ||
      IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
      object_.IsKnownGlobal(heap->name()) ||
      INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) \
      object_.IsKnownGlobal(heap->name##_map()) ||
      STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
      false;
}
bool HConstant::EmitAtUses() {
  ASSERT(IsLinked());
  if (block()->graph()->has_osr() &&
      block()->graph()->IsStandardConstant(this)) {
    return true;
  }
  if (UseCount() == 0) return true;
  if (IsCell()) return false;
  if (representation().IsDouble()) return false;
  return true;
}
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  if (r.IsSmi() && !has_smi_value_) return NULL;
  if (r.IsInteger32() && !has_int32_value_) return NULL;
  if (r.IsDouble() && !has_double_value_) return NULL;
  if (r.IsExternal() && !has_external_reference_value_) return NULL;
  if (has_int32_value_) {
    return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
  }
  if (has_double_value_) {
    return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
  }
  if (has_external_reference_value_) {
    return new(zone) HConstant(external_reference_value_);
  }
  ASSERT(!object_.handle().is_null());
  return new(zone) HConstant(object_,
                             r,
                             type_,
                             is_not_in_new_space_,
                             boolean_value_,
                             is_undetectable_,
                             instance_type_);
}
Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
  HConstant* res = NULL;
  if (has_int32_value_) {
    res = new(zone) HConstant(int32_value_,
                              Representation::Integer32(),
                              is_not_in_new_space_,
                              object_);
  } else if (has_double_value_) {
    res = new(zone) HConstant(DoubleToInt32(double_value_),
                              Representation::Integer32(),
                              is_not_in_new_space_,
                              object_);
  }
  return Maybe<HConstant*>(res != NULL, res);
}


Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
  HConstant* res = NULL;
  Handle<Object> handle = this->handle(zone->isolate());
  if (handle->IsBoolean()) {
    res = handle->BooleanValue() ?
      new(zone) HConstant(1) : new(zone) HConstant(0);
  } else if (handle->IsUndefined()) {
    res = new(zone) HConstant(OS::nan_value());
  } else if (handle->IsNull()) {
    res = new(zone) HConstant(0);
  }
  return Maybe<HConstant*>(res != NULL, res);
}
void HConstant::PrintDataTo(StringStream* stream) {
  if (has_int32_value_) {
    stream->Add("%d ", int32_value_);
  } else if (has_double_value_) {
    stream->Add("%f ", FmtElm(double_value_));
  } else if (has_external_reference_value_) {
    stream->Add("%p ", reinterpret_cast<void*>(
        external_reference_value_.address()));
  } else {
    handle(Isolate::Current())->ShortPrint(stream);
  }
  if (!is_not_in_new_space_) {
    stream->Add("[new space] ");
  }
}
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  // ... (infer from inputs first, then:)
  if (observed_output_representation_.IsNone()) {
    UpdateRepresentation(RepresentationFromUses(), h_infer, "uses");
  } else {
    UpdateRepresentation(RepresentationFromOutput(), h_infer, "output");
  }
}


Representation HBinaryOperation::RepresentationFromInputs() {
  Representation rep = representation();
  for (int i = 1; i <= 2; ++i) {
    rep = rep.generalize(observed_input_representation(i));
  }
  // ... (also generalize with the non-Tagged actual input representations)
  return rep;
}


bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}


Representation HBinaryOperation::RepresentationFromOutput() {
  Representation rep = representation();
  if (observed_output_representation_.is_more_general_than(rep) &&
      !IgnoreObservedOutputRepresentation(rep)) {
    return observed_output_representation_;
  }
  return Representation::None();
}


void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
}
Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1. If the range can be negative, the minimum int is a
      // negative number with the high bit, and all bits below, unset:
      // -(1 << high). If it cannot be negative, conservatively choose 0 as
      // minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}
Range* HSar::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Sar(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}


Range* HShr::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      int shift_count = c->Integer32Value() & 0x1f;
      if (left()->range()->CanBeNegative()) {
        // Only compute bounds if the result always fits into an int32.
        return (shift_count >= 1)
            ? new(zone) Range(0,
                              static_cast<uint32_t>(0xffffffff) >> shift_count)
            : new(zone) Range();
      } else {
        // For positive inputs we can use the >> operator.
        Range* result = (left()->range() != NULL)
            ? left()->range()->Copy(zone)
            : new(zone) Range();
        result->Sar(c->Integer32Value());
        return result;
      }
    }
  }
  return HValue::InferRange(zone);
}
Range* HShl::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Shl(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}
Range* HLoadNamedField::InferRange(Zone* zone) {
  if (access().representation().IsInteger8()) {
    return new(zone) Range(kMinInt8, kMaxInt8);
  }
  if (access().representation().IsUInteger8()) {
    return new(zone) Range(0, kMaxUInt8);
  }
  if (access().representation().IsInteger16()) {
    return new(zone) Range(kMinInt16, kMaxInt16);
  }
  if (access().representation().IsUInteger16()) {
    return new(zone) Range(0, kMaxUInt16);
  }
  if (access().IsStringLength()) {
    return new(zone) Range(0, String::kMaxLength);
  }
  return HValue::InferRange(zone);
}


Range* HLoadKeyed::InferRange(Zone* zone) {
  switch (elements_kind()) {
    // ... (fixed per-ElementsKind ranges elided in the listing)
    default:
      return HValue::InferRange(zone);
  }
}
void HCompareGeneric::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  HBinaryOperation::PrintDataTo(stream);
}


bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
    *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
  if (constant->HasNumberValue()) return false;
  if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
    return true;
  }
  if (constant->IsUndetectable()) return false;
  InstanceType type = constant->GetInstanceType();
  return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
         (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
}
bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = ConstantIsObject(HConstant::cast(value()), isolate())
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->IsUndetectable()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    InstanceType type = HConstant::cast(value())->GetInstanceType();
    *block = (from_ <= type) && (type <= to_)
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
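// NOTE (editorial, not part of the original source): the KnownSuccessorBlock
// protocol lets a branch fold away at graph-build time -- when the tested
// value is a compile-time constant the method stores the statically known
// target in *block and returns true; otherwise it stores NULL and returns
// false, and the branch is emitted normally.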
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}


bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    if (constant->HasDoubleValue()) {
      *block = IsMinusZero(constant->DoubleValue())
          ? FirstSuccessor() : SecondSuccessor();
      return true;
    }
  }
  if (value()->representation().IsSmiOrInteger32()) {
    // A Smi or Integer32 value cannot hold minus zero.
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
void HGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", SuccessorAt(0)->block_id());
}


// ... (intervening PrintDataTo bodies elided in the listing; the surviving
// fragment prints an operand index)
  stream->Add("%u", index());


void HLoadNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }
}
HCheckMaps* HCheckMaps::New(Zone* zone,
                            HValue* context,
                            HValue* value,
                            Handle<Map> map,
                            CompilationInfo* info,
                            HValue* typecheck) {
  HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
  check_map->Add(map, zone);
  if (map->CanOmitMapChecks() &&
      value->IsConstant() &&
      HConstant::cast(value)->HasMap(map)) {
    // Mark the check as omittable, but register a dependency that
    // invalidates the optimized code if the map can still transition.
    check_map->omit_ = true;
    if (map->CanTransition()) {
      map->AddDependentCompilationInfo(
          DependentCode::kPrototypeCheckGroup, info);
    }
  }
  return check_map;
}
void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
}


void HLoadKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d]", index_offset());
  } else {
    stream->Add("]");
  }

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }

  if (RequiresHoleCheck()) {
    stream->Add(" check_hole");
  }
}
bool HLoadKeyed::UsesMustHandleHole() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }
  // ... (external-array early-out elided in the listing)
  if (hole_mode() == ALLOW_RETURN_HOLE) {
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }
  // ... (fast-double early-out elided in the listing)

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }
  return true;
}


bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
  return IsFastDoubleElementsKind(elements_kind()) &&
      CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}


bool HLoadKeyed::RequiresHoleCheck() const {
  // ... (packed- and external-kind early-outs elided in the listing)
  return !UsesMustHandleHole();
}
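// NOTE (editorial, not part of the original source): "the hole" is the
// marker V8 stores in holey arrays for absent elements. In fast double
// arrays it is encoded as a specific NaN bit pattern, so a use that already
// treats undefined as NaN can consume the raw load and the explicit hole
// check can be skipped; tagged loads can only skip the check when every use
// is an HChange that re-checks the value anyway.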
void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("]");
}


HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property names generated by a
  // for-in statement as keys and rewrite them into fast property loads
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        HCheckMapValue* map_check =
            HCheckMapValue::New(block()->graph()->zone(),
                                block()->graph()->GetInvalidContext(),
                                object(),
                                names_cache->map());
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->zone(),
            block()->graph()->GetInvalidContext(),
            index_cache,
            key_load->key(),
            key_load->key(),
            key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }
  return this;
}
void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" = ");
  value()->PrintNameTo(stream);
}


void HStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
  stream->Add(" = ");
  value()->PrintNameTo(stream);
  if (NeedsWriteBarrier()) {
    stream->Add(" (write-barrier)");
  }
  if (has_transition()) {
    stream->Add(" (transition map %p)", *transition_map());
  }
}
void HStoreKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d] = ", index_offset());
  } else {
    stream->Add("] = ");
  }

  value()->PrintNameTo(stream);
}


void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("] = ");
  value()->PrintNameTo(stream);
}
void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  ElementsKind from_kind = original_map().handle()->elements_kind();
  ElementsKind to_kind = transitioned_map().handle()->elements_kind();
  stream->Add(" %p [%s] -> %p [%s]",
              *original_map().handle(),
              ElementsAccessor::ForKind(from_kind)->name(),
              *transitioned_map().handle(),
              ElementsAccessor::ForKind(to_kind)->name());
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");
}


void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p]", *cell().handle());
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}
bool HLoadGlobalCell::RequiresHoleCheck() const {
  if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return true;
  }
  return false;
}
void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
}


void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  base_object()->PrintNameTo(stream);
  stream->Add(" offset ");
  offset()->PrintTo(stream);
}


void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p] = ", *cell().handle());
  value()->PrintNameTo(stream);
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


void HLoadContextSlot::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add("[%d]", slot_index());
}


void HStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintNameTo(stream);
  stream->Add("[%d] = ", slot_index());
  value()->PrintNameTo(stream);
}
HType HPhi::CalculateInferredType() {
  if (OperandCount() == 0) return HType::Tagged();
  HType result = OperandAt(0)->type();
  for (int i = 1; i < OperandCount(); ++i) {
    HType current = OperandAt(i)->type();
    result = result.Combine(current);
  }
  return result;
}


HType HChange::CalculateInferredType() {
  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
  return type();
}


Representation HUnaryMathOperation::RepresentationFromInputs() {
  Representation rep = representation();
  // If the actual input representation is more general than what we have
  // so far but not Tagged, use that representation instead.
  Representation input_rep = value()->representation();
  if (!input_rep.IsTagged()) {
    rep = rep.generalize(input_rep);
  }
  return rep;
}
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  ASSERT(side_effect == kNewSpacePromotion);
  Zone* zone = block()->zone();
  if (!FLAG_use_allocation_folding) return false;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  // Check whether we are folding within the same block for local folding.
  if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // We can only fold allocations with statically known sizes.
  if (!current_size->IsInteger32Constant() ||
      !dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  dominator_allocate = GetFoldableDominator(dominator_allocate);
  if (dominator_allocate == NULL) {
    return false;
  }

  ASSERT((IsNewSpaceAllocation() &&
         dominator_allocate->IsNewSpaceAllocation()) ||
         (IsOldDataSpaceAllocation() &&
         dominator_allocate->IsOldDataSpaceAllocation()) ||
         (IsOldPointerSpaceAllocation() &&
         dominator_allocate->IsOldPointerSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;
  int32_t current_size_constant =
      HConstant::cast(current_size)->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_constant;

  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
      new_dominator_size += kDoubleSize / 2;
    }
  }

  if (new_dominator_size > Page::kMaxRegularHeapObjectSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
          id(), Mnemonic(), dominator_allocate->id(),
          dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return false;
  }

  HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      new_dominator_size,
      Representation::None(),
      dominator_allocate);
  dominator_allocate->UpdateSize(new_dominator_size_constant);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
    dominator_allocate->MakePrefillWithFiller();
  } else {
    // Clear the next map word so allocation mementos keep working in
    // new space.
    dominator_allocate->ClearNextMapWord(original_object_size);
  }
#else
  // Clear the next map word so allocation mementos keep working in
  // new space.
  dominator_allocate->ClearNextMapWord(original_object_size);
#endif

  dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());

  // After that replace the dominated allocate instruction.
  HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      dominator_size_constant,
      Representation::None(),
      this);

  HInstruction* dominated_allocate_instr =
      HInnerAllocatedObject::New(zone,
                                 context(),
                                 dominator_allocate,
                                 inner_offset,
                                 type());
  dominated_allocate_instr->InsertBefore(this);
  DeleteAndReplaceWith(dominated_allocate_instr);
  if (FLAG_trace_allocation_folding) {
    PrintF("#%d (%s) folded into #%d (%s)\n",
        id(), Mnemonic(), dominator_allocate->id(),
        dominator_allocate->Mnemonic());
  }
  return true;
}
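// NOTE (editorial sketch, not part of the original source): allocation
// folding rewrites two constant-size allocations into one. Conceptually:
//
//   before:  a = Allocate(S1)        ...  b = Allocate(S2)
//   after:   a = Allocate(S1 + S2)   ...  b = InnerAllocatedObject(a, S1)
//
// The dominated allocation becomes a pointer at byte offset S1 inside the
// dominator's chunk, which removes one allocation and enables write-barrier
// elimination for stores into the combined object.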
HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
  if (!IsFoldable(dominator)) {
    // We cannot hoist old space allocations over new space allocations.
    if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
            id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    HAllocate* dominator_dominator = dominator->dominating_allocate_;

    // We can hoist old data space allocations over an old pointer space
    // allocation and vice versa. For that we have to check the dominator
    // of the dominator allocate instruction.
    if (dominator_dominator == NULL) {
      dominating_allocate_ = dominator;
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
            id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    // We can only fold old space allocations that are in the same basic
    // block, since it is not guaranteed that we fill up the whole allocated
    // old space memory.
    if (block()->block_id() != dominator_dominator->block()->block_id()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
            id(), Mnemonic(), dominator_dominator->id(),
            dominator_dominator->Mnemonic());
      }
      return NULL;
    }

    ASSERT((IsOldDataSpaceAllocation() &&
           dominator_dominator->IsOldDataSpaceAllocation()) ||
           (IsOldPointerSpaceAllocation() &&
           dominator_dominator->IsOldPointerSpaceAllocation()));

    int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
    HStoreNamedField* dominator_free_space_size =
        dominator->filler_free_space_size_;
    if (dominator_free_space_size != NULL) {
      // We already hoisted one old space allocation, i.e., we already
      // installed a filler map. Hence, just update the free space size.
      dominator->UpdateFreeSpaceFiller(current_size);
    } else {
      // Install a filler map. The space between the two allocations is
      // filled with a free space object.
      dominator->CreateFreeSpaceFiller(current_size);
    }

    // We can hoist the old space allocation over the actual dominator.
    return dominator_dominator;
  }
  return dominator;
}
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
  ASSERT(filler_free_space_size_ != NULL);
  Zone* zone = block()->zone();
  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      filler_free_space_size_->value()->GetInteger32Constant() +
          free_space_size,
      Representation::Smi(),
      filler_free_space_size_);
  filler_free_space_size_->UpdateValue(new_free_space_size);
}


void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
  ASSERT(filler_free_space_size_ == NULL);
  Zone* zone = block()->zone();
  HInstruction* free_space_instr =
      HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
      dominating_allocate_->size(), type());
  free_space_instr->InsertBefore(this);
  HConstant* filler_map = HConstant::New(
      zone,
      context(),
      isolate()->factory()->free_space_map());
  filler_map->FinalizeUniqueness();
  filler_map->InsertAfter(free_space_instr);
  HInstruction* store_map = HStoreNamedField::New(zone, context(),
      free_space_instr, HObjectAccess::ForMap(), filler_map);
  store_map->SetFlag(HValue::kHasNoObservableSideEffects);
  store_map->InsertAfter(filler_map);

  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* filler_size = HConstant::CreateAndInsertAfter(
      zone, context(), free_space_size, Representation::Smi(), store_map);
  HObjectAccess access =
      HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
                                     FreeSpace::kSizeOffset,
                                     Representation::Smi());
  HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
      free_space_instr, access, filler_size);
  store_size->SetFlag(HValue::kHasNoObservableSideEffects);
  store_size->InsertAfter(filler_size);
  filler_free_space_size_ = store_size;
}
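// NOTE (editorial, not part of the original source): the filler is needed
// because old-space folding can leave a gap between the two folded objects,
// and the heap must stay iterable -- writing a FreeSpace map plus a size
// into the gap makes it parse as a valid (dead) object for the garbage
// collector.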
void HAllocate::ClearNextMapWord(int offset) {
  if (MustClearNextMapWord()) {
    Zone* zone = block()->zone();
    HObjectAccess access =
        HObjectAccess::ForObservableJSObjectOffset(offset);
    HStoreNamedField* clear_next_map =
        HStoreNamedField::New(zone, context(), this, access,
            block()->graph()->GetConstant0());
    clear_next_map->ClearAllSideEffects();
    clear_next_map->InsertAfter(this);
  }
}
void HAllocate::PrintDataTo(StringStream* stream) {
  size()->PrintNameTo(stream);
  stream->Add(" (");
  if (IsNewSpaceAllocation()) stream->Add("N");
  if (IsOldPointerSpaceAllocation()) stream->Add("P");
  if (IsOldDataSpaceAllocation()) stream->Add("D");
  if (MustAllocateDoubleAligned()) stream->Add("A");
  if (MustPrefillWithFiller()) stream->Add("F");
  stream->Add(")");
}
bool HStoreKeyed::NeedsCanonicalization() {
  // If the value is an integer or smi, a constant, or comes from a keyed
  // load of a non-float external array, it is either a non-hole value or
  // stores the hole explicitly: no canonicalization is needed.
  if (value()->IsConstant()) {
    return false;
  }

  if (value()->IsLoadKeyed()) {
    return IsExternalFloatOrDoubleElementsKind(
        HLoadKeyed::cast(value())->elements_kind());
  }

  if (value()->IsChange()) {
    if (HChange::cast(value())->from().IsSmiOrInteger32()) {
      return false;
    }
    if (HChange::cast(value())->value()->type().IsSmi()) {
      return false;
    }
  }
  return true;
}
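// NOTE (editorial, not part of the original source): canonicalization guards
// the fast-double-array encoding of "the hole", which is one particular NaN
// bit pattern. A value whose bits are not controlled (e.g. one loaded from
// an external float/double array) could alias that pattern, so it must be
// canonicalized to the standard quiet NaN before being stored.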
#define H_CONSTANT_INT(val) \
HConstant::New(zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
HConstant::New(zone, context, static_cast<double>(val))


#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
      if (IsInt32Double(double_res)) { \
        return H_CONSTANT_INT(double_res); \
      } \
      return H_CONSTANT_DOUBLE(double_res); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
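// NOTE (editorial sketch, not part of the original source): expanding
// DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +) yields an HAdd::New that
// performs build-time constant folding, e.g.
//
//   HAdd::New(zone, context, <constant 1>, <constant 2>)
//       => HConstant 3        (fits an int32, so H_CONSTANT_INT)
//   HAdd::New(zone, context, <constant 0.5>, <constant 0.25>)
//       => HConstant 0.75     (H_CONSTANT_DOUBLE)
//
// and only materializes a real HAdd node when an operand is non-constant.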
HInstruction* HStringAdd::New(Zone* zone,
                              HValue* context,
                              HValue* left,
                              HValue* right,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> left_string = c_left->StringValue();
      Handle<String> right_string = c_right->StringValue();
      // Prevent a possible exception from an invalid string length.
      if (left_string->length() + right_string->length() <
          String::kMaxLength) {
        Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
            c_left->StringValue(), c_right->StringValue());
        ASSERT(!concat.is_null());
        return HConstant::New(zone, context, concat);
      }
    }
  }
  return new(zone) HStringAdd(
      context, left, right, pretenure_flag, flags, allocation_site);
}
void HStringAdd::PrintDataTo(StringStream* stream) {
  if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    stream->Add("_CheckBoth");
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
    stream->Add("_CheckLeft");
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
    stream->Add("_CheckRight");
  }
  HBinaryOperation::PrintDataTo(stream);
  stream->Add(" (");
  if (pretenure_flag() == NOT_TENURED) stream->Add("N");
  else if (pretenure_flag() == TENURED) stream->Add("D");
  stream->Add(")");
}
HInstruction* HStringCharFromCode::New(
    Zone* zone, HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    Isolate* isolate = zone->isolate();
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(zone, context,
            LookupSingleCharacterStringFromCode(isolate, code));
      }
      return HConstant::New(zone, context, isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}
HInstruction* HUnaryMathOperation::New(
    Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    // ... (per-operation folding of d -- abs, floor, log, sqrt, etc. --
    // elided in the listing)
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
HInstruction* HPower::New(Zone* zone,
                          HValue* context,
                          HValue* left,
                          HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      return H_CONSTANT_DOUBLE(result);
    }
  }
  return new(zone) HPower(left, right);
}
HInstruction* HMathMinMax::New(
    Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
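// NOTE (editorial, not part of the original source): the equal-values branch
// exists because +0 and -0 compare equal in C++ yet JavaScript requires
// Math.min(-0, +0) === -0 and Math.max(-0, +0) === +0, hence the sign check;
// and when either operand is NaN every ordered comparison is false, so the
// fall-through folds the result to NaN.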
HInstruction* HMod::New(Zone* zone,
                        HValue* context,
                        HValue* left,
                        HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right);
}
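// NOTE (editorial, not part of the original source): the two special cases
// mirror JavaScript semantics that int32 arithmetic cannot express --
// kMinInt % -1 would overflow a hardware idiv, and a negative dividend that
// divides evenly must yield -0 (e.g. -4 % 2 === -0 in JS), so both fold to
// the double constant -0.0 instead of an int32.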
HInstruction* HDiv::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
                   Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}
HInstruction* HBitwise::New(
    Zone* zone, HValue* context, Token::Value op,
    HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      return H_CONSTANT_INT(result); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR
HInstruction* HShr::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        // Logical shift right of a negative value by zero bits produces an
        // unsigned result that does not fit into an int32.
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}
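// NOTE (editorial, not part of the original source): JavaScript masks shift
// counts to five bits, hence the "& 0x1f" above; and (-1 >>> 0) evaluates to
// 4294967295, which exceeds kMaxInt, so the negative-input/zero-shift case
// must fold to a double constant rather than an int32 one.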
HInstruction* HSeqStringGetChar::New(Zone* zone,
                                     HValue* context,
                                     String::Encoding encoding,
                                     HValue* string,
                                     HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      ASSERT_LE(0, i);
      ASSERT_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}
#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


void HBitwise::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(op()));
  stream->Add(" ");
  HBitwiseBinaryOperation::PrintDataTo(stream);
}
void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      HConstant* integer_input =
          HConstant::New(graph->zone(), graph->GetInvalidContext(),
                         DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


Representation HPhi::RepresentationFromInputs() {
  Representation r = Representation::None();
  for (int i = 0; i < OperandCount(); ++i) {
    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never-run code.
    if (it.value()->block()->IsUnreachable()) continue;

    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    // ... (agreement logic elided in the listing)
  }
  return rep;
}


bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    // ... (returns true for a use requiring a non-Smi, non-Tagged rep)
  }
  return false;
}


#ifdef DEBUG

void HPhi::Verify() {
  ASSERT(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    ASSERT(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}
void HSimulate::Verify() {
  HInstruction::Verify();
  ASSERT(HasAstId());
}


void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}


void HCheckValue::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}

#endif
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  ASSERT(offset >= 0);
  ASSERT(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;
  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  bool existing_inobject_property = true;
  if (!map.is_null()) {
    existing_inobject_property = (offset <
        map->instance_size() - map->unused_property_fields() * kPointerSize);
  }
  return HObjectAccess(portion, offset, representation, Handle<String>::null(),
                       false, existing_inobject_property);
}


HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
  switch (offset) {
    // ... (per-field cases -- transition info, nested site, pretenure data,
    // dependent code, weak next -- elided in the listing)
    default:
      UNREACHABLE();
  }
  return HObjectAccess(kInobject, offset);
}


HObjectAccess HObjectAccess::ForContextSlot(int index) {
  ASSERT(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;
  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSArray::kLengthOffset) {
    portion = kArrayLengths;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset);
}


HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}


HObjectAccess HObjectAccess::ForField(Handle<Map> map,
                                      LookupResult* lookup,
                                      Handle<String> name) {
  ASSERT(lookup->IsField() || lookup->IsTransitionToField());
  int index;
  Representation representation;
  if (lookup->IsField()) {
    index = lookup->GetLocalFieldIndexFromMap(*map);
    representation = lookup->representation();
  } else {
    Map* transition = lookup->GetTransitionTarget();
    int descriptor = transition->LastAdded();
    index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
        map->inobject_properties();
    PropertyDetails details =
        transition->instance_descriptors()->GetDetails(descriptor);
    representation = details.representation();
  }
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    int offset = (index * kPointerSize) + map->instance_size();
    return HObjectAccess(kInobject, offset, representation, name, false, true);
  } else {
    // Non-negative property indices are in the properties array.
    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
    return HObjectAccess(kBackingStore, offset, representation, name,
                         false, false);
  }
}


HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
  return HObjectAccess(kInobject,
                       Cell::kValueOffset,
                       Representation::Tagged(),
                       Handle<String>(isolate->heap()->cell_value_string()));
}
void HObjectAccess::SetGVNFlags(HValue* instr, PropertyAccessType access_type) {
  // Set the appropriate GVN flags for a given load or store instruction.
  if (access_type == STORE) {
    // Track dominating allocations in order to eliminate write barriers.
    instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
    instr->SetFlag(HValue::kTrackSideEffectDominators);
  } else {
    // Try to GVN loads, but don't hoist above map changes.
    instr->SetFlag(HValue::kUseGVN);
    instr->SetDependsOnFlag(::v8::internal::kMaps);
  }

  switch (portion()) {
    case kArrayLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kArrayLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
      }
      break;
    case kStringLengths:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kStringLengths);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kStringLengths);
      }
      break;
    case kInobject:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kInobjectFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
      }
      break;
    case kDouble:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kDoubleFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
      }
      break;
    case kBackingStore:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
      }
      break;
    case kElementsPointer:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kElementsPointer);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
      }
      break;
    case kMaps:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kMaps);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kMaps);
      }
      break;
    case kExternalMemory:
      if (access_type == STORE) {
        instr->SetChangesFlag(::v8::internal::kExternalMemory);
      } else {
        instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
      }
      break;
  }
}
void HObjectAccess::PrintTo(StringStream* stream) const {
  stream->Add(".");

  switch (portion()) {
    case kArrayLengths:
    case kStringLengths:
      stream->Add("%length");
      break;
    case kElementsPointer:
      stream->Add("%elements");
      break;
    case kMaps:
      stream->Add("%map");
      break;
    case kDouble:  // fall through.
    case kInobject:
      if (!name_.is_null()) {
        stream->Add(*String::cast(*name_)->ToCString());
      }
      stream->Add("[in-object]");
      break;
    case kBackingStore:
      if (!name_.is_null()) {
        stream->Add(*String::cast(*name_)->ToCString());
      }
      stream->Add("[backing-store]");
      break;
    case kExternalMemory:
      stream->Add("[external-memory]");
      break;
  }

  stream->Add("@%d", offset());
}