// Expands to the boilerplate Accept() method for an AST node class:
// it forwards to the matching AstVisitor::Visit<type> overload,
// implementing double dispatch for the visitor pattern.
// (Fix: stray line-number tokens from a bad extraction removed.)
#define DECL_ACCEPT(type)                                       \
  void type::Accept(AstVisitor* v) { v->Visit##type(this); }
// NOTE(review): garbled extraction — the enclosing function signatures were
// elided. These look like the bodies of Expression::IsSmiLiteral,
// IsStringLiteral, IsNullLiteral and IsUndefinedLiteral; restore from the
// original file before compiling.
// Smi literal: a Literal node whose value is a Smi.
60 return AsLiteral() !=
NULL && AsLiteral()->value()->IsSmi();
// String literal check.
65 return AsLiteral() !=
NULL && AsLiteral()->value()->
IsString();
// Null literal check.
70 return AsLiteral() !=
NULL && AsLiteral()->value()->
IsNull();
// Undefined "literal": an unbound VariableProxy named "undefined"
// (interior lines elided here).
75 VariableProxy* var_proxy = AsVariableProxy();
76 if (var_proxy ==
NULL)
return false;
81 var_proxy->name()->Equals(isolate->
heap()->undefined_string());
// NOTE(review): fragments of two VariableProxy constructors,
// VariableProxy::BindTo and the Assignment constructor. Most member
// initializers and body statements were elided by the extraction; do not
// edit without the original file.
85 VariableProxy::VariableProxy(
Zone* zone,
Variable* var,
int position)
89 is_this_(var->is_this()),
97 VariableProxy::VariableProxy(Zone* zone,
102 : Expression(zone, position),
108 interface_(interface) {
// Proxy names must be internalized so identity compares work.
110 ASSERT(name->IsInternalizedString());
// BindTo: resolve this proxy to a concrete Variable.
114 void VariableProxy::BindTo(Variable* var) {
117 ASSERT(!FLAG_harmony_modules || interface_->IsUnified(var->interface()));
118 ASSERT((is_this() && var->is_this()) || name_.is_identical_to(var->name()));
// Mark the variable live so the compiler does not dead-strip it.
125 var->set_is_used(
true);
// Assignment constructor fragment (initializer list partially elided).
129 Assignment::Assignment(Zone* zone,
134 : Expression(zone, pos),
138 binary_operation_(
NULL),
139 assignment_id_(GetNextId(zone)),
140 is_uninitialized_(
false),
// NOTE(review): fragment of Assignment::binary_op() — maps a compound
// assignment token to its underlying binary operator. The switch header and
// the ASSIGN_ADD/SUB/MUL/DIV cases (orig. lines 152-155) were elided.
146 case Token::ASSIGN_BIT_OR:
return Token::BIT_OR;
147 case Token::ASSIGN_BIT_XOR:
return Token::BIT_XOR;
148 case Token::ASSIGN_BIT_AND:
return Token::BIT_AND;
149 case Token::ASSIGN_SHL:
return Token::SHL;
150 case Token::ASSIGN_SAR:
return Token::SAR;
151 case Token::ASSIGN_SHR:
return Token::SHR;
156 case Token::ASSIGN_MOD:
return Token::MOD;
// Fallback for non-compound tokens.
159 return Token::ILLEGAL;
163 bool FunctionLiteral::AllowsLazyCompilation() {
164 return scope()->AllowsLazyCompilation();
168 bool FunctionLiteral::AllowsLazyCompilationWithoutContext() {
169 return scope()->AllowsLazyCompilationWithoutContext();
173 int FunctionLiteral::start_position()
const {
174 return scope()->start_position();
178 int FunctionLiteral::end_position()
const {
179 return scope()->end_position();
183 StrictMode FunctionLiteral::strict_mode()
const {
184 return scope()->strict_mode();
// NOTE(review): fragments of FunctionLiteral::InitializeSharedInfo, two
// ObjectLiteralProperty constructors and Property::IsCompileTimeValue.
// Several interior lines were elided by the extraction.
// InitializeSharedInfo: scan embedded objects in the unoptimized code for
// a SharedFunctionInfo whose start position matches this literal.
188 void FunctionLiteral::InitializeSharedInfo(
189 Handle<Code> unoptimized_code) {
190 for (RelocIterator it(*unoptimized_code); !it.done(); it.next()) {
191 RelocInfo* rinfo = it.rinfo();
192 if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT)
continue;
194 if (obj->IsSharedFunctionInfo()) {
196 if (shared->start_position() == start_position()) {
197 shared_info_ = Handle<SharedFunctionInfo>(shared);
// Constructor classifying a key/value property; "__proto__" keys are
// special-cased (PROTOTYPE kind — assignment target elided here).
205 ObjectLiteralProperty::ObjectLiteralProperty(
206 Zone* zone, Literal* key, Expression* value) {
210 Object* k = *key->value();
211 if (k->IsInternalizedString() &&
212 zone->isolate()->heap()->proto_string()->Equals(
String::cast(k))) {
214 }
else if (value_->AsMaterializedLiteral() !=
NULL) {
215 kind_ = MATERIALIZED_LITERAL;
216 }
else if (value_->AsLiteral() !=
NULL) {
// Accessor-property constructor: getter or setter.
224 ObjectLiteralProperty::ObjectLiteralProperty(
225 Zone* zone,
bool is_getter, FunctionLiteral* value) {
228 kind_ = is_getter ? GETTER : SETTER;
// IsCompileTimeValue fragment (first operand of the || elided).
232 bool ObjectLiteral::Property::IsCompileTimeValue() {
234 (kind_ == MATERIALIZED_LITERAL &&
239 void ObjectLiteral::Property::set_emit_store(
bool emit_store) {
240 emit_store_ = emit_store;
// NOTE(review): emit_store() body and parts of CalculateEmitStore (the
// hash-table declaration around orig. line 251 and closing braces) were
// elided by the extraction.
244 bool ObjectLiteral::Property::emit_store() {
// CalculateEmitStore: walk properties back-to-front; for a key seen again
// later (i.e. already in the table) the earlier store is dead and its
// emit_store flag is cleared.
249 void ObjectLiteral::CalculateEmitStore(Zone* zone) {
250 ZoneAllocationPolicy allocator(zone);
254 for (
int i = properties()->length() - 1; i >= 0; i--) {
255 ObjectLiteral::Property*
property = properties()->at(i);
256 Literal* literal =
property->key();
// Computed-name properties (null key) are never deduplicated.
257 if (literal->value()->IsNull())
continue;
258 uint32_t hash = literal->Hash();
261 if ((property->kind() == ObjectLiteral::Property::MATERIALIZED_LITERAL ||
262 property->kind() == ObjectLiteral::Property::COMPUTED) &&
263 table.Lookup(literal, hash,
false, allocator) !=
NULL) {
264 property->set_emit_store(
false);
// Insert (or find) the key so later (earlier-in-source) duplicates see it.
267 table.Lookup(literal, hash,
true, allocator);
273 bool ObjectLiteral::IsBoilerplateProperty(ObjectLiteral::Property* property) {
274 return property !=
NULL &&
275 property->kind() != ObjectLiteral::Property::PROTOTYPE;
// NOTE(review): fragments of ObjectLiteral::BuildConstantProperties,
// ArrayLiteral::BuildConstantElements, GetBoilerplateValue and
// MaterializedLiteral::BuildConstants. Many interior lines (local
// declarations, closing braces, else-branches) were elided; restore from the
// original file before compiling.
// BuildConstantProperties: flatten boilerplate key/value pairs into a fixed
// array, tracking depth, simplicity and whether doubles may be stored.
279 void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
280 if (!constant_properties_.is_null())
return;
283 Handle<FixedArray> constant_properties = isolate->factory()->NewFixedArray(
284 boilerplate_properties_ * 2,
TENURED);
288 bool is_simple =
true;
290 uint32_t max_element_index = 0;
291 uint32_t elements = 0;
292 for (
int i = 0; i < properties()->length(); i++) {
293 ObjectLiteral::Property*
property = properties()->at(i);
294 if (!IsBoilerplateProperty(property)) {
// Nested literals are built first; depth accumulates transitively.
298 MaterializedLiteral* m_literal =
property->value()->AsMaterializedLiteral();
299 if (m_literal !=
NULL) {
300 m_literal->BuildConstants(isolate);
301 if (m_literal->depth() >= depth_acc) depth_acc = m_literal->depth() + 1;
307 Handle<Object> key =
property->key()->value();
308 Handle<Object> value = GetBoilerplateValue(property->value(), isolate);
315 if (FLAG_track_double_fields &&
316 (value->IsNumber() || value->IsUninitialized())) {
317 may_store_doubles_ =
true;
320 is_simple = is_simple && !value->IsUninitialized();
// Track the largest array-index-like key to decide fast/slow elements.
326 uint32_t element_index = 0;
329 && element_index > max_element_index) {
330 max_element_index = element_index;
332 }
else if (key->IsSmi()) {
335 && static_cast<uint32_t>(key_value) > max_element_index) {
336 max_element_index = key_value;
342 constant_properties->set(position++, *key);
343 constant_properties->set(position++, *value);
346 constant_properties_ = constant_properties;
348 (max_element_index <= 32) || ((2 * elements) >= max_element_index);
349 set_is_simple(is_simple);
350 set_depth(depth_acc);
// BuildConstantElements: analogous flattening for array literals.
354 void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
355 if (!constant_elements_.is_null())
return;
358 Handle<JSArray> array =
363 bool is_simple =
true;
365 bool is_holey =
false;
366 for (
int i = 0, n = values()->length(); i < n; i++) {
367 Expression* element = values()->at(i);
368 MaterializedLiteral* m_literal = element->AsMaterializedLiteral();
369 if (m_literal !=
NULL) {
370 m_literal->BuildConstants(isolate);
371 if (m_literal->depth() + 1 > depth_acc) {
372 depth_acc = m_literal->depth() + 1;
375 Handle<Object> boilerplate_value = GetBoilerplateValue(element, isolate);
376 if (boilerplate_value->IsTheHole()) {
378 }
else if (boilerplate_value->IsUninitialized()) {
387 Handle<FixedArrayBase> element_values(array->elements());
// Simple shallow smi/object arrays can share a copy-on-write backing store.
391 if (is_simple && depth_acc == 1 && values()->length() > 0 &&
392 array->HasFastSmiOrObjectElements()) {
393 element_values->set_map(isolate->heap()->fixed_cow_array_map());
398 Handle<FixedArray>
literals = isolate->factory()->NewFixedArray(2,
TENURED);
404 literals->set(1, *element_values);
407 set_is_simple(is_simple);
408 set_depth(depth_acc);
// GetBoilerplateValue fragment (signature elided): literals yield their
// value, everything else the uninitialized sentinel.
414 if (expression->AsLiteral() !=
NULL) {
415 return expression->AsLiteral()->value();
420 return isolate->
factory()->uninitialized_value();
// MaterializedLiteral::BuildConstants dispatch fragment (signature elided).
425 if (IsArrayLiteral()) {
426 return AsArrayLiteral()->BuildConstantElements(isolate);
428 if (IsObjectLiteral()) {
429 return AsObjectLiteral()->BuildConstantProperties(isolate);
431 ASSERT(IsRegExpLiteral());
436 void TargetCollector::AddTarget(Label* target,
Zone* zone) {
438 int length = targets_.length();
439 for (
int i = 0; i < length; i++) {
440 if (targets_[i] == target)
return;
442 targets_.Add(target, zone);
446 void UnaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
452 set_to_boolean_types(oracle->ToBooleanTypes(expression()->test_id()));
456 void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
462 set_to_boolean_types(oracle->ToBooleanTypes(right()->test_id()));
// NOTE(review): only the signature of ResultOverwriteAllowed survives; its
// body (a switch over op_, orig. lines 467-489) was elided.
466 bool BinaryOperation::ResultOverwriteAllowed() {
491 static bool IsTypeof(Expression* expr) {
492 UnaryOperation* maybe_unary = expr->AsUnaryOperation();
493 return maybe_unary !=
NULL && maybe_unary->op() == Token::TYPEOF;
// NOTE(review): fragment of MatchLiteralCompareTypeof — the middle
// parameters (op, right, expr), the equality/type-check condition and the
// return statements were elided by the extraction.
498 static bool MatchLiteralCompareTypeof(Expression* left,
502 Handle<String>*
check) {
// On a match, *expr receives the typeof operand.
504 *expr = left->AsUnaryOperation()->expression();
512 bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
513 Handle<String>* check) {
514 return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
515 MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
// NOTE(review): fragment of IsVoidOfLiteral — the `op() == Token::VOID`
// conjunct (orig. line 522) was elided, so the visible condition is
// incomplete. Also includes the truncated MatchLiteralCompareUndefined
// signature.
519 static bool IsVoidOfLiteral(Expression* expr) {
520 UnaryOperation* maybe_unary = expr->AsUnaryOperation();
521 return maybe_unary !=
NULL &&
523 maybe_unary->expression()->AsLiteral() !=
NULL;
529 static bool MatchLiteralCompareUndefined(Expression* left,
546 bool CompareOperation::IsLiteralCompareUndefined(
547 Expression** expr, Isolate* isolate) {
548 return MatchLiteralCompareUndefined(left_, op_, right_, expr, isolate) ||
549 MatchLiteralCompareUndefined(right_, op_, left_, expr, isolate);
// NOTE(review): only the first line of MatchLiteralCompareNull's signature
// survives; remaining parameters and body were elided.
554 static bool MatchLiteralCompareNull(Expression* left,
566 bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
567 return MatchLiteralCompareNull(left_, op_, right_, expr) ||
568 MatchLiteralCompareNull(right_, op_, left_, expr);
// NOTE(review): fragments of VariableDeclaration::IsInlineable (signature
// elided), FunctionDeclaration::IsInlineable (body elided),
// Call::ComputeFeedbackSlotCount, Call::GetCallType, Call::ComputeGlobalTarget,
// CallNew::RecordTypeFeedback and ObjectLiteral::Property::RecordTypeFeedback.
// Many interior lines are missing; restore from the original file.
576 return proxy()->var()->IsStackAllocated();
579 bool FunctionDeclaration::IsInlineable()
const {
// Feedback slot accounting for calls (body partially elided).
595 int Call::ComputeFeedbackSlotCount(
Isolate* isolate) {
596 CallType call_type = GetCallType(isolate);
597 if (call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL) {
// Classify a call by its callee expression.
606 Call::CallType Call::GetCallType(Isolate* isolate)
const {
607 VariableProxy* proxy = expression()->AsVariableProxy();
609 if (proxy->var()->is_possibly_eval(isolate)) {
610 return POSSIBLY_EVAL_CALL;
611 }
else if (proxy->var()->IsUnallocated()) {
613 }
else if (proxy->var()->IsLookupSlot()) {
614 return LOOKUP_SLOT_CALL;
618 Property*
property = expression()->AsProperty();
619 return property !=
NULL ? PROPERTY_CALL : OTHER_CALL;
// Resolve a global call target through its property cell.
623 bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
624 LookupResult* lookup) {
627 ASSERT(lookup->IsFound() &&
628 lookup->type() ==
NORMAL &&
629 lookup->holder() == *global);
630 cell_ = Handle<Cell>(global->GetPropertyCell(lookup));
631 if (cell_->value()->IsJSFunction()) {
// Only tenured (old-space) functions are stable enough to cache.
635 if (!lookup->isolate()->heap()->InNewSpace(*candidate)) {
// CallNew type feedback: the allocation-site slot depends on the
// pretenuring flag (assignment on orig. line 648 elided).
644 void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
645 int allocation_site_feedback_slot = FLAG_pretenuring_call_new
646 ? AllocationSiteFeedbackSlot()
647 : CallNewFeedbackSlot();
649 oracle->GetCallNewAllocationSite(allocation_site_feedback_slot);
650 is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackSlot());
651 if (is_monomorphic_) {
652 target_ = oracle->GetCallNewTarget(CallNewFeedbackSlot());
653 if (!allocation_site_.is_null()) {
654 elements_kind_ = allocation_site_->GetElementsKind();
// Receiver-map feedback for an object literal property store.
660 void ObjectLiteral::Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
661 TypeFeedbackId
id = key()->LiteralFeedbackId();
663 oracle->CollectReceiverTypes(
id, &maps);
664 receiver_type_ = maps.length() == 1 ? maps.at(0)
672 void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
673 for (
int i = 0; i < declarations->length(); i++) {
674 Visit(declarations->at(i));
// NOTE(review): fragment of VisitStatements — the actual Visit(stmt) call
// (orig. line 682) and closing braces were elided. Visiting stops after a
// jump statement since the rest of the list is unreachable.
679 void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
680 for (
int i = 0; i < statements->length(); i++) {
681 Statement* stmt = statements->at(i);
683 if (stmt->IsJump())
break;
688 void AstVisitor::VisitExpressions(ZoneList<Expression*>* expressions) {
689 for (
int i = 0; i < expressions->length(); i++) {
694 Expression* expression = expressions->at(i);
695 if (expression !=
NULL) Visit(expression);
// NOTE(review): fragments of the MAKE_ACCEPT / MAKE_TYPE_CASE macros (each
// is missing its closing line and the FOR_EACH_REG_EXP_TREE_TYPE expansion)
// and of ListCaptureRegisters (the `Interval result;` declaration and the
// return were elided). No comments are inserted between the surviving
// backslash-continued macro lines.
703 #define MAKE_ACCEPT(Name) \
704 void* RegExp##Name::Accept(RegExpVisitor* visitor, void* data) { \
705 return visitor->Visit##Name(this, data); \
710 #define MAKE_TYPE_CASE(Name) \
711 RegExp##Name* RegExpTree::As##Name() { \
714 bool RegExpTree::Is##Name() { return false; }
716 #undef MAKE_TYPE_CASE
718 #define MAKE_TYPE_CASE(Name) \
719 RegExp##Name* RegExp##Name::As##Name() { \
722 bool RegExp##Name::Is##Name() { return true; }
724 #undef MAKE_TYPE_CASE
727 static Interval ListCaptureRegisters(ZoneList<RegExpTree*>* children) {
729 for (
int i = 0; i < children->length(); i++)
730 result = result.Union(children->at(i)->CaptureRegisters());
735 Interval RegExpAlternative::CaptureRegisters() {
736 return ListCaptureRegisters(nodes());
740 Interval RegExpDisjunction::CaptureRegisters() {
741 return ListCaptureRegisters(alternatives());
745 Interval RegExpLookahead::CaptureRegisters() {
746 return body()->CaptureRegisters();
750 Interval RegExpCapture::CaptureRegisters() {
751 Interval
self(StartRegister(index()), EndRegister(index()));
752 return self.Union(body()->CaptureRegisters());
756 Interval RegExpQuantifier::CaptureRegisters() {
757 return body()->CaptureRegisters();
761 bool RegExpAssertion::IsAnchoredAtStart() {
762 return assertion_type() == RegExpAssertion::START_OF_INPUT;
766 bool RegExpAssertion::IsAnchoredAtEnd() {
767 return assertion_type() == RegExpAssertion::END_OF_INPUT;
771 bool RegExpAlternative::IsAnchoredAtStart() {
772 ZoneList<RegExpTree*>* nodes = this->nodes();
773 for (
int i = 0; i < nodes->length(); i++) {
774 RegExpTree* node = nodes->at(i);
775 if (node->IsAnchoredAtStart()) {
return true; }
776 if (node->max_match() > 0) {
return false; }
782 bool RegExpAlternative::IsAnchoredAtEnd() {
783 ZoneList<RegExpTree*>* nodes = this->nodes();
784 for (
int i = nodes->length() - 1; i >= 0; i--) {
785 RegExpTree* node = nodes->at(i);
786 if (node->IsAnchoredAtEnd()) {
return true; }
787 if (node->max_match() > 0) {
return false; }
793 bool RegExpDisjunction::IsAnchoredAtStart() {
794 ZoneList<RegExpTree*>* alternatives = this->alternatives();
795 for (
int i = 0; i < alternatives->length(); i++) {
796 if (!alternatives->at(i)->IsAnchoredAtStart())
803 bool RegExpDisjunction::IsAnchoredAtEnd() {
804 ZoneList<RegExpTree*>* alternatives = this->alternatives();
805 for (
int i = 0; i < alternatives->length(); i++) {
806 if (!alternatives->at(i)->IsAnchoredAtEnd())
813 bool RegExpLookahead::IsAnchoredAtStart() {
814 return is_positive() && body()->IsAnchoredAtStart();
818 bool RegExpCapture::IsAnchoredAtStart() {
819 return body()->IsAnchoredAtStart();
823 bool RegExpCapture::IsAnchoredAtEnd() {
824 return body()->IsAnchoredAtEnd();
// NOTE(review): fragments of the RegExpUnparser class declaration (access
// specifiers, MAKE_CASE expansions, member list partially elided), its
// constructor (body elided) and the VisitDisjunction / VisitAlternative
// visitors (separator output and return statements elided).
// RegExpUnparser: debug pretty-printer that renders a RegExp AST as a
// Lisp-like string via a RegExpVisitor.
833 class RegExpUnparser
V8_FINAL :
public RegExpVisitor {
835 explicit RegExpUnparser(Zone* zone);
836 void VisitCharacterRange(CharacterRange that);
838 #define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, \
839 void* data) V8_OVERRIDE;
843 StringStream* stream() {
return &
stream_; }
844 HeapStringAllocator alloc_;
850 RegExpUnparser::RegExpUnparser(Zone* zone) :
stream_(&alloc_), zone_(zone) {
// Prints alternatives separated by the disjunction marker (elided).
854 void* RegExpUnparser::VisitDisjunction(RegExpDisjunction* that,
void* data) {
856 for (
int i = 0; i < that->alternatives()->length(); i++) {
858 that->alternatives()->at(i)->Accept(
this, data);
// Prints all nodes of an alternative in sequence.
865 void* RegExpUnparser::VisitAlternative(RegExpAlternative* that,
void* data) {
867 for (
int i = 0; i < that->nodes()->length(); i++) {
869 that->nodes()->at(i)->Accept(
this, data);
876 void RegExpUnparser::VisitCharacterRange(CharacterRange that) {
877 stream()->Add(
"%k", that.from());
878 if (!that.IsSingleton()) {
879 stream()->Add(
"-%k", that.to());
// NOTE(review): fragments of the remaining RegExpUnparser visitors, the
// ToString helper, the RegExpDisjunction / RegExpAlternative constructors,
// IncreaseBy and the CaseClause constructor. Many interior lines (returns,
// break statements, closing braces, overflow checks) were elided; restore
// from the original file before compiling.
// Character class: "[...]" (negation marker and delimiters elided).
885 void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
887 if (that->is_negated())
890 for (
int i = 0; i < that->ranges(zone_)->length(); i++) {
891 if (i > 0) stream()->Add(
" ");
892 VisitCharacterRange(that->ranges(zone_)->at(i));
// Assertions print as @^i, @$i, @^l, @$l (breaks and the boundary
// cases' output elided).
899 void* RegExpUnparser::VisitAssertion(RegExpAssertion* that,
void* data) {
900 switch (that->assertion_type()) {
901 case RegExpAssertion::START_OF_INPUT:
902 stream()->Add(
"@^i");
904 case RegExpAssertion::END_OF_INPUT:
905 stream()->Add(
"@$i");
907 case RegExpAssertion::START_OF_LINE:
908 stream()->Add(
"@^l");
910 case RegExpAssertion::END_OF_LINE:
911 stream()->Add(
"@$l");
913 case RegExpAssertion::BOUNDARY:
916 case RegExpAssertion::NON_BOUNDARY:
// Atom: prints each character of the atom's data.
924 void* RegExpUnparser::VisitAtom(RegExpAtom* that,
void* data) {
926 Vector<const uc16> chardata = that->data();
927 for (
int i = 0; i < chardata.length(); i++) {
928 stream()->Add(
"%k", chardata[i]);
// Text: a single element unparses bare, multiple are grouped (elided).
935 void* RegExpUnparser::VisitText(RegExpText* that,
void* data) {
936 if (that->elements()->length() == 1) {
937 that->elements()->at(0).tree()->Accept(
this, data);
940 for (
int i = 0; i < that->elements()->length(); i++) {
942 that->elements()->at(i).tree()->Accept(
this, data);
// Quantifier: "(# min max greediness body)".
950 void* RegExpUnparser::VisitQuantifier(RegExpQuantifier* that,
void* data) {
951 stream()->Add(
"(# %i ", that->min());
952 if (that->max() == RegExpTree::kInfinity) {
955 stream()->Add(
"%i ", that->max());
957 stream()->Add(that->is_greedy() ?
"g " : that->is_possessive() ?
"p " :
"n ");
958 that->body()->Accept(
this, data);
// Capture group: "(^ body)".
964 void* RegExpUnparser::VisitCapture(RegExpCapture* that,
void* data) {
965 stream()->Add(
"(^ ");
966 that->body()->Accept(
this, data);
// Lookahead: "(-> +/- body)".
972 void* RegExpUnparser::VisitLookahead(RegExpLookahead* that,
void* data) {
973 stream()->Add(
"(-> ");
974 stream()->Add(that->is_positive() ?
"+ " :
"- ");
975 that->body()->Accept(
this, data);
// Back reference: "(<- index)".
981 void* RegExpUnparser::VisitBackReference(RegExpBackReference* that,
983 stream()->Add(
"(<- %i)", that->index());
988 void* RegExpUnparser::VisitEmpty(RegExpEmpty* that,
void* data) {
// RegExpTree::ToString fragment (signature elided): runs the unparser
// over this tree and returns the accumulated string.
995 RegExpUnparser unparser(zone);
996 Accept(&unparser,
NULL);
997 return unparser.ToString();
// RegExpDisjunction constructor fragment (signature and the
// first_alternative declaration elided): min/max match are the min/max
// over all alternatives.
1002 : alternatives_(alternatives) {
1003 ASSERT(alternatives->length() > 1);
1005 min_match_ = first_alternative->min_match();
1006 max_match_ = first_alternative->max_match();
1007 for (
int i = 1; i < alternatives->length(); i++) {
1009 min_match_ =
Min(min_match_, alternative->
min_match());
1010 max_match_ =
Max(max_match_, alternative->
max_match());
// IncreaseBy fragment: the saturating kInfinity guard (orig. lines
// 1016-1018) was elided; only the plain addition survives.
1015 static int IncreaseBy(
int previous,
int increase) {
1019 return previous + increase;
// RegExpAlternative constructor fragment: min/max match accumulate over
// the node list via IncreaseBy.
1023 RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
1025 ASSERT(nodes->length() > 1);
1028 for (
int i = 0; i < nodes->length(); i++) {
1029 RegExpTree* node = nodes->at(i);
1030 int node_min_match = node->min_match();
1031 min_match_ = IncreaseBy(min_match_, node_min_match);
1032 int node_max_match = node->max_match();
1033 max_match_ = IncreaseBy(max_match_, node_max_match);
// CaseClause constructor fragment (label parameter and some initializers
// elided); compare/entry ids are fresh AST node ids.
1038 CaseClause::CaseClause(Zone* zone,
1040 ZoneList<Statement*>* statements,
1042 : Expression(zone, pos),
1044 statements_(statements),
1046 compare_id_(AstNode::GetNextId(zone)),
1047 entry_id_(AstNode::GetNextId(zone)) {
// NOTE(review): fragments of the AstConstructionVisitor node-classification
// macros (each #define is missing its closing `}` line and the per-node
// expansions), of VisitCallRuntime (the inline-runtime checks are elided)
// and of Literal::ToString (number formatting lines elided). No comments are
// inserted between surviving backslash-continued macro lines.
1051 #define REGULAR_NODE(NodeType) \
1052 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1053 increase_node_count(); \
1055 #define REGULAR_NODE_WITH_FEEDBACK_SLOTS(NodeType) \
1056 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1057 increase_node_count(); \
1058 add_slot_node(node); \
1060 #define DONT_OPTIMIZE_NODE(NodeType) \
1061 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1062 increase_node_count(); \
1063 set_dont_optimize_reason(k##NodeType); \
1064 add_flag(kDontInline); \
1065 add_flag(kDontSelfOptimize); \
1067 #define DONT_SELFOPTIMIZE_NODE(NodeType) \
1068 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1069 increase_node_count(); \
1070 add_flag(kDontSelfOptimize); \
1072 #define DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(NodeType) \
1073 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1074 increase_node_count(); \
1075 add_slot_node(node); \
1076 add_flag(kDontSelfOptimize); \
1078 #define DONT_CACHE_NODE(NodeType) \
1079 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1080 increase_node_count(); \
1081 set_dont_optimize_reason(k##NodeType); \
1082 add_flag(kDontInline); \
1083 add_flag(kDontSelfOptimize); \
1084 add_flag(kDontCache); \
1144 void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
1145 increase_node_count();
1146 if (node->is_jsruntime()) {
1151 (node->name()->IsOneByteEqualTo(
1162 #undef DONT_OPTIMIZE_NODE
1163 #undef DONT_SELFOPTIMIZE_NODE
1164 #undef DONT_CACHE_NODE
1167 Handle<String> Literal::ToString() {
1169 ASSERT(value_->IsNumber());
1173 if (value_->IsSmi()) {
1180 return isolate_->factory()->NewStringFromAscii(
CStrVector(str));
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
TypeFeedbackId test_id() const
#define MAKE_TYPE_CASE(Name)
#define FOR_EACH_REG_EXP_TREE_TYPE(VISIT)
byte ToBooleanTypes(TypeFeedbackId id)
ElementsKind GetPackedElementsKind(ElementsKind holey_kind)
#define DONT_SELFOPTIMIZE_NODE(NodeType)
#define DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(NodeType)
static String * cast(Object *obj)
void BuildConstants(Isolate *isolate)
static Smi * FromInt(int value)
static Handle< T > cast(Handle< S > that)
kSerializedDataOffset Object
TypeImpl< ZoneTypeConfig > Type
Location location() const
#define DECL_ACCEPT(type)
#define ASSERT(condition)
static bool IsCompileTimeValue(Expression *expression)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including on console Map counters to a file Enable debugger compile events enable GDBJIT interface(disables compacting GC)") DEFINE_bool(gdbjit_full
static SharedFunctionInfo * cast(Object *obj)
#define REGULAR_NODE_WITH_FEEDBACK_SLOTS(NodeType)
static Smi * cast(Object *object)
virtual bool IsInlineable() const
V8_INLINE bool IsNull() const
VariableProxy * proxy() const
#define REGULAR_NODE(NodeType)
static const int kInfinity
void check(i::Vector< const uint8_t > string)
#define DONT_CACHE_NODE(NodeType)
const char * DoubleToCString(double v, Vector< char > buffer)
virtual int min_match()=0
static Handle< FixedArray > GetValue(Isolate *isolate, Expression *expression)
Handle< Object > GetBoilerplateValue(Expression *expression, Isolate *isolate)
static Handle< Object > SetOwnElement(Handle< JSObject > object, uint32_t index, Handle< Object > value, StrictMode strict_mode)
bool IsUndefinedLiteral(Isolate *isolate)
static bool IsEqualityOp(Value op)
#define STATIC_ASCII_VECTOR(x)
Vector< const char > CStrVector(const char *data)
V8_INLINE bool IsString() const
static int SNPrintF(Vector< char > str, const char *format,...)
virtual void RecordToBooleanTypeFeedback(TypeFeedbackOracle *oracle)
Handle< T > handle(T *t, Isolate *isolate)
static Handle< T > null()
static void Expand(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
StringCharacterStream *const stream_
static const uint32_t kDefaultHashMapCapacity
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric literals(0o77, 0b11)") DEFINE_bool(harmony_strings
#define MAKE_ACCEPT(Name)
SmartArrayPointer< const char > ToString()
virtual int max_match()=0
TemplateHashMapImpl< ZoneAllocationPolicy > ZoneHashMap
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
#define DONT_OPTIMIZE_NODE(NodeType)
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
static JSFunction * cast(Object *obj)