#define DECL_ACCEPT(type) \
  void type::Accept(AstVisitor* v) { v->Visit##type(this); }
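// Expanding the macro by hand for one node type (illustration only),
// DECL_ACCEPT(Literal) produces the usual visitor double-dispatch hook:
//
//   void Literal::Accept(AstVisitor* v) { v->VisitLiteral(this); }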
bool Expression::IsSmiLiteral() {
  return AsLiteral() != NULL && AsLiteral()->handle()->IsSmi();
}


bool Expression::IsStringLiteral() {
  return AsLiteral() != NULL && AsLiteral()->handle()->IsString();
}


bool Expression::IsNullLiteral() {
  return AsLiteral() != NULL && AsLiteral()->handle()->IsNull();
}
// VariableProxy constructor initializers (excerpted):
      is_this_(var->is_this()),
      position_(RelocInfo::kNoPosition),
      interface_(interface) {
// Assignment constructor initializers (excerpted):
      binary_operation_(NULL),
      assignment_id_(GetNextId(isolate)),
      is_monomorphic_(false) { }
Token::Value Assignment::binary_op() const {
  switch (op_) {
    case Token::ASSIGN_BIT_OR: return Token::BIT_OR;
    case Token::ASSIGN_BIT_XOR: return Token::BIT_XOR;
    case Token::ASSIGN_BIT_AND: return Token::BIT_AND;
    case Token::ASSIGN_SHL: return Token::SHL;
    case Token::ASSIGN_SAR: return Token::SAR;
    case Token::ASSIGN_SHR: return Token::SHR;
    case Token::ASSIGN_ADD: return Token::ADD;
    case Token::ASSIGN_SUB: return Token::SUB;
    case Token::ASSIGN_MUL: return Token::MUL;
    case Token::ASSIGN_DIV: return Token::DIV;
    case Token::ASSIGN_MOD: return Token::MOD;
    default: UNREACHABLE();
  }
  return Token::ILLEGAL;
}
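// A compound assignment is compiled as the underlying binary operation plus a
// plain store; e.g. x |= y behaves like x = x | y, hence the ASSIGN_BIT_OR to
// BIT_OR mapping above.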
  } else if (value_->AsLiteral() != NULL) {
    kind_ = CONSTANT;
  }

  // From the getter/setter constructor:
  kind_ = is_getter ? GETTER : SETTER;


bool ObjectLiteral::Property::IsCompileTimeValue() {
  return kind_ == CONSTANT ||
      (kind_ == MATERIALIZED_LITERAL &&
       CompileTimeValue::IsCompileTimeValue(value_));
}


void ObjectLiteral::Property::set_emit_store(bool emit_store) {
  emit_store_ = emit_store;
}
bool IsEqualString(void* first, void* second) {
  ASSERT((*reinterpret_cast<String**>(first))->IsString());
  ASSERT((*reinterpret_cast<String**>(second))->IsString());
  Handle<String> h1(reinterpret_cast<String**>(first));
  Handle<String> h2(reinterpret_cast<String**>(second));
  return (*h1)->Equals(*h2);
}


bool IsEqualNumber(void* first, void* second) {
  ASSERT((*reinterpret_cast<Object**>(first))->IsNumber());
  ASSERT((*reinterpret_cast<Object**>(second))->IsNumber());
  Handle<Object> h1(reinterpret_cast<Object**>(first));
  Handle<Object> h2(reinterpret_cast<Object**>(second));
  if (h1->IsSmi()) {
    return h2->IsSmi() && *h1 == *h2;
  }
  if (h2->IsSmi()) return false;
  Handle<HeapNumber> n1 = Handle<HeapNumber>::cast(h1);
  Handle<HeapNumber> n2 = Handle<HeapNumber>::cast(h2);
  return n1->value() == n2->value();
}
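// A brief aside: Smis encode their integer value directly in the tagged
// pointer, so the identity test *h1 == *h2 above doubles as numeric equality;
// heap numbers live in separate objects and need the value comparison.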
void ObjectLiteral::CalculateEmitStore(Zone* zone) {
  ZoneAllocationPolicy allocator(zone);
  ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
                    allocator);
  for (int i = properties()->length() - 1; i >= 0; i--) {
    ObjectLiteral::Property* property = properties()->at(i);
    Literal* literal = property->key();
    if (literal->handle()->IsNull()) continue;
    uint32_t hash = literal->Hash();
    // If the key of a computed property is in the table, do not emit
    // a store for the property later.
    if (property->kind() == ObjectLiteral::Property::COMPUTED &&
        table.Lookup(literal, hash, false, allocator) != NULL) {
      property->set_emit_store(false);
    } else {
      // Add key to the table.
      table.Lookup(literal, hash, true, allocator);
    }
  }
}
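// Illustrative JavaScript: in { a: f(), a: g() } both properties are COMPUTED
// and only the final store to the duplicate key 'a' is observable, so the
// backward walk above marks the earlier duplicate with set_emit_store(false).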
void TargetCollector::AddTarget(Label* target, Zone* zone) {
  // Add the label to the collector, but discard duplicates.
  int length = targets_.length();
  for (int i = 0; i < length; i++) {
    if (targets_[i] == target) return;
  }
  targets_.Add(target, zone);
}
static bool IsTypeof(Expression* expr) {
  UnaryOperation* maybe_unary = expr->AsUnaryOperation();
  return maybe_unary != NULL && maybe_unary->op() == Token::TYPEOF;
}


// Check for the pattern: typeof <expression> equals <string literal>.
static bool MatchLiteralCompareTypeof(Expression* left,
                                      Token::Value op,
                                      Expression* right,
                                      Expression** expr,
                                      Handle<String>* check) {
  if (IsTypeof(left) && right->IsStringLiteral() && Token::IsEqualityOp(op)) {
    *expr = left->AsUnaryOperation()->expression();
    *check = Handle<String>::cast(right->AsLiteral()->handle());
    return true;
  }
  return false;
}


bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
                                              Handle<String>* check) {
  return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
         MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
}
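// Illustrative JavaScript (either operand order is matched):
//   typeof x == 'function'
// binds *expr to the inner expression x and *check to the string literal, so
// the code generators can emit a specialized type check instead of a generic
// comparison.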
// Check for the pattern: void <literal> equals <expression>.
static bool IsVoidOfLiteral(Expression* expr) {
  UnaryOperation* maybe_unary = expr->AsUnaryOperation();
  return maybe_unary != NULL &&
         maybe_unary->op() == Token::VOID &&
         maybe_unary->expression()->AsLiteral() != NULL;
}


static bool MatchLiteralCompareUndefined(Expression* left,
                                         Token::Value op,
                                         Expression* right,
                                         Expression** expr) {
  if (IsVoidOfLiteral(left) && Token::IsEqualityOp(op)) {
    *expr = right;
    return true;
  }
  return false;
}


bool CompareOperation::IsLiteralCompareUndefined(Expression** expr) {
  return MatchLiteralCompareUndefined(left_, op_, right_, expr) ||
         MatchLiteralCompareUndefined(right_, op_, left_, expr);
}
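// Illustrative JavaScript: x === void 0 (in either operand order) is
// recognized here, binding *expr to x for a specialized undefined check.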
static bool MatchLiteralCompareNull(Expression* left,
                                    Token::Value op,
                                    Expression* right,
                                    Expression** expr) {
  if (left->IsNullLiteral() && Token::IsEqualityOp(op)) {
    *expr = right;
    return true;
  }
  return false;
}


bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
  return MatchLiteralCompareNull(left_, op_, right_, expr) ||
         MatchLiteralCompareNull(right_, op_, left_, expr);
}
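// Illustrative JavaScript: x == null is recognized here, binding *expr to x.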
bool Declaration::IsInlineable() const {
  return proxy()->var()->IsStackAllocated();
}
void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone) {
  // Record type feedback from the oracle in the AST.
  is_uninitialized_ = oracle->LoadIsUninitialized(this);
  if (is_uninitialized_) return;

  is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this);
  receiver_types_.Clear();
  if (key()->IsPropertyName()) {
    if (oracle->LoadIsBuiltin(this, Builtins::kLoadIC_ArrayLength)) {
      is_array_length_ = true;
    } else if (oracle->LoadIsBuiltin(this, Builtins::kLoadIC_StringLength)) {
      is_string_length_ = true;
    } else if (oracle->LoadIsBuiltin(this,
                                     Builtins::kLoadIC_FunctionPrototype)) {
      is_function_prototype_ = true;
    }
  } else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) {
    is_string_access_ = true;
  } else if (is_monomorphic_) {
    receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this), zone);
  }
}
void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone) {
  Property* prop = target()->AsProperty();
  ASSERT(prop != NULL);
  is_monomorphic_ = oracle->StoreIsMonomorphicNormal(AssignmentFeedbackId());
  receiver_types_.Clear();
  if (prop->key()->IsPropertyName()) {
    // Record receiver type for monomorphic named stores.
    Literal* lit_key = prop->key()->AsLiteral();
    ASSERT(lit_key != NULL && lit_key->handle()->IsString());
    Handle<String> name = Handle<String>::cast(lit_key->handle());
    oracle->StoreReceiverTypes(this, name, &receiver_types_);
  } else if (is_monomorphic_) {
    // Record receiver type for monomorphic keyed stores.
    receiver_types_.Add(
        oracle->StoreMonomorphicReceiverType(AssignmentFeedbackId()), zone);
  }
}
void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle,
                                        Zone* zone) {
  is_monomorphic_ = oracle->StoreIsMonomorphicNormal(CountStoreFeedbackId());
  receiver_types_.Clear();
  if (is_monomorphic_) {
    // Record receiver type for monomorphic keyed stores.
    receiver_types_.Add(
        oracle->StoreMonomorphicReceiverType(CountStoreFeedbackId()), zone);
  }
}
void CaseClause::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
  TypeInfo info = oracle->SwitchType(this);
  if (info.IsSmi()) {
    compare_type_ = SMI_ONLY;
  } else if (info.IsSymbol()) {
    compare_type_ = SYMBOL_ONLY;
  } else if (info.IsNonSymbol()) {
    compare_type_ = STRING_ONLY;
  } else if (info.IsNonPrimitive()) {
    compare_type_ = OBJECT_ONLY;
  } else {
    ASSERT(compare_type_ == NONE);
  }
}
bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
  // If there is an interceptor, we can't compute the target for a direct call.
  if (type->has_named_interceptor()) return false;

  LookupResult lookup(type->GetIsolate());
  while (true) {
    type->LookupDescriptor(NULL, *name, &lookup);
    if (lookup.IsFound()) {
      switch (lookup.type()) {
        // ... (CONSTANT_FUNCTION and the other descriptor kinds elided)
      }
    }
    // If we reach the end of the prototype chain, we don't know the target.
    if (!type->prototype()->IsJSObject()) return false;
    // Go up the prototype chain, recording where we are currently.
    holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
    if (!holder_->HasFastProperties()) return false;
    type = Handle<Map>(holder()->map());
  }
}
bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
                               LookupResult* lookup) {
  target_ = Handle<JSFunction>::null();
  cell_ = Handle<JSGlobalPropertyCell>::null();
  ASSERT(lookup->IsFound() &&
         lookup->type() == NORMAL &&
         lookup->holder() == *global);
  cell_ = Handle<JSGlobalPropertyCell>(global->GetPropertyCell(lookup));
  if (cell_->value()->IsJSFunction()) {
    Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
    // If the function is in new space we assume it's more likely to
    // change and thus prefer the general IC code.
    if (!HEAP->InNewSpace(*candidate)) {
      target_ = candidate;
      return true;
    }
  }
  return false;
}
void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle, CallKind call_kind) {
  is_monomorphic_ = oracle->CallIsMonomorphic(this);
  Property* property = expression()->AsProperty();
  if (property == NULL) {
    // Function call.  Specialize for monomorphic calls.
    if (is_monomorphic_) target_ = oracle->GetCallTarget(this);
  } else {
    // Method call.  Specialize for the receiver types seen at runtime.
    Literal* key = property->key()->AsLiteral();
    ASSERT(key != NULL && key->handle()->IsString());
    Handle<String> name = Handle<String>::cast(key->handle());
    receiver_types_.Clear();
    oracle->CallReceiverTypes(this, name, call_kind, &receiver_types_);
#ifdef DEBUG
    if (FLAG_enable_slow_asserts) {
      int length = receiver_types_.length();
      for (int i = 0; i < length; i++) {
        Handle<Map> map = receiver_types_.at(i);
        ASSERT(!map.is_null() && *map != NULL);
      }
    }
#endif
    check_type_ = oracle->GetCallCheckType(this);
    if (is_monomorphic_) {
      Handle<Map> map;
      if (receiver_types_.length() > 0) {
        ASSERT(check_type_ == RECEIVER_MAP_CHECK);
        map = receiver_types_.at(0);
      } else {
        holder_ = oracle->GetPrototypeForPrimitiveCheck(check_type_);
        map = Handle<Map>(holder_->map());
      }
      is_monomorphic_ = ComputeTarget(map, name);
    }
  }
}
void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
  is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
  if (is_monomorphic_) {
    target_ = oracle->GetCallNewTarget(this);
  }
}


void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
  TypeInfo info = oracle->CompareType(this);
  if (info.IsSmi()) {
    compare_type_ = SMI_ONLY;
  } else if (info.IsNonPrimitive()) {
    compare_type_ = OBJECT_ONLY;
  } else {
    ASSERT(compare_type_ == NONE);
  }
}
bool AstVisitor::CheckStackOverflow() {
  if (stack_overflow_) return true;
  StackLimitCheck check(isolate_);
  if (!check.HasOverflowed()) return false;
  return (stack_overflow_ = true);
}
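// Note that stack_overflow_ is sticky: once a visitor has overflowed, every
// subsequent CheckStackOverflow() call returns true without probing the stack
// limit again, so the traversal unwinds quickly.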
void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
  for (int i = 0; i < declarations->length(); i++) {
    Visit(declarations->at(i));
  }
}


void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Visit(statements->at(i));
  }
}


void AstVisitor::VisitExpressions(ZoneList<Expression*>* expressions) {
  for (int i = 0; i < expressions->length(); i++) {
    // The variable statement visiting code may pass NULL expressions
    // to this code; skip them.
    Expression* expression = expressions->at(i);
    if (expression != NULL) Visit(expression);
  }
}
#define MAKE_ACCEPT(Name)                                          \
  void* RegExp##Name::Accept(RegExpVisitor* visitor, void* data) { \
    return visitor->Visit##Name(this, data);                       \
  }
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_ACCEPT)
#undef MAKE_ACCEPT


#define MAKE_TYPE_CASE(Name)                                       \
  RegExp##Name* RegExpTree::As##Name() {                           \
    return NULL;                                                   \
  }                                                                \
  bool RegExpTree::Is##Name() { return false; }
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
#undef MAKE_TYPE_CASE


#define MAKE_TYPE_CASE(Name)                                       \
  RegExp##Name* RegExp##Name::As##Name() {                         \
    return this;                                                   \
  }                                                                \
  bool RegExp##Name::Is##Name() { return true; }
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
#undef MAKE_TYPE_CASE
static Interval ListCaptureRegisters(ZoneList<RegExpTree*>* children) {
  Interval result = Interval::Empty();
  for (int i = 0; i < children->length(); i++)
    result = result.Union(children->at(i)->CaptureRegisters());
  return result;
}


Interval RegExpAlternative::CaptureRegisters() {
  return ListCaptureRegisters(nodes());
}


Interval RegExpDisjunction::CaptureRegisters() {
  return ListCaptureRegisters(alternatives());
}


Interval RegExpLookahead::CaptureRegisters() {
  return body()->CaptureRegisters();
}


Interval RegExpCapture::CaptureRegisters() {
  Interval self(StartRegister(index()), EndRegister(index()));
  return self.Union(body()->CaptureRegisters());
}


Interval RegExpQuantifier::CaptureRegisters() {
  return body()->CaptureRegisters();
}
bool RegExpAlternative::IsAnchoredAtStart() {
  ZoneList<RegExpTree*>* nodes = this->nodes();
  for (int i = 0; i < nodes->length(); i++) {
    RegExpTree* node = nodes->at(i);
    if (node->IsAnchoredAtStart()) { return true; }
    if (node->max_match() > 0) { return false; }
  }
  return false;
}


bool RegExpAlternative::IsAnchoredAtEnd() {
  ZoneList<RegExpTree*>* nodes = this->nodes();
  for (int i = nodes->length() - 1; i >= 0; i--) {
    RegExpTree* node = nodes->at(i);
    if (node->IsAnchoredAtEnd()) { return true; }
    if (node->max_match() > 0) { return false; }
  }
  return false;
}
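// In other words, an alternative such as /^ab/ is anchored at the start
// because the ^ assertion precedes anything that can consume characters,
// while in /a^b/ the atom 'a' (max_match() > 0) comes first, so the
// alternative is not considered anchored.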
bool RegExpDisjunction::IsAnchoredAtStart() {
  ZoneList<RegExpTree*>* alternatives = this->alternatives();
  for (int i = 0; i < alternatives->length(); i++) {
    if (!alternatives->at(i)->IsAnchoredAtStart())
      return false;
  }
  return true;
}


bool RegExpDisjunction::IsAnchoredAtEnd() {
  ZoneList<RegExpTree*>* alternatives = this->alternatives();
  for (int i = 0; i < alternatives->length(); i++) {
    if (!alternatives->at(i)->IsAnchoredAtEnd())
      return false;
  }
  return true;
}


bool RegExpLookahead::IsAnchoredAtStart() {
  return is_positive() && body()->IsAnchoredAtStart();
}


bool RegExpCapture::IsAnchoredAtStart() {
  return body()->IsAnchoredAtStart();
}


bool RegExpCapture::IsAnchoredAtEnd() {
  return body()->IsAnchoredAtEnd();
}
class RegExpUnparser: public RegExpVisitor {
 public:
  explicit RegExpUnparser(Zone* zone);
  void VisitCharacterRange(CharacterRange that);
  SmartArrayPointer<const char> ToString(Zone* zone);
#define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, void* data);
  FOR_EACH_REG_EXP_TREE_TYPE(MAKE_CASE)
#undef MAKE_CASE
 private:
  StringStream* stream() { return &stream_; }
  HeapStringAllocator alloc_;
  StringStream stream_;
  Zone* zone_;
};
void* RegExpUnparser::VisitDisjunction(RegExpDisjunction* that, void* data) {
  stream()->Add("(|");
  for (int i = 0; i < that->alternatives()->length(); i++) {
    stream()->Add(" ");
    that->alternatives()->at(i)->Accept(this, data);
  }
  stream()->Add(")");
  return NULL;
}
void* RegExpUnparser::VisitAlternative(RegExpAlternative* that, void* data) {
  stream()->Add("(:");
  for (int i = 0; i < that->nodes()->length(); i++) {
    stream()->Add(" ");
    that->nodes()->at(i)->Accept(this, data);
  }
  stream()->Add(")");
  return NULL;
}
void RegExpUnparser::VisitCharacterRange(CharacterRange that) {
  stream()->Add("%k", that.from());
  if (!that.IsSingleton()) {
    stream()->Add("-%k", that.to());
  }
}
void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
                                          void* data) {
  if (that->is_negated()) stream()->Add("^");
  stream()->Add("[");
  for (int i = 0; i < that->ranges(zone_)->length(); i++) {
    if (i > 0) stream()->Add(" ");
    VisitCharacterRange(that->ranges(zone_)->at(i));
  }
  stream()->Add("]");
  return NULL;
}
void* RegExpUnparser::VisitAssertion(RegExpAssertion* that, void* data) {
  switch (that->type()) {
    case RegExpAssertion::START_OF_INPUT:
      stream()->Add("@^i");
      break;
    case RegExpAssertion::END_OF_INPUT:
      stream()->Add("@$i");
      break;
    case RegExpAssertion::START_OF_LINE:
      stream()->Add("@^l");
      break;
    case RegExpAssertion::END_OF_LINE:
      stream()->Add("@$l");
      break;
    // ... (the boundary assertions are handled analogously)
  }
  return NULL;
}
void* RegExpUnparser::VisitAtom(RegExpAtom* that, void* data) {
  stream()->Add("'");
  Vector<const uc16> chardata = that->data();
  for (int i = 0; i < chardata.length(); i++) {
    stream()->Add("%k", chardata[i]);
  }
  stream()->Add("'");
  return NULL;
}
void* RegExpUnparser::VisitText(RegExpText* that, void* data) {
  if (that->elements()->length() == 1) {
    that->elements()->at(0).data.u_atom->Accept(this, data);
  } else {
    stream()->Add("(!");
    for (int i = 0; i < that->elements()->length(); i++) {
      stream()->Add(" ");
      that->elements()->at(i).data.u_atom->Accept(this, data);
    }
    stream()->Add(")");
  }
  return NULL;
}
void* RegExpUnparser::VisitQuantifier(RegExpQuantifier* that, void* data) {
  stream()->Add("(# %i ", that->min());
  if (that->max() == RegExpTree::kInfinity) {
    stream()->Add("- ");
  } else {
    stream()->Add("%i ", that->max());
  }
  stream()->Add(that->is_greedy() ? "g " : that->is_possessive() ? "p " : "n ");
  that->body()->Accept(this, data);
  stream()->Add(")");
  return NULL;
}
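// For instance, the quantifier in /a{2,3}/ would be unparsed as
//   (# 2 3 g 'a')
// with "g" marking the default greedy mode; "-" replaces the upper bound
// when it is unbounded, as in /a{2,}/.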
void* RegExpUnparser::VisitCapture(RegExpCapture* that, void* data) {
  stream()->Add("(^ ");
  that->body()->Accept(this, data);
  stream()->Add(")");
  return NULL;
}
void* RegExpUnparser::VisitLookahead(RegExpLookahead* that, void* data) {
  stream()->Add("(-> ");
  stream()->Add(that->is_positive() ? "+ " : "- ");
  that->body()->Accept(this, data);
  stream()->Add(")");
  return NULL;
}
void* RegExpUnparser::VisitBackReference(RegExpBackReference* that,
                                         void* data) {
  stream()->Add("(<- %i)", that->index());
  return NULL;
}
void* RegExpUnparser::VisitEmpty(RegExpEmpty* that, void* data) {
  stream()->Add("%%");
  return NULL;
}
RegExpDisjunction::RegExpDisjunction(ZoneList<RegExpTree*>* alternatives)
    : alternatives_(alternatives) {
  ASSERT(alternatives->length() > 1);
  RegExpTree* first_alternative = alternatives->at(0);
  min_match_ = first_alternative->min_match();
  max_match_ = first_alternative->max_match();
  for (int i = 1; i < alternatives->length(); i++) {
    RegExpTree* alternative = alternatives->at(i);
    min_match_ = Min(min_match_, alternative->min_match());
    max_match_ = Max(max_match_, alternative->max_match());
  }
}
static int IncreaseBy(int previous, int increase) {
  if (RegExpTree::kInfinity - previous < increase) {
    return RegExpTree::kInfinity;
  } else {
    return previous + increase;
  }
}
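// IncreaseBy is saturating addition: match-length bounds are capped at
// RegExpTree::kInfinity, so adding e.g. the unbounded max_match of /a*/ to
// that of /b/ stays at kInfinity instead of overflowing the int.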
RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
    : nodes_(nodes) {
  ASSERT(nodes->length() > 1);
  min_match_ = 0;
  max_match_ = 0;
  for (int i = 0; i < nodes->length(); i++) {
    RegExpTree* node = nodes->at(i);
    int node_min_match = node->min_match();
    min_match_ = IncreaseBy(min_match_, node_min_match);
    int node_max_match = node->max_match();
    max_match_ = IncreaseBy(max_match_, node_max_match);
  }
}
CaseClause::CaseClause(Isolate* isolate,
                       Expression* label,
                       ZoneList<Statement*>* statements,
                       int pos)
    : label_(label),
      statements_(statements),
      position_(pos),
      compare_type_(NONE),
      compare_id_(AstNode::GetNextId(isolate)),
      entry_id_(AstNode::GetNextId(isolate)) {
}
#define REGULAR_NODE(NodeType) \
  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
    increase_node_count(); \
  }
#define DONT_OPTIMIZE_NODE(NodeType) \
  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
    increase_node_count(); \
    add_flag(kDontOptimize); \
    add_flag(kDontInline); \
    add_flag(kDontSelfOptimize); \
  }
#define DONT_INLINE_NODE(NodeType) \
  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
    increase_node_count(); \
    add_flag(kDontInline); \
  }
#define DONT_SELFOPTIMIZE_NODE(NodeType) \
  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
    increase_node_count(); \
    add_flag(kDontSelfOptimize); \
  }
#define DONT_CACHE_NODE(NodeType) \
  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
    increase_node_count(); \
    add_flag(kDontOptimize); \
    add_flag(kDontInline); \
    add_flag(kDontSelfOptimize); \
    add_flag(kDontCache); \
  }
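// Each concrete node type is then registered with exactly one of these
// macros; for example, DONT_INLINE_NODE(ArrayLiteral) later in this file
// stamps out a Visit method that counts the node and sets kDontInline.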
void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
  increase_node_count();
  if (node->is_jsruntime()) {
    // Don't try to inline JS runtime calls; they are not optimized.
    add_flag(kDontInline);
  } else if (node->function()->intrinsic_type == Runtime::INLINE &&
      (node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
       node->name()->IsEqualTo(CStrVector("_Arguments")))) {
    // %_ArgumentsLength and %_Arguments need a stack frame, which an
    // inlined function does not have.
    add_flag(kDontInline);
  }
}
#undef REGULAR_NODE
#undef DONT_OPTIMIZE_NODE
#undef DONT_INLINE_NODE
#undef DONT_SELFOPTIMIZE_NODE
#undef DONT_CACHE_NODE
  ASSERT(handle_->IsNumber());
  if (handle_->IsSmi()) {