V8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
stub-cache.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_STUB_CACHE_H_
29 #define V8_STUB_CACHE_H_
30 
31 #include "allocation.h"
32 #include "arguments.h"
33 #include "code-stubs.h"
34 #include "ic-inl.h"
35 #include "macro-assembler.h"
36 #include "objects.h"
37 #include "zone-inl.h"
38 
39 namespace v8 {
40 namespace internal {
41 
42 
43 // The stub cache is used for megamorphic calls and property accesses.
44 // It maps (map, name, type)->Code*
45 
46 // The design of the table uses the inline cache stubs used for
47 // monomorphic calls. The beauty of this is that we do not have to
48 // invalidate the cache whenever a prototype map is changed. The stub
49 // validates the map chain as in the monomorphic case.
50 
51 
52 class CallOptimization;
53 class SmallMapList;
54 class StubCache;
55 
56 
58  public:
59  Address address() const { return address_; }
60 
61  private:
62  explicit SCTableReference(Address address) : address_(address) {}
63 
64  Address address_;
65 
66  friend class StubCache;
67 };
68 
69 
70 class StubCache {
71  public:
72  struct Entry {
75  Map* map;
76  };
77 
78  void Initialize();
79 
81  Handle<JSObject> holder);
82 
84  Handle<Map> stub_holder_map,
85  Code::Kind kind,
86  ExtraICState extra_state = kNoExtraICState,
87  InlineCacheHolderFlag cache_holder = OWN_MAP);
88 
91  Code::Kind kind,
92  InlineCacheHolderFlag cache_holder,
93  Code::StubType type);
94 
97  Handle<HeapType> type,
98  Handle<Code> handler,
99  ExtraICState extra_ic_state);
100 
102 
104 
106  StrictMode strict_mode,
107  KeyedAccessStoreMode store_mode);
108 
109  // ---
110 
111  Handle<Code> ComputeLoad(InlineCacheState ic_state, ExtraICState extra_state);
113  ExtraICState extra_state);
114 
115  // ---
116 
118  CompareNilICStub& stub);
119 
120  // ---
121 
124  KeyedAccessStoreMode store_mode,
125  StrictMode strict_mode);
126 
128  TypeHandleList* types,
129  CodeHandleList* handlers,
130  int number_of_valid_maps,
132  ExtraICState extra_ic_state);
133 
134  // Finds the Code object stored in the Heap::non_monomorphic_cache().
135  Code* FindPreMonomorphicIC(Code::Kind kind, ExtraICState extra_ic_state);
136 
137  // Update cache for entry hash(name, map).
138  Code* Set(Name* name, Map* map, Code* code);
139 
140  // Clear the lookup table (@ mark compact collection).
141  void Clear();
142 
143  // Collect all maps that match the name and flags.
144  void CollectMatchingMaps(SmallMapList* types,
147  Handle<Context> native_context,
148  Zone* zone);
149 
150  // Generate code for probing the stub cache table.
151  // Arguments extra, extra2 and extra3 may be used to pass additional scratch
152  // registers. Set to no_reg if not needed.
153  void GenerateProbe(MacroAssembler* masm,
155  Register receiver,
156  Register name,
157  Register scratch,
158  Register extra,
159  Register extra2 = no_reg,
160  Register extra3 = no_reg);
161 
162  enum Table {
165  };
166 
167 
169  return SCTableReference(
170  reinterpret_cast<Address>(&first_entry(table)->key));
171  }
172 
173 
175  return SCTableReference(
176  reinterpret_cast<Address>(&first_entry(table)->map));
177  }
178 
179 
181  return SCTableReference(
182  reinterpret_cast<Address>(&first_entry(table)->value));
183  }
184 
185 
187  switch (table) {
188  case StubCache::kPrimary: return StubCache::primary_;
189  case StubCache::kSecondary: return StubCache::secondary_;
190  }
191  UNREACHABLE();
192  return NULL;
193  }
194 
195  Isolate* isolate() { return isolate_; }
196  Heap* heap() { return isolate()->heap(); }
197  Factory* factory() { return isolate()->factory(); }
198 
199  // These constants describe the structure of the interceptor arguments on the
200  // stack. The arguments are pushed by the (platform-specific)
201  // PushInterceptorArguments and read by LoadPropertyWithInterceptorOnly and
202  // LoadWithInterceptor.
203  static const int kInterceptorArgsNameIndex = 0;
204  static const int kInterceptorArgsInfoIndex = 1;
205  static const int kInterceptorArgsThisIndex = 2;
206  static const int kInterceptorArgsHolderIndex = 3;
207  static const int kInterceptorArgsLength = 4;
208 
209  private:
210  explicit StubCache(Isolate* isolate);
211 
212  // The stub cache has a primary and secondary level. The two levels have
213  // different hashing algorithms in order to avoid simultaneous collisions
214  // in both caches. Unlike a probing strategy (quadratic or otherwise) the
215  // update strategy on updates is fairly clear and simple: Any existing entry
216  // in the primary cache is moved to the secondary cache, and secondary cache
217  // entries are overwritten.
218 
219  // Hash algorithm for the primary table. This algorithm is replicated in
220  // assembler for every architecture. Returns an index into the table that
221  // is scaled by 1 << kHeapObjectTagSize.
222  static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
223  // This works well because the heap object tag size and the hash
224  // shift are equal. Shifting down the length field to get the
225  // hash code would effectively throw away two bits of the hash
226  // code.
228  // Compute the hash of the name (use entire hash field).
229  ASSERT(name->HasHashCode());
230  uint32_t field = name->hash_field();
231  // Using only the low bits in 64-bit mode is unlikely to increase the
232  // risk of collision even if the heap is spread over an area larger than
233  // 4Gb (and not at all if it isn't).
234  uint32_t map_low32bits =
235  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
236  // We always set the in_loop bit to zero when generating the lookup code
237  // so do it here too so the hash codes match.
238  uint32_t iflags =
239  (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
240  // Base the offset on a simple combination of name, flags, and map.
241  uint32_t key = (map_low32bits + field) ^ iflags;
242  return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
243  }
244 
245  // Hash algorithm for the secondary table. This algorithm is replicated in
246  // assembler for every architecture. Returns an index into the table that
247  // is scaled by 1 << kHeapObjectTagSize.
248  static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
249  // Use the seed from the primary cache in the secondary cache.
250  uint32_t name_low32bits =
251  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
252  // We always set the in_loop bit to zero when generating the lookup code
253  // so do it here too so the hash codes match.
254  uint32_t iflags =
255  (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
256  uint32_t key = (seed - name_low32bits) + iflags;
257  return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
258  }
259 
260  // Compute the entry for a given offset in exactly the same way as
261  // we do in generated code. We generate an hash code that already
262  // ends in Name::kHashShift 0s. Then we multiply it so it is a multiple
263  // of sizeof(Entry). This makes it easier to avoid making mistakes
264  // in the hashed offset computations.
265  static Entry* entry(Entry* table, int offset) {
266  const int multiplier = sizeof(*table) >> Name::kHashShift;
267  return reinterpret_cast<Entry*>(
268  reinterpret_cast<Address>(table) + offset * multiplier);
269  }
270 
271  static const int kPrimaryTableBits = 11;
272  static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
273  static const int kSecondaryTableBits = 9;
274  static const int kSecondaryTableSize = (1 << kSecondaryTableBits);
275 
276  Entry primary_[kPrimaryTableSize];
277  Entry secondary_[kSecondaryTableSize];
278  Isolate* isolate_;
279 
280  friend class Isolate;
281  friend class SCTableReference;
282 
284 };
285 
286 
287 // ------------------------------------------------------------------------
288 
289 
290 // Support functions for IC stubs for callbacks.
291 DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty);
292 
293 
294 // Support functions for IC stubs for interceptors.
295 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly);
296 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad);
297 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall);
298 DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty);
299 DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor);
300 
301 
304 
305 
306 // The stub compilers compile stubs for the stub cache.
307 class StubCompiler BASE_EMBEDDED {
308  public:
309  explicit StubCompiler(Isolate* isolate,
310  ExtraICState extra_ic_state = kNoExtraICState)
311  : isolate_(isolate), extra_ic_state_(extra_ic_state),
312  masm_(isolate, NULL, 256), failure_(NULL) { }
313 
314  Handle<Code> CompileLoadInitialize(Code::Flags flags);
315  Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags);
316  Handle<Code> CompileLoadMegamorphic(Code::Flags flags);
317 
318  Handle<Code> CompileStoreInitialize(Code::Flags flags);
319  Handle<Code> CompileStorePreMonomorphic(Code::Flags flags);
320  Handle<Code> CompileStoreGeneric(Code::Flags flags);
321  Handle<Code> CompileStoreMegamorphic(Code::Flags flags);
322 
323  // Static functions for generating parts of stubs.
324  static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
325  int index,
326  Register prototype);
327 
328  // Helper function used to check that the dictionary doesn't contain
329  // the property. This function may return false negatives, so miss_label
330  // must always call a backup property check that is complete.
331  // This function is safe to call if the receiver has fast properties.
332  // Name must be unique and receiver must be a heap object.
333  static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
334  Label* miss_label,
335  Register receiver,
337  Register r0,
338  Register r1);
339 
340  // Generates prototype loading code that uses the objects from the
341  // context we were in when this function was called. If the context
342  // has changed, a jump to miss is performed. This ties the generated
343  // code to a particular context and so must not be used in cases
344  // where the generated code is not allowed to have references to
345  // objects from a context.
346  static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
347  int index,
348  Register prototype,
349  Label* miss);
350 
351  static void GenerateFastPropertyLoad(MacroAssembler* masm,
352  Register dst,
353  Register src,
354  bool inobject,
355  int index,
356  Representation representation);
357 
358  static void GenerateLoadArrayLength(MacroAssembler* masm,
359  Register receiver,
360  Register scratch,
361  Label* miss_label);
362 
363  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
364  Register receiver,
365  Register scratch1,
366  Register scratch2,
367  Label* miss_label);
368 
369  // Generate code to check that a global property cell is empty. Create
370  // the property cell at compilation time if no cell exists for the
371  // property.
372  static void GenerateCheckPropertyCell(MacroAssembler* masm,
373  Handle<JSGlobalObject> global,
375  Register scratch,
376  Label* miss);
377 
378  static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);
379 
380  // Generates code that verifies that the property holder has not changed
381  // (checking maps of objects in the prototype chain for fast and global
382  // objects or doing negative lookup for slow objects, ensures that the
383  // property cells for global objects are still empty) and checks that the map
384  // of the holder has not changed. If necessary the function also generates
385  // code for security check in case of global object holders. Helps to make
386  // sure that the current IC is still valid.
387  //
388  // The scratch and holder registers are always clobbered, but the object
389  // register is only clobbered if it the same as the holder register. The
390  // function returns a register containing the holder - either object_reg or
391  // holder_reg.
392  Register CheckPrototypes(Handle<HeapType> type,
393  Register object_reg,
394  Handle<JSObject> holder,
395  Register holder_reg,
396  Register scratch1,
397  Register scratch2,
399  Label* miss,
401 
402  void GenerateBooleanCheck(Register object, Label* miss);
403 
404  static void GenerateFastApiCall(MacroAssembler* masm,
405  const CallOptimization& optimization,
406  Handle<Map> receiver_map,
407  Register receiver,
408  Register scratch,
409  bool is_store,
410  int argc,
411  Register* values);
412 
413  protected:
414  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
415  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<Name> name);
416 
417  ExtraICState extra_state() { return extra_ic_state_; }
418 
419  MacroAssembler* masm() { return &masm_; }
420  void set_failure(Failure* failure) { failure_ = failure; }
421 
422  static void LookupPostInterceptor(Handle<JSObject> holder,
424  LookupResult* lookup);
425 
426  Isolate* isolate() { return isolate_; }
427  Heap* heap() { return isolate()->heap(); }
428  Factory* factory() { return isolate()->factory(); }
429 
430  static void GenerateTailCall(MacroAssembler* masm, Handle<Code> code);
431 
432  private:
433  Isolate* isolate_;
434  const ExtraICState extra_ic_state_;
435  MacroAssembler masm_;
436  Failure* failure_;
437 };
438 
439 
441 
442 
443 class BaseLoadStoreStubCompiler: public StubCompiler {
444  public:
447  ExtraICState extra_ic_state = kNoExtraICState,
448  InlineCacheHolderFlag cache_holder = OWN_MAP)
449  : StubCompiler(isolate, extra_ic_state),
450  kind_(kind),
451  cache_holder_(cache_holder) {
453  }
455 
457  Handle<Code> handler,
459 
461  CodeHandleList* handlers,
463  Code::StubType type,
465 
467  switch (kind) {
468  case Code::LOAD_IC: return Builtins::kLoadIC_Miss;
469  case Code::STORE_IC: return Builtins::kStoreIC_Miss;
470  case Code::KEYED_LOAD_IC: return Builtins::kKeyedLoadIC_Miss;
471  case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Miss;
472  default: UNREACHABLE();
473  }
474  return Builtins::kLoadIC_Miss;
475  }
476 
477  protected:
479  Register object_reg,
480  Handle<JSObject> holder,
482  Label* miss) = 0;
483 
484  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss) = 0;
485 
487  Register object_reg,
488  Handle<JSObject> holder,
490 
492  Code::StubType type,
494 
496  Code::StubType type,
498  InlineCacheState state = MONOMORPHIC);
499  Code::Kind kind() { return kind_; }
500 
502  if (!code->is_inline_cache_stub()) return Logger::STUB_TAG;
503  if (kind_ == Code::LOAD_IC) {
504  return code->ic_state() == MONOMORPHIC
505  ? Logger::LOAD_IC_TAG : Logger::LOAD_POLYMORPHIC_IC_TAG;
506  } else if (kind_ == Code::KEYED_LOAD_IC) {
507  return code->ic_state() == MONOMORPHIC
508  ? Logger::KEYED_LOAD_IC_TAG : Logger::KEYED_LOAD_POLYMORPHIC_IC_TAG;
509  } else if (kind_ == Code::STORE_IC) {
510  return code->ic_state() == MONOMORPHIC
511  ? Logger::STORE_IC_TAG : Logger::STORE_POLYMORPHIC_IC_TAG;
512  } else {
513  return code->ic_state() == MONOMORPHIC
514  ? Logger::KEYED_STORE_IC_TAG : Logger::KEYED_STORE_POLYMORPHIC_IC_TAG;
515  }
516  }
518 
519  Register receiver() { return registers_[0]; }
520  Register name() { return registers_[1]; }
521  Register scratch1() { return registers_[2]; }
522  Register scratch2() { return registers_[3]; }
523  Register scratch3() { return registers_[4]; }
524 
525  void InitializeRegisters();
526 
527  bool IncludesNumberType(TypeHandleList* types);
528 
532 };
533 
534 
536  public:
538  ExtraICState extra_ic_state = kNoExtraICState,
539  InlineCacheHolderFlag cache_holder = OWN_MAP,
540  Code::Kind kind = Code::LOAD_IC)
541  : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state,
542  cache_holder) { }
543  virtual ~LoadStubCompiler() { }
544 
546  Handle<JSObject> holder,
548  PropertyIndex index,
549  Representation representation);
550 
552  Handle<JSObject> holder,
555 
557  Handle<JSObject> holder,
559  const CallOptimization& call_optimization);
560 
562  Handle<JSObject> holder,
564  Handle<Object> value);
565 
567  Handle<JSObject> holder,
569 
571  Handle<JSObject> holder,
573  Handle<JSFunction> getter);
574 
575  static void GenerateLoadViaGetter(MacroAssembler* masm,
576  Handle<HeapType> type,
578  Handle<JSFunction> getter);
579 
583  }
584 
586  Handle<JSObject> last,
588 
590  Handle<GlobalObject> holder,
593  bool is_dont_delete);
594 
595  protected:
597  return LoadIC::GetContextualMode(extra_state());
598  }
599 
601  Register object_reg,
602  Handle<JSObject> holder,
604  Label* miss);
605 
606  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
607 
609  Register object_reg,
610  Handle<JSObject> holder,
612  Handle<Object> callback);
614  Handle<JSObject> last,
616 
617  void GenerateLoadField(Register reg,
618  Handle<JSObject> holder,
619  PropertyIndex field,
620  Representation representation);
624  void GenerateLoadCallback(const CallOptimization& call_optimization,
625  Handle<Map> receiver_map);
626  void GenerateLoadInterceptor(Register holder_reg,
627  Handle<Object> object,
628  Handle<JSObject> holder,
629  LookupResult* lookup,
632  Handle<JSObject> interceptor_holder,
634  LookupResult* lookup);
635 
636  private:
637  static Register* registers();
638  Register scratch4() { return registers_[5]; }
640 };
641 
642 
644  public:
646  ExtraICState extra_ic_state = kNoExtraICState,
647  InlineCacheHolderFlag cache_holder = OWN_MAP)
648  : LoadStubCompiler(isolate, extra_ic_state, cache_holder,
649  Code::KEYED_LOAD_IC) { }
650 
652 
653  void CompileElementHandlers(MapHandleList* receiver_maps,
654  CodeHandleList* handlers);
655 
657 
658  private:
659  static Register* registers();
661 };
662 
663 
665  public:
667  ExtraICState extra_ic_state,
668  Code::Kind kind = Code::STORE_IC)
669  : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state) {}
670 
671  virtual ~StoreStubCompiler() { }
672 
674  LookupResult* lookup,
675  Handle<Map> transition,
677 
679  LookupResult* lookup,
681 
683  LookupResult* lookup,
685 
687 
689  Handle<JSObject> holder,
690  Register holder_reg,
692  Label* miss);
693 
695  Handle<JSObject> object,
696  LookupResult* lookup,
697  Handle<Map> transition,
699  Register receiver_reg,
700  Register name_reg,
701  Register value_reg,
705  Label* miss_label,
706  Label* slow);
707 
709  Handle<JSObject> object,
710  LookupResult* lookup,
711  Register receiver_reg,
712  Register name_reg,
713  Register value_reg,
716  Label* miss_label);
717 
719  Handle<JSObject> holder,
722 
724  Handle<JSObject> holder,
726  const CallOptimization& call_optimization);
727 
728  static void GenerateStoreViaSetter(MacroAssembler* masm,
729  Handle<HeapType> type,
731  Handle<JSFunction> setter);
732 
736  }
737 
739  Handle<JSObject> holder,
741  Handle<JSFunction> setter);
742 
745 
747  switch (kind) {
748  case Code::STORE_IC: return Builtins::kStoreIC_Slow;
749  case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
750  default: UNREACHABLE();
751  }
752  return Builtins::kStoreIC_Slow;
753  }
754 
755  protected:
757  Register object_reg,
758  Handle<JSObject> holder,
760  Label* miss);
761 
762  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
764  Label* label,
766 
767  private:
768  static Register* registers();
769  static Register value();
771 };
772 
773 
775  public:
777  ExtraICState extra_ic_state)
778  : StoreStubCompiler(isolate, extra_ic_state, Code::KEYED_STORE_IC) {}
779 
781 
783  CodeHandleList* handler_stubs,
784  MapHandleList* transitioned_maps);
785 
787 
789 
790  private:
791  static Register* registers();
792 
793  KeyedAccessStoreMode store_mode() {
794  return KeyedStoreIC::GetKeyedAccessStoreMode(extra_state());
795  }
796 
797  Register transition_map() { return scratch1(); }
798 
800 };
801 
802 
803 // Holds information about possible function call optimizations.
804 class CallOptimization BASE_EMBEDDED {
805  public:
806  explicit CallOptimization(LookupResult* lookup);
807 
808  explicit CallOptimization(Handle<JSFunction> function);
809 
810  bool is_constant_call() const {
811  return !constant_function_.is_null();
812  }
813 
815  ASSERT(is_constant_call());
816  return constant_function_;
817  }
818 
819  bool is_simple_api_call() const {
820  return is_simple_api_call_;
821  }
822 
824  ASSERT(is_simple_api_call());
825  return expected_receiver_type_;
826  }
827 
829  ASSERT(is_simple_api_call());
830  return api_call_info_;
831  }
832 
836  kHolderFound
837  };
838  Handle<JSObject> LookupHolderOfExpectedType(
839  Handle<Map> receiver_map,
840  HolderLookup* holder_lookup) const;
841 
842  // Check if the api holder is between the receiver and the holder.
843  bool IsCompatibleReceiver(Handle<Object> receiver,
844  Handle<JSObject> holder) const;
845 
846  private:
847  void Initialize(Handle<JSFunction> function);
848 
849  // Determines whether the given function can be called using the
850  // fast api call builtin.
851  void AnalyzePossibleApiFunction(Handle<JSFunction> function);
852 
853  Handle<JSFunction> constant_function_;
854  bool is_simple_api_call_;
855  Handle<FunctionTemplateInfo> expected_receiver_type_;
856  Handle<CallHandlerInfo> api_call_info_;
857 };
858 
859 
860 } } // namespace v8::internal
861 
862 #endif // V8_STUB_CACHE_H_
byte * Address
Definition: globals.h:186
virtual void HandlerFrontendFooter(Handle< Name > name, Label *miss)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
Handle< Code > ComputeLoadNonexistent(Handle< Name > name, Handle< HeapType > type)
Definition: stub-cache.cc:171
Handle< Code > CompileLoadConstant(Handle< HeapType > type, Handle< JSObject > holder, Handle< Name > name, Handle< Object > value)
Definition: stub-cache.cc:922
void GenerateRestoreName(MacroAssembler *masm, Label *label, Handle< Name > name)
virtual Register HandlerFrontendHeader(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Label *miss)
Definition: stub-cache.cc:831
BaseLoadStoreStubCompiler(Isolate *isolate, Code::Kind kind, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag cache_holder=OWN_MAP)
Definition: stub-cache.h:445
bool is_simple_api_call() const
Definition: stub-cache.h:819
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
void GenerateProbe(MacroAssembler *masm, Code::Flags flags, Register receiver, Register name, Register scratch, Register extra, Register extra2=no_reg, Register extra3=no_reg)
Handle< Code > CompileMonomorphicIC(Handle< HeapType > type, Handle< Code > handler, Handle< Name > name)
Definition: stub-cache.cc:1019
Handle< Code > CompileStoreInterceptor(Handle< JSObject > object, Handle< Name > name)
void set_failure(Failure *failure)
Definition: stub-cache.h:420
virtual Register HandlerFrontendHeader(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Label *miss)=0
static ContextualMode GetContextualMode(ExtraICState state)
Definition: ic.h:314
void CollectMatchingMaps(SmallMapList *types, Handle< Name > name, Code::Flags flags, Handle< Context > native_context, Zone *zone)
Definition: stub-cache.cc:436
void GenerateLoadField(Register reg, Handle< JSObject > holder, PropertyIndex field, Representation representation)
bool is_constant_call() const
Definition: stub-cache.h:810
KeyedAccessStoreMode
Definition: objects.h:164
static const int kInterceptorArgsLength
Definition: stub-cache.h:207
static Builtins::Name SlowBuiltin(Code::Kind kind)
Definition: stub-cache.h:746
static const int kInterceptorArgsNameIndex
Definition: stub-cache.h:203
uint32_t Flags
Definition: objects.h:5184
SCTableReference map_reference(StubCache::Table table)
Definition: stub-cache.h:174
#define ASSERT(condition)
Definition: checks.h:329
Handle< Code > ComputeKeyedLoadElement(Handle< Map > receiver_map)
Definition: stub-cache.cc:207
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< JSObject > holder, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
virtual Register HandlerFrontendHeader(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Label *miss)
Definition: stub-cache.cc:790
Factory * factory()
Definition: isolate.h:995
virtual void HandlerFrontendFooter(Handle< Name > name, Label *miss)
friend class SCTableReference
Definition: stub-cache.h:281
Handle< Code > CompileLoadInterceptor(Handle< HeapType > type, Handle< JSObject > holder, Handle< Name > name)
Definition: stub-cache.cc:966
Code * value
Definition: stub-cache.h:74
MacroAssembler * masm()
Definition: stub-cache.h:419
#define UNREACHABLE()
Definition: checks.h:52
void GenerateLoadPostInterceptor(Register reg, Handle< JSObject > interceptor_holder, Handle< Name > name, LookupResult *lookup)
Definition: stub-cache.cc:983
StoreStubCompiler(Isolate *isolate, ExtraICState extra_ic_state, Code::Kind kind=Code::STORE_IC)
Definition: stub-cache.h:666
InlineCacheHolderFlag cache_holder_
Definition: stub-cache.h:530
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
Definition: flags.cc:665
Handle< Code > ComputeMonomorphicIC(Code::Kind kind, Handle< Name > name, Handle< HeapType > type, Handle< Code > handler, ExtraICState extra_ic_state)
Definition: stub-cache.cc:129
Handle< JSObject > StubHolder(Handle< JSObject > receiver, Handle< JSObject > holder)
static void GenerateStoreViaSetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > setter)
virtual void HandlerFrontendFooter(Handle< Name > name, Label *miss)=0
KeyedLoadStubCompiler(Isolate *isolate, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag cache_holder=OWN_MAP)
Definition: stub-cache.h:645
Handle< Code > ComputePolymorphicIC(Code::Kind kind, TypeHandleList *types, CodeHandleList *handlers, int number_of_valid_maps, Handle< Name > name, ExtraICState extra_ic_state)
Definition: stub-cache.cc:374
ExtraICState extra_state()
Definition: stub-cache.h:417
Handle< Code > ComputeStore(InlineCacheState ic_state, ExtraICState extra_state)
Definition: stub-cache.cc:300
Definition: stub-cache.h:72
StubCache::Entry * first_entry(StubCache::Table table)
Definition: stub-cache.h:186
Map * map
Definition: stub-cache.h:75
void check(i::Vector< const uint8_t > string)
static KeyedAccessStoreMode GetKeyedAccessStoreMode(ExtraICState extra_state)
Definition: ic.h:597
Handle< Code > CompilePolymorphicIC(TypeHandleList *types, CodeHandleList *handlers, Handle< Name > name, Code::StubType type, IcCheckType check)
Handle< Code > CompileStoreTransition(Handle< JSObject > object, LookupResult *lookup, Handle< Map > transition, Handle< Name > name)
Definition: stub-cache.cc:1045
void GenerateLoadConstant(Handle< Object > value)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
ContextualMode contextual_mode()
Definition: stub-cache.h:596
static const int kInterceptorArgsThisIndex
Definition: stub-cache.h:205
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:359
Handle< Code > CompileStoreElement(Handle< Map > receiver_map)
Definition: stub-cache.cc:1203
Handle< Code > CompileStoreArrayLength(Handle< JSObject > object, LookupResult *lookup, Handle< Name > name)
Definition: stub-cache.cc:1126
static void GenerateLoadViaGetterForDeopt(MacroAssembler *masm)
Definition: stub-cache.h:580
Handle< Code > ComputeKeyedStoreElement(Handle< Map > receiver_map, StrictMode strict_mode, KeyedAccessStoreMode store_mode)
Definition: stub-cache.cc:223
void GenerateNegativeHolderLookup(MacroAssembler *masm, Handle< JSObject > holder, Register holder_reg, Handle< Name > name, Label *miss)
const Register r0
Handle< Code > CompileLoadNonexistent(Handle< HeapType > type, Handle< JSObject > last, Handle< Name > name)
Handle< Code > CompileLoadGlobal(Handle< HeapType > type, Handle< GlobalObject > holder, Handle< PropertyCell > cell, Handle< Name > name, bool is_dont_delete)
Handle< Code > FindIC(Handle< Name > name, Handle< Map > stub_holder_map, Code::Kind kind, ExtraICState extra_state=kNoExtraICState, InlineCacheHolderFlag cache_holder=OWN_MAP)
Definition: stub-cache.cc:103
#define BASE_EMBEDDED
Definition: allocation.h:68
StubCompiler(Isolate *isolate, ExtraICState extra_ic_state=kNoExtraICState)
Definition: stub-cache.h:309
static void GenerateLoadDictionaryElement(MacroAssembler *masm)
Handle< Code > CompileStoreViaSetter(Handle< JSObject > object, Handle< JSObject > holder, Handle< Name > name, Handle< JSFunction > setter)
Definition: stub-cache.cc:1150
void GenerateLoadInterceptor(Register holder_reg, Handle< Object > object, Handle< JSObject > holder, LookupResult *lookup, Handle< Name > name)
void CompileElementHandlers(MapHandleList *receiver_maps, CodeHandleList *handlers)
Definition: stub-cache.cc:1293
Handle< Code > ComputeLoad(InlineCacheState ic_state, ExtraICState extra_state)
Definition: stub-cache.cc:276
Handle< Code > CompileLoadField(Handle< HeapType > type, Handle< JSObject > holder, Handle< Name > name, PropertyIndex index, Representation representation)
Definition: stub-cache.cc:908
static const int kInterceptorArgsInfoIndex
Definition: stub-cache.h:204
Handle< FunctionTemplateInfo > expected_receiver_type() const
Definition: stub-cache.h:823
SCTableReference key_reference(StubCache::Table table)
Definition: stub-cache.h:168
void GenerateLoadCallback(Register reg, Handle< ExecutableAccessorInfo > callback)
static Builtins::Name MissBuiltin(Code::Kind kind)
Definition: stub-cache.h:466
Handle< CallHandlerInfo > api_call_info() const
Definition: stub-cache.h:828
const Register r1
Register CallbackHandlerFrontend(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name, Handle< Object > callback)
KeyedStoreStubCompiler(Isolate *isolate, ExtraICState extra_ic_state)
Definition: stub-cache.h:776
void GenerateStoreField(MacroAssembler *masm, Handle< JSObject > object, LookupResult *lookup, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Label *miss_label)
Handle< Code > ComputeCompareNil(Handle< Map > receiver_map, CompareNilICStub &stub)
Definition: stub-cache.cc:327
friend class Isolate
Definition: stub-cache.h:280
DECLARE_RUNTIME_FUNCTION(MaybeObject *, KeyedLoadIC_MissFromStubFailure)
Handle< Code > CompileLoadViaGetter(Handle< HeapType > type, Handle< JSObject > holder, Handle< Name > name, Handle< JSFunction > getter)
Definition: stub-cache.cc:1032
Handle< Code > ComputeStoreElementPolymorphic(MapHandleList *receiver_maps, KeyedAccessStoreMode store_mode, StrictMode strict_mode)
Definition: stub-cache.cc:397
void NonexistentHandlerFrontend(Handle< HeapType > type, Handle< JSObject > last, Handle< Name > name)
Definition: stub-cache.cc:864
const int kHeapObjectTagSize
Definition: v8.h:5474
SCTableReference value_reference(StubCache::Table table)
Definition: stub-cache.h:180
int ExtraICState
Definition: objects.h:310
Code * Set(Name *name, Map *map, Code *code)
Definition: stub-cache.cc:59
static const int kHashShift
Definition: objects.h:8642
static const int kFlagsNotUsedInLookup
Definition: objects.h:5684
const Register no_reg
Handle< Code > GetCode(Code::Kind kind, Code::StubType type, Handle< Name > name)
Definition: stub-cache.cc:1281
void GenerateStoreTransition(MacroAssembler *masm, Handle< JSObject > object, LookupResult *lookup, Handle< Map > transition, Handle< Name > name, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Register scratch3, Label *miss_label, Label *slow)
uint32_t hash_field()
Definition: objects-inl.h:2941
Register HandlerFrontend(Handle< HeapType > type, Register object_reg, Handle< JSObject > holder, Handle< Name > name)
Definition: stub-cache.cc:850
Handle< Code > CompileStoreElementPolymorphic(MapHandleList *receiver_maps)
Definition: stub-cache.cc:1326
Logger::LogEventsAndTags log_kind(Handle< Code > code)
Definition: stub-cache.h:501
Handle< Code > CompileStorePolymorphic(MapHandleList *receiver_maps, CodeHandleList *handler_stubs, MapHandleList *transitioned_maps)
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > getter)
static const int kInterceptorArgsHolderIndex
Definition: stub-cache.h:206
static void GenerateStoreViaSetterForDeopt(MacroAssembler *masm)
Definition: stub-cache.h:733
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
Handle< JSFunction > constant_function() const
Definition: stub-cache.h:814
void JitEvent(Handle< Name > name, Handle< Code > code)
Definition: stub-cache.cc:1239
Name * key
Definition: stub-cache.h:73
Handle< Code > GetICCode(Code::Kind kind, Code::StubType type, Handle< Name > name, InlineCacheState state=MONOMORPHIC)
Definition: stub-cache.cc:1269
Handle< Code > CompileLoadCallback(Handle< HeapType > type, Handle< JSObject > holder, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
Definition: stub-cache.cc:935
Handle< Code > CompileLoadElement(Handle< Map > receiver_map)
Definition: stub-cache.cc:1179
Code * FindPreMonomorphicIC(Code::Kind kind, ExtraICState extra_ic_state)
Definition: stub-cache.cc:263
Handle< Code > CompileStoreField(Handle< JSObject > object, LookupResult *lookup, Handle< Name > name)
Definition: stub-cache.cc:1102
LoadStubCompiler(Isolate *isolate, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag cache_holder=OWN_MAP, Code::Kind kind=Code::LOAD_IC)
Definition: stub-cache.h:537
Address address() const
Definition: stub-cache.h:59
Handle< Code > FindHandler(Handle< Name > name, Handle< Map > map, Code::Kind kind, InlineCacheHolderFlag cache_holder, Code::StubType type)
Definition: stub-cache.cc:116
Handle< Code > ComputeLoadElementPolymorphic(MapHandleList *receiver_maps)
Definition: stub-cache.cc:349
static void GenerateStoreDictionaryElement(MacroAssembler *masm)
Definition: stub-cache.cc:1380
bool IncludesNumberType(TypeHandleList *types)
Definition: stub-cache.cc:842