V8 3.25.30 (as shipped with Node.js 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
stub-cache.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "api.h"
31 #include "arguments.h"
32 #include "ast.h"
33 #include "code-stubs.h"
34 #include "cpu-profiler.h"
35 #include "gdb-jit.h"
36 #include "ic-inl.h"
37 #include "stub-cache.h"
38 #include "type-info.h"
39 #include "vm-state-inl.h"
40 
41 namespace v8 {
42 namespace internal {
43 
44 // -----------------------------------------------------------------------
45 // StubCache implementation.
46 
47 
// Construct a stub cache bound to |isolate|. The tables are not reset
// here; Initialize() must run before the cache is probed.
StubCache::StubCache(Isolate* isolate)
    : isolate_(isolate) { }
50 
51 
// Prepare the cache for use. Both table sizes must be powers of two
// because the probe functions mask hash values into table offsets.
void StubCache::Initialize() {
  ASSERT(IsPowerOf2(kPrimaryTableSize));
  ASSERT(IsPowerOf2(kSecondaryTableSize));
  Clear();
}
57 
58 
// Insert a (name, map) -> code mapping into the megamorphic stub cache.
// A colliding primary entry is demoted into the secondary table rather
// than dropped. Returns |code| for caller convenience.
Code* StubCache::Set(Name* name, Map* map, Code* code) {
  // Get the flags from the code.
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // Validate that the name does not move on scavenge, and that we
  // can use identity checks instead of structural equality checks.
  ASSERT(!heap()->InNewSpace(name));
  ASSERT(name->IsUniqueName());

  // The state bits are not important to the hash function because
  // the stub cache only contains monomorphic stubs. Make sure that
  // the bits are the least significant so they will be the ones
  // masked out.
  ASSERT(Code::ExtractICStateFromFlags(flags) == MONOMORPHIC);
  STATIC_ASSERT((Code::ICStateField::kMask & 1) == 1);

  // Make sure that the code type is not included in the hash.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Compute the primary entry.
  int primary_offset = PrimaryOffset(name, flags, map);
  Entry* primary = entry(primary_, primary_offset);
  Code* old_code = primary->value;

  // If the primary entry has useful data in it, we retire it to the
  // secondary cache before overwriting it.
  if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
    Map* old_map = primary->map;
    Code::Flags old_flags = Code::RemoveTypeFromFlags(old_code->flags());
    // The evicted entry's own primary offset seeds the secondary hash,
    // so later lookups can locate the demoted entry.
    int seed = PrimaryOffset(primary->key, old_flags, old_map);
    int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
    Entry* secondary = entry(secondary_, secondary_offset);
    *secondary = *primary;
  }

  // Update primary cache.
  primary->key = name;
  primary->value = code;
  primary->map = map;
  isolate()->counters()->megamorphic_stub_cache_updates()->Increment();
  return code;
}
101 
102 
103 Handle<Code> StubCache::FindIC(Handle<Name> name,
104  Handle<Map> stub_holder,
105  Code::Kind kind,
106  ExtraICState extra_state,
107  InlineCacheHolderFlag cache_holder) {
108  Code::Flags flags = Code::ComputeMonomorphicFlags(
109  kind, extra_state, cache_holder);
110  Handle<Object> probe(stub_holder->FindInCodeCache(*name, flags), isolate_);
111  if (probe->IsCode()) return Handle<Code>::cast(probe);
112  return Handle<Code>::null();
113 }
114 
115 
116 Handle<Code> StubCache::FindHandler(Handle<Name> name,
117  Handle<Map> stub_holder,
118  Code::Kind kind,
119  InlineCacheHolderFlag cache_holder,
120  Code::StubType type) {
121  Code::Flags flags = Code::ComputeHandlerFlags(kind, type, cache_holder);
122 
123  Handle<Object> probe(stub_holder->FindInCodeCache(*name, flags), isolate_);
124  if (probe->IsCode()) return Handle<Code>::cast(probe);
125  return Handle<Code>::null();
126 }
127 
128 
129 Handle<Code> StubCache::ComputeMonomorphicIC(
130  Code::Kind kind,
132  Handle<HeapType> type,
133  Handle<Code> handler,
134  ExtraICState extra_ic_state) {
135  InlineCacheHolderFlag flag = IC::GetCodeCacheFlag(*type);
136 
137  Handle<Map> stub_holder;
138  Handle<Code> ic;
139  // There are multiple string maps that all use the same prototype. That
140  // prototype cannot hold multiple handlers, one for each of the string maps,
141  // for a single name. Hence, turn off caching of the IC.
142  bool can_be_cached = !type->Is(HeapType::String());
143  if (can_be_cached) {
144  stub_holder = IC::GetCodeCacheHolder(flag, *type, isolate());
145  ic = FindIC(name, stub_holder, kind, extra_ic_state, flag);
146  if (!ic.is_null()) return ic;
147  }
148 
149  if (kind == Code::LOAD_IC) {
150  LoadStubCompiler ic_compiler(isolate(), extra_ic_state, flag);
151  ic = ic_compiler.CompileMonomorphicIC(type, handler, name);
152  } else if (kind == Code::KEYED_LOAD_IC) {
153  KeyedLoadStubCompiler ic_compiler(isolate(), extra_ic_state, flag);
154  ic = ic_compiler.CompileMonomorphicIC(type, handler, name);
155  } else if (kind == Code::STORE_IC) {
156  StoreStubCompiler ic_compiler(isolate(), extra_ic_state);
157  ic = ic_compiler.CompileMonomorphicIC(type, handler, name);
158  } else {
159  ASSERT(kind == Code::KEYED_STORE_IC);
161  KeyedStoreIC::GetKeyedAccessStoreMode(extra_ic_state));
162  KeyedStoreStubCompiler ic_compiler(isolate(), extra_ic_state);
163  ic = ic_compiler.CompileMonomorphicIC(type, handler, name);
164  }
165 
166  if (can_be_cached) Map::UpdateCodeCache(stub_holder, name, ic);
167  return ic;
168 }
169 
170 
171 Handle<Code> StubCache::ComputeLoadNonexistent(Handle<Name> name,
172  Handle<HeapType> type) {
173  InlineCacheHolderFlag flag = IC::GetCodeCacheFlag(*type);
174  Handle<Map> stub_holder = IC::GetCodeCacheHolder(flag, *type, isolate());
175  // If no dictionary mode objects are present in the prototype chain, the load
176  // nonexistent IC stub can be shared for all names for a given map and we use
177  // the empty string for the map cache in that case. If there are dictionary
178  // mode objects involved, we need to do negative lookups in the stub and
179  // therefore the stub will be specific to the name.
180  Handle<Map> current_map = stub_holder;
181  Handle<Name> cache_name = current_map->is_dictionary_map()
182  ? name : Handle<Name>::cast(isolate()->factory()->nonexistent_symbol());
183  Handle<Object> next(current_map->prototype(), isolate());
185  while (!next->IsNull()) {
186  last = Handle<JSObject>::cast(next);
187  next = handle(current_map->prototype(), isolate());
188  current_map = handle(Handle<HeapObject>::cast(next)->map());
189  if (current_map->is_dictionary_map()) cache_name = name;
190  }
191 
192  // Compile the stub that is either shared for all names or
193  // name specific if there are global objects involved.
194  Handle<Code> handler = FindHandler(
195  cache_name, stub_holder, Code::LOAD_IC, flag, Code::FAST);
196  if (!handler.is_null()) {
197  return handler;
198  }
199 
200  LoadStubCompiler compiler(isolate_, kNoExtraICState, flag);
201  handler = compiler.CompileLoadNonexistent(type, last, cache_name);
202  Map::UpdateCodeCache(stub_holder, cache_name, handler);
203  return handler;
204 }
205 
206 
207 Handle<Code> StubCache::ComputeKeyedLoadElement(Handle<Map> receiver_map) {
208  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC);
210  isolate()->factory()->KeyedLoadElementMonomorphic_string();
211 
212  Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags), isolate_);
213  if (probe->IsCode()) return Handle<Code>::cast(probe);
214 
215  KeyedLoadStubCompiler compiler(isolate());
216  Handle<Code> code = compiler.CompileLoadElement(receiver_map);
217 
218  Map::UpdateCodeCache(receiver_map, name, code);
219  return code;
220 }
221 
222 
223 Handle<Code> StubCache::ComputeKeyedStoreElement(
224  Handle<Map> receiver_map,
225  StrictMode strict_mode,
226  KeyedAccessStoreMode store_mode) {
227  ExtraICState extra_state =
228  KeyedStoreIC::ComputeExtraICState(strict_mode, store_mode);
229  Code::Flags flags = Code::ComputeMonomorphicFlags(
230  Code::KEYED_STORE_IC, extra_state);
231 
232  ASSERT(store_mode == STANDARD_STORE ||
233  store_mode == STORE_AND_GROW_NO_TRANSITION ||
235  store_mode == STORE_NO_TRANSITION_HANDLE_COW);
236 
238  isolate()->factory()->KeyedStoreElementMonomorphic_string();
239  Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags), isolate_);
240  if (probe->IsCode()) return Handle<Code>::cast(probe);
241 
242  KeyedStoreStubCompiler compiler(isolate(), extra_state);
243  Handle<Code> code = compiler.CompileStoreElement(receiver_map);
244 
245  Map::UpdateCodeCache(receiver_map, name, code);
246  ASSERT(KeyedStoreIC::GetKeyedAccessStoreMode(code->extra_ic_state())
247  == store_mode);
248  return code;
249 }
250 
251 
252 #define CALL_LOGGER_TAG(kind, type) (Logger::KEYED_##type)
253 
// Insert |code| into the isolate's non-monomorphic code cache, keyed by
// the code's flags. UnseededNumberDictionary::Set may reallocate the
// dictionary, so the heap root is refreshed with the returned handle.
static void FillCache(Isolate* isolate, Handle<Code> code) {
  Handle<UnseededNumberDictionary> dictionary =
      UnseededNumberDictionary::Set(isolate->factory()->non_monomorphic_cache(),
                                    code->flags(),
                                    code);
  isolate->heap()->public_set_non_monomorphic_cache(*dictionary);
}
261 
262 
// Look up the shared premonomorphic IC of |kind|/|state| in the
// non-monomorphic cache. The entry must exist (it is installed at
// bootstrap time via FillCache).
Code* StubCache::FindPreMonomorphicIC(Code::Kind kind, ExtraICState state) {
  Code::Flags flags = Code::ComputeFlags(kind, PREMONOMORPHIC, state);
  UnseededNumberDictionary* dictionary =
      isolate()->heap()->non_monomorphic_cache();
  int entry = dictionary->FindEntry(isolate(), flags);
  ASSERT(entry != -1);
  Object* code = dictionary->ValueAt(entry);
  // This might be called during the marking phase of the collector
  // hence the unchecked cast.
  return reinterpret_cast<Code*>(code);
}
274 
275 
276 Handle<Code> StubCache::ComputeLoad(InlineCacheState ic_state,
277  ExtraICState extra_state) {
278  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, ic_state, extra_state);
280  isolate_->factory()->non_monomorphic_cache();
281  int entry = cache->FindEntry(isolate_, flags);
282  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
283 
284  StubCompiler compiler(isolate_);
286  if (ic_state == UNINITIALIZED) {
287  code = compiler.CompileLoadInitialize(flags);
288  } else if (ic_state == PREMONOMORPHIC) {
289  code = compiler.CompileLoadPreMonomorphic(flags);
290  } else if (ic_state == MEGAMORPHIC) {
291  code = compiler.CompileLoadMegamorphic(flags);
292  } else {
293  UNREACHABLE();
294  }
295  FillCache(isolate_, code);
296  return code;
297 }
298 
299 
300 Handle<Code> StubCache::ComputeStore(InlineCacheState ic_state,
301  ExtraICState extra_state) {
302  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, ic_state, extra_state);
304  isolate_->factory()->non_monomorphic_cache();
305  int entry = cache->FindEntry(isolate_, flags);
306  if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
307 
308  StubCompiler compiler(isolate_);
310  if (ic_state == UNINITIALIZED) {
311  code = compiler.CompileStoreInitialize(flags);
312  } else if (ic_state == PREMONOMORPHIC) {
313  code = compiler.CompileStorePreMonomorphic(flags);
314  } else if (ic_state == GENERIC) {
315  code = compiler.CompileStoreGeneric(flags);
316  } else if (ic_state == MEGAMORPHIC) {
317  code = compiler.CompileStoreMegamorphic(flags);
318  } else {
319  UNREACHABLE();
320  }
321 
322  FillCache(isolate_, code);
323  return code;
324 }
325 
326 
327 Handle<Code> StubCache::ComputeCompareNil(Handle<Map> receiver_map,
328  CompareNilICStub& stub) {
329  Handle<String> name(isolate_->heap()->empty_string());
330  if (!receiver_map->is_shared()) {
331  Handle<Code> cached_ic = FindIC(name, receiver_map, Code::COMPARE_NIL_IC,
332  stub.GetExtraICState());
333  if (!cached_ic.is_null()) return cached_ic;
334  }
335 
337  pattern.Add(isolate_->factory()->meta_map(), receiver_map);
338  Handle<Code> ic = stub.GetCodeCopy(isolate_, pattern);
339 
340  if (!receiver_map->is_shared()) {
341  Map::UpdateCodeCache(receiver_map, name, ic);
342  }
343 
344  return ic;
345 }
346 
347 
348 // TODO(verwaest): Change this method so it takes in a TypeHandleList.
349 Handle<Code> StubCache::ComputeLoadElementPolymorphic(
350  MapHandleList* receiver_maps) {
351  Code::Flags flags = Code::ComputeFlags(Code::KEYED_LOAD_IC, POLYMORPHIC);
353  isolate_->factory()->polymorphic_code_cache();
354  Handle<Object> probe = cache->Lookup(receiver_maps, flags);
355  if (probe->IsCode()) return Handle<Code>::cast(probe);
356 
357  TypeHandleList types(receiver_maps->length());
358  for (int i = 0; i < receiver_maps->length(); i++) {
359  types.Add(HeapType::Class(receiver_maps->at(i), isolate()));
360  }
361  CodeHandleList handlers(receiver_maps->length());
362  KeyedLoadStubCompiler compiler(isolate_);
363  compiler.CompileElementHandlers(receiver_maps, &handlers);
364  Handle<Code> code = compiler.CompilePolymorphicIC(
365  &types, &handlers, factory()->empty_string(), Code::NORMAL, ELEMENT);
366 
367  isolate()->counters()->keyed_load_polymorphic_stubs()->Increment();
368 
369  PolymorphicCodeCache::Update(cache, receiver_maps, flags, code);
370  return code;
371 }
372 
373 
374 Handle<Code> StubCache::ComputePolymorphicIC(
375  Code::Kind kind,
376  TypeHandleList* types,
377  CodeHandleList* handlers,
378  int number_of_valid_types,
380  ExtraICState extra_ic_state) {
381  Handle<Code> handler = handlers->at(0);
382  Code::StubType type = number_of_valid_types == 1 ? handler->type()
383  : Code::NORMAL;
384  if (kind == Code::LOAD_IC) {
385  LoadStubCompiler ic_compiler(isolate_, extra_ic_state);
386  return ic_compiler.CompilePolymorphicIC(
387  types, handlers, name, type, PROPERTY);
388  } else {
389  ASSERT(kind == Code::STORE_IC);
390  StoreStubCompiler ic_compiler(isolate_, extra_ic_state);
391  return ic_compiler.CompilePolymorphicIC(
392  types, handlers, name, type, PROPERTY);
393  }
394 }
395 
396 
397 Handle<Code> StubCache::ComputeStoreElementPolymorphic(
398  MapHandleList* receiver_maps,
399  KeyedAccessStoreMode store_mode,
400  StrictMode strict_mode) {
401  ASSERT(store_mode == STANDARD_STORE ||
402  store_mode == STORE_AND_GROW_NO_TRANSITION ||
404  store_mode == STORE_NO_TRANSITION_HANDLE_COW);
406  isolate_->factory()->polymorphic_code_cache();
407  ExtraICState extra_state = KeyedStoreIC::ComputeExtraICState(
408  strict_mode, store_mode);
410  Code::ComputeFlags(Code::KEYED_STORE_IC, POLYMORPHIC, extra_state);
411  Handle<Object> probe = cache->Lookup(receiver_maps, flags);
412  if (probe->IsCode()) return Handle<Code>::cast(probe);
413 
414  KeyedStoreStubCompiler compiler(isolate_, extra_state);
415  Handle<Code> code = compiler.CompileStoreElementPolymorphic(receiver_maps);
416  PolymorphicCodeCache::Update(cache, receiver_maps, flags, code);
417  return code;
418 }
419 
420 
421 void StubCache::Clear() {
422  Code* empty = isolate_->builtins()->builtin(Builtins::kIllegal);
423  for (int i = 0; i < kPrimaryTableSize; i++) {
424  primary_[i].key = heap()->empty_string();
425  primary_[i].map = NULL;
426  primary_[i].value = empty;
427  }
428  for (int j = 0; j < kSecondaryTableSize; j++) {
429  secondary_[j].key = heap()->empty_string();
430  secondary_[j].map = NULL;
431  secondary_[j].value = empty;
432  }
433 }
434 
435 
436 void StubCache::CollectMatchingMaps(SmallMapList* types,
439  Handle<Context> native_context,
440  Zone* zone) {
441  for (int i = 0; i < kPrimaryTableSize; i++) {
442  if (primary_[i].key == *name) {
443  Map* map = primary_[i].map;
444  // Map can be NULL, if the stub is constant function call
445  // with a primitive receiver.
446  if (map == NULL) continue;
447 
448  int offset = PrimaryOffset(*name, flags, map);
449  if (entry(primary_, offset) == &primary_[i] &&
450  !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
451  types->AddMapIfMissing(Handle<Map>(map), zone);
452  }
453  }
454  }
455 
456  for (int i = 0; i < kSecondaryTableSize; i++) {
457  if (secondary_[i].key == *name) {
458  Map* map = secondary_[i].map;
459  // Map can be NULL, if the stub is constant function call
460  // with a primitive receiver.
461  if (map == NULL) continue;
462 
463  // Lookup in primary table and skip duplicates.
464  int primary_offset = PrimaryOffset(*name, flags, map);
465 
466  // Lookup in secondary table and add matches.
467  int offset = SecondaryOffset(*name, flags, primary_offset);
468  if (entry(secondary_, offset) == &secondary_[i] &&
469  !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
470  types->AddMapIfMissing(Handle<Map>(map), zone);
471  }
472  }
473  }
474 }
475 
476 
477 // ------------------------------------------------------------------------
478 // StubCompiler implementation.
479 
480 
481 RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) {
482  JSObject* receiver = JSObject::cast(args[0]);
483  JSObject* holder = JSObject::cast(args[1]);
484  ExecutableAccessorInfo* callback = ExecutableAccessorInfo::cast(args[2]);
485  Address setter_address = v8::ToCData<Address>(callback->setter());
487  FUNCTION_CAST<v8::AccessorSetterCallback>(setter_address);
488  ASSERT(fun != NULL);
489  ASSERT(callback->IsCompatibleReceiver(receiver));
490  Handle<Name> name = args.at<Name>(3);
491  Handle<Object> value = args.at<Object>(4);
492  HandleScope scope(isolate);
493 
494  // TODO(rossberg): Support symbols in the API.
495  if (name->IsSymbol()) return *value;
497 
498  LOG(isolate, ApiNamedPropertyAccess("store", receiver, *name));
500  custom_args(isolate, callback->data(), receiver, holder);
501  custom_args.Call(fun, v8::Utils::ToLocal(str), v8::Utils::ToLocal(value));
503  return *value;
504 }
505 
506 
514 RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) {
515  ASSERT(args.length() == StubCache::kInterceptorArgsLength);
516  Handle<Name> name_handle =
517  args.at<Name>(StubCache::kInterceptorArgsNameIndex);
518  Handle<InterceptorInfo> interceptor_info =
519  args.at<InterceptorInfo>(StubCache::kInterceptorArgsInfoIndex);
520 
521  // TODO(rossberg): Support symbols in the API.
522  if (name_handle->IsSymbol())
523  return isolate->heap()->no_interceptor_result_sentinel();
525 
526  Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
528  FUNCTION_CAST<v8::NamedPropertyGetterCallback>(getter_address);
529  ASSERT(getter != NULL);
530 
531  Handle<JSObject> receiver =
532  args.at<JSObject>(StubCache::kInterceptorArgsThisIndex);
533  Handle<JSObject> holder =
534  args.at<JSObject>(StubCache::kInterceptorArgsHolderIndex);
535  PropertyCallbackArguments callback_args(
536  isolate, interceptor_info->data(), *receiver, *holder);
537  {
538  // Use the interceptor getter.
539  HandleScope scope(isolate);
541  callback_args.Call(getter, v8::Utils::ToLocal(name));
543  if (!r.IsEmpty()) {
545  result->VerifyApiCallResultType();
546  return *v8::Utils::OpenHandle(*r);
547  }
548  }
549 
550  return isolate->heap()->no_interceptor_result_sentinel();
551 }
552 
553 
554 static MaybeObject* ThrowReferenceError(Isolate* isolate, Name* name) {
555  // If the load is non-contextual, just return the undefined result.
556  // Note that both keyed and non-keyed loads may end up here.
557  HandleScope scope(isolate);
558  LoadIC ic(IC::NO_EXTRA_FRAME, isolate);
559  if (ic.contextual_mode() != CONTEXTUAL) {
560  return isolate->heap()->undefined_value();
561  }
562 
563  // Throw a reference error.
564  Handle<Name> name_handle(name);
565  Handle<Object> error =
566  isolate->factory()->NewReferenceError("not_defined",
567  HandleVector(&name_handle, 1));
568  return isolate->Throw(*error);
569 }
570 
571 
572 static Handle<Object> LoadWithInterceptor(Arguments* args,
573  PropertyAttributes* attrs) {
574  ASSERT(args->length() == StubCache::kInterceptorArgsLength);
575  Handle<Name> name_handle =
576  args->at<Name>(StubCache::kInterceptorArgsNameIndex);
577  Handle<InterceptorInfo> interceptor_info =
578  args->at<InterceptorInfo>(StubCache::kInterceptorArgsInfoIndex);
579  Handle<JSObject> receiver_handle =
580  args->at<JSObject>(StubCache::kInterceptorArgsThisIndex);
581  Handle<JSObject> holder_handle =
582  args->at<JSObject>(StubCache::kInterceptorArgsHolderIndex);
583 
584  Isolate* isolate = receiver_handle->GetIsolate();
585 
586  // TODO(rossberg): Support symbols in the API.
587  if (name_handle->IsSymbol()) {
588  return JSObject::GetPropertyPostInterceptor(
589  holder_handle, receiver_handle, name_handle, attrs);
590  }
591  Handle<String> name = Handle<String>::cast(name_handle);
592 
593  Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
595  FUNCTION_CAST<v8::NamedPropertyGetterCallback>(getter_address);
596  ASSERT(getter != NULL);
597 
598  PropertyCallbackArguments callback_args(isolate,
599  interceptor_info->data(),
600  *receiver_handle,
601  *holder_handle);
602  {
603  HandleScope scope(isolate);
604  // Use the interceptor getter.
606  callback_args.Call(getter, v8::Utils::ToLocal(name));
608  if (!r.IsEmpty()) {
609  *attrs = NONE;
610  Handle<Object> result = v8::Utils::OpenHandle(*r);
611  result->VerifyApiCallResultType();
612  return scope.CloseAndEscape(result);
613  }
614  }
615 
616  Handle<Object> result = JSObject::GetPropertyPostInterceptor(
617  holder_handle, receiver_handle, name_handle, attrs);
618  return result;
619 }
620 
621 
// Runtime entry for a regular property load through an interceptor.
// Throws a ReferenceError (for contextual loads) when the property is
// absent after both the interceptor and the fallback lookup.
RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad) {
  PropertyAttributes attr = NONE;
  HandleScope scope(isolate);
  Handle<Object> result = LoadWithInterceptor(&args, &attr);
  RETURN_IF_EMPTY_HANDLE(isolate, result);

  // If the property is present, return it.
  if (attr != ABSENT) return *result;
  return ThrowReferenceError(isolate, Name::cast(args[0]));
}
636 
637 
// Runtime entry for loading a property through an interceptor when the
// result is about to be called.
RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall) {
  PropertyAttributes attr;
  HandleScope scope(isolate);
  Handle<Object> result = LoadWithInterceptor(&args, &attr);
  RETURN_IF_EMPTY_HANDLE(isolate, result);
  // This is call IC. In this case, we simply return the undefined result which
  // will lead to an exception when trying to invoke the result as a
  // function.
  return *result;
}
648 
649 
// Runtime entry: store through a named-property interceptor.
// Args: 0 = receiver, 1 = name, 2 = value. Strictness is taken from the
// calling store IC's state.
RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 3);
  StoreIC ic(IC::NO_EXTRA_FRAME, isolate);
  Handle<JSObject> receiver = args.at<JSObject>(0);
  Handle<Name> name = args.at<Name>(1);
  Handle<Object> value = args.at<Object>(2);
  ASSERT(receiver->HasNamedInterceptor());
  PropertyAttributes attr = NONE;
  Handle<Object> result = JSObject::SetPropertyWithInterceptor(
      receiver, name, value, attr, ic.strict_mode());
  RETURN_IF_EMPTY_HANDLE(isolate, result);
  return *result;
}
664 
665 
// Runtime entry: load an indexed element through an interceptor.
// Args: 0 = receiver, 1 = non-negative smi index.
RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor) {
  HandleScope scope(isolate);
  Handle<JSObject> receiver = args.at<JSObject>(0);
  ASSERT(args.smi_at(1) >= 0);
  uint32_t index = args.smi_at(1);
  Handle<Object> result =
      JSObject::GetElementWithInterceptor(receiver, receiver, index);
  RETURN_IF_EMPTY_HANDLE(isolate, result);
  return *result;
}
676 
677 
// Assemble, register (profiler/GDB-JIT) and return the uninitialized
// load IC stub for |flags|.
Handle<Code> StubCompiler::CompileLoadInitialize(Code::Flags flags) {
  LoadIC::GenerateInitialize(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileLoadInitialize");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_INITIALIZE_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));
  return code;
}
686 
687 
// Assemble, register and return the premonomorphic load IC stub.
Handle<Code> StubCompiler::CompileLoadPreMonomorphic(Code::Flags flags) {
  LoadIC::GeneratePreMonomorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileLoadPreMonomorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_PREMONOMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));
  return code;
}
696 
697 
// Assemble, register and return the megamorphic load IC stub (probes
// the megamorphic stub cache).
Handle<Code> StubCompiler::CompileLoadMegamorphic(Code::Flags flags) {
  LoadIC::GenerateMegamorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileLoadMegamorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::LOAD_MEGAMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *code));
  return code;
}
706 
707 
// Assemble, register and return the uninitialized store IC stub.
Handle<Code> StubCompiler::CompileStoreInitialize(Code::Flags flags) {
  StoreIC::GenerateInitialize(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreInitialize");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_INITIALIZE_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));
  return code;
}
716 
717 
// Assemble, register and return the premonomorphic store IC stub.
Handle<Code> StubCompiler::CompileStorePreMonomorphic(Code::Flags flags) {
  StoreIC::GeneratePreMonomorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStorePreMonomorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_PREMONOMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));
  return code;
}
726 
727 
// Assemble, register and return the generic store IC stub. Strictness
// is decoded from the extra IC state embedded in |flags|.
Handle<Code> StubCompiler::CompileStoreGeneric(Code::Flags flags) {
  ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags);
  StrictMode strict_mode = StoreIC::GetStrictMode(extra_state);
  StoreIC::GenerateRuntimeSetProperty(masm(), strict_mode);
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreGeneric");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_GENERIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));
  return code;
}
738 
739 
// Assemble, register and return the megamorphic store IC stub.
Handle<Code> StubCompiler::CompileStoreMegamorphic(Code::Flags flags) {
  StoreIC::GenerateMegamorphic(masm());
  Handle<Code> code = GetCodeWithFlags(flags, "CompileStoreMegamorphic");
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STORE_MEGAMORPHIC_TAG, *code, 0));
  GDBJIT(AddCode(GDBJITInterface::STORE_IC, *code));
  return code;
}
748 
749 
750 #undef CALL_LOGGER_TAG
751 
752 
// Materialize the code assembled so far into a heap Code object with
// the given |flags|. |name| is used only for disassembly output.
Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
                                            const char* name) {
  // Create code object in the heap.
  CodeDesc desc;
  masm_.GetCode(&desc);
  Handle<Code> code = factory()->NewCode(desc, flags, masm_.CodeObject());
  if (code->has_major_key()) {
    code->set_major_key(CodeStub::NoCache);
  }
#ifdef ENABLE_DISASSEMBLER
  if (FLAG_print_code_stubs) code->Disassemble(name);
#endif
  return code;
}
767 
768 
769 Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags,
770  Handle<Name> name) {
771  return (FLAG_print_code_stubs && !name.is_null() && name->IsString())
772  ? GetCodeWithFlags(flags, Handle<String>::cast(name)->ToCString().get())
773  : GetCodeWithFlags(flags, NULL);
774 }
775 
776 
777 void StubCompiler::LookupPostInterceptor(Handle<JSObject> holder,
778  Handle<Name> name,
779  LookupResult* lookup) {
780  holder->LocalLookupRealNamedProperty(*name, lookup);
781  if (lookup->IsFound()) return;
782  if (holder->GetPrototype()->IsNull()) return;
783  holder->GetPrototype()->Lookup(*name, lookup);
784 }
785 
786 
787 #define __ ACCESS_MASM(masm())
788 
789 
// Emit the receiver checks for a load handler. Primitive receivers
// (string/symbol/number/boolean) are redirected to their wrapper
// constructor's prototype before the prototype-chain check; all other
// receivers skip the receiver map check (SKIP_RECEIVER). Returns the
// register holding the holder object.
Register LoadStubCompiler::HandlerFrontendHeader(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* miss) {
  PrototypeCheckType check_type = CHECK_ALL_MAPS;
  int function_index = -1;
  if (type->Is(HeapType::String())) {
    function_index = Context::STRING_FUNCTION_INDEX;
  } else if (type->Is(HeapType::Symbol())) {
    function_index = Context::SYMBOL_FUNCTION_INDEX;
  } else if (type->Is(HeapType::Number())) {
    function_index = Context::NUMBER_FUNCTION_INDEX;
  } else if (type->Is(HeapType::Boolean())) {
    // Booleans use the generic oddball map, so an additional check is needed to
    // ensure the receiver is really a boolean.
    GenerateBooleanCheck(object_reg, miss);
    function_index = Context::BOOLEAN_FUNCTION_INDEX;
  } else {
    check_type = SKIP_RECEIVER;
  }

  if (check_type == CHECK_ALL_MAPS) {
    // Load the primitive wrapper's prototype into scratch1() and retarget
    // the remaining checks at that prototype object.
    GenerateDirectLoadGlobalFunctionPrototype(
        masm(), function_index, scratch1(), miss);
    Object* function = isolate()->native_context()->get(function_index);
    Object* prototype = JSFunction::cast(function)->instance_prototype();
    type = IC::CurrentTypeOf(handle(prototype, isolate()), isolate());
    object_reg = scratch1();
  }

  // Check that the maps starting from the prototype haven't changed.
  return CheckPrototypes(
      type, object_reg, holder, scratch1(), scratch2(), scratch3(),
      name, miss, check_type);
}
827 
828 
// HandlerFrontend for store uses the name register. It has to be restored
// before a miss. Emits the prototype-chain checks for a store handler
// (the receiver map check is always skipped) and returns the register
// holding the holder object.
Register StoreStubCompiler::HandlerFrontendHeader(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* miss) {
  return CheckPrototypes(type, object_reg, holder, this->name(),
                         scratch1(), scratch2(), name, miss, SKIP_RECEIVER);
}
840 
841 
842 bool BaseLoadStoreStubCompiler::IncludesNumberType(TypeHandleList* types) {
843  for (int i = 0; i < types->length(); ++i) {
844  if (types->at(i)->Is(HeapType::Number())) return true;
845  }
846  return false;
847 }
848 
849 
// Emit the full handler frontend: receiver/prototype checks plus the
// miss footer. Returns the register holding the holder object.
Register BaseLoadStoreStubCompiler::HandlerFrontend(Handle<HeapType> type,
                                                    Register object_reg,
                                                    Handle<JSObject> holder,
                                                    Handle<Name> name) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  HandlerFrontendFooter(name, &miss);

  return reg;
}
862 
863 
864 void LoadStubCompiler::NonexistentHandlerFrontend(Handle<HeapType> type,
865  Handle<JSObject> last,
866  Handle<Name> name) {
867  Label miss;
868 
869  Register holder;
870  Handle<Map> last_map;
871  if (last.is_null()) {
872  holder = receiver();
873  last_map = IC::TypeToMap(*type, isolate());
874  // If |type| has null as its prototype, |last| is Handle<JSObject>::null().
875  ASSERT(last_map->prototype() == isolate()->heap()->null_value());
876  } else {
877  holder = HandlerFrontendHeader(type, receiver(), last, name, &miss);
878  last_map = handle(last->map());
879  }
880 
881  if (last_map->is_dictionary_map() &&
882  !last_map->IsJSGlobalObjectMap() &&
883  !last_map->IsJSGlobalProxyMap()) {
884  if (!name->IsUniqueName()) {
885  ASSERT(name->IsString());
886  name = factory()->InternalizeString(Handle<String>::cast(name));
887  }
888  ASSERT(last.is_null() ||
889  last->property_dictionary()->FindEntry(*name) ==
890  NameDictionary::kNotFound);
891  GenerateDictionaryNegativeLookup(masm(), &miss, holder, name,
892  scratch2(), scratch3());
893  }
894 
895  // If the last object in the prototype chain is a global object,
896  // check that the global property cell is empty.
897  if (last_map->IsJSGlobalObjectMap()) {
898  Handle<JSGlobalObject> global = last.is_null()
899  ? Handle<JSGlobalObject>::cast(type->AsConstant())
901  GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
902  }
903 
904  HandlerFrontendFooter(name, &miss);
905 }
906 
907 
908 Handle<Code> LoadStubCompiler::CompileLoadField(
909  Handle<HeapType> type,
910  Handle<JSObject> holder,
911  Handle<Name> name,
912  PropertyIndex field,
913  Representation representation) {
914  Register reg = HandlerFrontend(type, receiver(), holder, name);
915  GenerateLoadField(reg, holder, field, representation);
916 
917  // Return the generated code.
918  return GetCode(kind(), Code::FAST, name);
919 }
920 
921 
922 Handle<Code> LoadStubCompiler::CompileLoadConstant(
923  Handle<HeapType> type,
924  Handle<JSObject> holder,
925  Handle<Name> name,
926  Handle<Object> value) {
927  HandlerFrontend(type, receiver(), holder, name);
928  GenerateLoadConstant(value);
929 
930  // Return the generated code.
931  return GetCode(kind(), Code::FAST, name);
932 }
933 
934 
935 Handle<Code> LoadStubCompiler::CompileLoadCallback(
936  Handle<HeapType> type,
937  Handle<JSObject> holder,
938  Handle<Name> name,
940  Register reg = CallbackHandlerFrontend(
941  type, receiver(), holder, name, callback);
942  GenerateLoadCallback(reg, callback);
943 
944  // Return the generated code.
945  return GetCode(kind(), Code::FAST, name);
946 }
947 
948 
949 Handle<Code> LoadStubCompiler::CompileLoadCallback(
950  Handle<HeapType> type,
951  Handle<JSObject> holder,
952  Handle<Name> name,
953  const CallOptimization& call_optimization) {
954  ASSERT(call_optimization.is_simple_api_call());
955  Handle<JSFunction> callback = call_optimization.constant_function();
956  CallbackHandlerFrontend(type, receiver(), holder, name, callback);
957  Handle<Map>receiver_map = IC::TypeToMap(*type, isolate());
958  GenerateFastApiCall(
959  masm(), call_optimization, receiver_map,
960  receiver(), scratch1(), false, 0, NULL);
961  // Return the generated code.
962  return GetCode(kind(), Code::FAST, name);
963 }
964 
965 
966 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(
967  Handle<HeapType> type,
968  Handle<JSObject> holder,
969  Handle<Name> name) {
970  LookupResult lookup(isolate());
971  LookupPostInterceptor(holder, name, &lookup);
972 
973  Register reg = HandlerFrontend(type, receiver(), holder, name);
974  // TODO(368): Compile in the whole chain: all the interceptors in
975  // prototypes and ultimate answer.
976  GenerateLoadInterceptor(reg, type, holder, &lookup, name);
977 
978  // Return the generated code.
979  return GetCode(kind(), Code::FAST, name);
980 }
981 
982 
983 void LoadStubCompiler::GenerateLoadPostInterceptor(
984  Register interceptor_reg,
985  Handle<JSObject> interceptor_holder,
986  Handle<Name> name,
987  LookupResult* lookup) {
988  Handle<JSObject> holder(lookup->holder());
989  if (lookup->IsField()) {
990  PropertyIndex field = lookup->GetFieldIndex();
991  if (interceptor_holder.is_identical_to(holder)) {
992  GenerateLoadField(
993  interceptor_reg, holder, field, lookup->representation());
994  } else {
995  // We found FIELD property in prototype chain of interceptor's holder.
996  // Retrieve a field from field's holder.
997  Register reg = HandlerFrontend(
998  IC::CurrentTypeOf(interceptor_holder, isolate()),
999  interceptor_reg, holder, name);
1000  GenerateLoadField(
1001  reg, holder, field, lookup->representation());
1002  }
1003  } else {
1004  // We found CALLBACKS property in prototype chain of interceptor's
1005  // holder.
1006  ASSERT(lookup->type() == CALLBACKS);
1008  ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
1009  ASSERT(callback->getter() != NULL);
1010 
1011  Register reg = CallbackHandlerFrontend(
1012  IC::CurrentTypeOf(interceptor_holder, isolate()),
1013  interceptor_reg, holder, name, callback);
1014  GenerateLoadCallback(reg, callback);
1015  }
1016 }
1017 
1018 
1019 Handle<Code> BaseLoadStoreStubCompiler::CompileMonomorphicIC(
1020  Handle<HeapType> type,
1021  Handle<Code> handler,
1022  Handle<Name> name) {
1023  TypeHandleList types(1);
1024  CodeHandleList handlers(1);
1025  types.Add(type);
1026  handlers.Add(handler);
1027  Code::StubType stub_type = handler->type();
1028  return CompilePolymorphicIC(&types, &handlers, name, stub_type, PROPERTY);
1029 }
1030 
1031 
1032 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
1033  Handle<HeapType> type,
1034  Handle<JSObject> holder,
1035  Handle<Name> name,
1036  Handle<JSFunction> getter) {
1037  HandlerFrontend(type, receiver(), holder, name);
1038  GenerateLoadViaGetter(masm(), type, receiver(), getter);
1039 
1040  // Return the generated code.
1041  return GetCode(kind(), Code::FAST, name);
1042 }
1043 
1044 
// Compiles a store handler that transitions the receiver to map
// |transition| and stores |name|'s value under the new layout. |lookup|
// describes the (possibly absent) existing property. Misses and slow-path
// exits restore the name register before tail-calling the builtins.
Handle<Code> StoreStubCompiler::CompileStoreTransition(
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Map> transition,
    Handle<Name> name) {
  Label miss, slow;

  // Ensure no transitions to deprecated maps are followed.
  __ CheckMapDeprecated(transition, scratch1(), &miss);

  // Check that we are allowed to write this.
  if (object->GetPrototype()->IsJSObject()) {
    Handle<JSObject> holder;
    // holder == object indicates that no property was found.
    if (lookup->holder() != *object) {
      holder = Handle<JSObject>(lookup->holder());
    } else {
      // Find the top object.
      holder = object;
      do {
        holder = Handle<JSObject>(JSObject::cast(holder->GetPrototype()));
      } while (holder->GetPrototype()->IsJSObject());
    }

    // Emit map checks for the whole chain from receiver to |holder|.
    Register holder_reg = HandlerFrontendHeader(
        IC::CurrentTypeOf(object, isolate()), receiver(), holder, name, &miss);

    // If no property was found, and the holder (the last object in the
    // prototype chain) is in slow mode, we need to do a negative lookup on the
    // holder.
    if (lookup->holder() == *object) {
      GenerateNegativeHolderLookup(masm(), holder, holder_reg, name, &miss);
    }
  }

  GenerateStoreTransition(masm(),
                          object,
                          lookup,
                          transition,
                          name,
                          receiver(), this->name(), value(),
                          scratch1(), scratch2(), scratch3(),
                          &miss,
                          &slow);

  // Handle store cache miss.
  GenerateRestoreName(masm(), &miss, name);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  GenerateRestoreName(masm(), &slow, name);
  TailCallBuiltin(masm(), SlowBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1100 
1101 
// Compiles a store handler that writes into an existing field of |object|
// (no map transition). |lookup| identifies the field; misses tail-call the
// generic store-miss builtin.
Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                  LookupResult* lookup,
                                                  Handle<Name> name) {
  Label miss;

  // The receiver itself is the holder for a plain field store.
  HandlerFrontendHeader(IC::CurrentTypeOf(object, isolate()),
                        receiver(), object, name, &miss);

  // Generate store field code.
  GenerateStoreField(masm(),
                     object,
                     lookup,
                     receiver(), this->name(), value(), scratch1(), scratch2(),
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1124 
1125 
// Compiles the handler for assignments to a JSArray's "length" property.
Handle<Code> StoreStubCompiler::CompileStoreArrayLength(Handle<JSObject> object,
                                                        LookupResult* lookup,
                                                        Handle<Name> name) {
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.
  Label miss;

  // Check that value is a smi.
  __ JumpIfNotSmi(value(), &miss);

  // Generate tail call to StoreIC_ArrayLength.
  GenerateStoreArrayLength();

  // Handle miss case.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1148 
1149 
1150 Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
1151  Handle<JSObject> object,
1152  Handle<JSObject> holder,
1153  Handle<Name> name,
1154  Handle<JSFunction> setter) {
1155  Handle<HeapType> type = IC::CurrentTypeOf(object, isolate());
1156  HandlerFrontend(type, receiver(), holder, name);
1157  GenerateStoreViaSetter(masm(), type, receiver(), setter);
1158 
1159  return GetCode(kind(), Code::FAST, name);
1160 }
1161 
1162 
// Compiles a store handler that goes through the fast API call path for a
// simple API setter; the stored value is passed as the single argument.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);
  // The value register carries the one argument of the API call.
  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, handle(object->map()),
      receiver(), scratch1(), true, 1, values);
  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1177 
1178 
1179 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
1180  Handle<Map> receiver_map) {
1181  ElementsKind elements_kind = receiver_map->elements_kind();
1182  if (receiver_map->has_fast_elements() ||
1183  receiver_map->has_external_array_elements() ||
1184  receiver_map->has_fixed_typed_array_elements()) {
1186  receiver_map->instance_type() == JS_ARRAY_TYPE,
1187  elements_kind).GetCode(isolate());
1188  __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
1189  } else {
1190  Handle<Code> stub = FLAG_compiled_keyed_dictionary_loads
1191  ? KeyedLoadDictionaryElementStub().GetCode(isolate())
1192  : KeyedLoadDictionaryElementPlatformStub().GetCode(isolate());
1193  __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
1194  }
1195 
1196  TailCallBuiltin(masm(), Builtins::kKeyedLoadIC_Miss);
1197 
1198  // Return the generated code.
1199  return GetICCode(kind(), Code::NORMAL, factory()->empty_string());
1200 }
1201 
1202 
1203 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
1204  Handle<Map> receiver_map) {
1205  ElementsKind elements_kind = receiver_map->elements_kind();
1206  bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
1207  Handle<Code> stub;
1208  if (receiver_map->has_fast_elements() ||
1209  receiver_map->has_external_array_elements() ||
1210  receiver_map->has_fixed_typed_array_elements()) {
1212  is_jsarray,
1213  elements_kind,
1214  store_mode()).GetCode(isolate());
1215  } else {
1216  stub = KeyedStoreElementStub(is_jsarray,
1217  elements_kind,
1218  store_mode()).GetCode(isolate());
1219  }
1220 
1221  __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
1222 
1223  TailCallBuiltin(masm(), Builtins::kKeyedStoreIC_Miss);
1224 
1225  // Return the generated code.
1226  return GetICCode(kind(), Code::NORMAL, factory()->empty_string());
1227 }
1228 
1229 
1230 #undef __
1231 
1232 
1233 void StubCompiler::TailCallBuiltin(MacroAssembler* masm, Builtins::Name name) {
1234  Handle<Code> code(masm->isolate()->builtins()->builtin(name));
1235  GenerateTailCall(masm, code);
1236 }
1237 
1238 
1239 void BaseLoadStoreStubCompiler::JitEvent(Handle<Name> name, Handle<Code> code) {
1240 #ifdef ENABLE_GDB_JIT_INTERFACE
1241  GDBJITInterface::CodeTag tag;
1242  if (kind_ == Code::LOAD_IC) {
1243  tag = GDBJITInterface::LOAD_IC;
1244  } else if (kind_ == Code::KEYED_LOAD_IC) {
1245  tag = GDBJITInterface::KEYED_LOAD_IC;
1246  } else if (kind_ == Code::STORE_IC) {
1247  tag = GDBJITInterface::STORE_IC;
1248  } else {
1249  tag = GDBJITInterface::KEYED_STORE_IC;
1250  }
1251  GDBJIT(AddCode(tag, *name, *code));
1252 #endif
1253 }
1254 
1255 
1256 void BaseLoadStoreStubCompiler::InitializeRegisters() {
1257  if (kind_ == Code::LOAD_IC) {
1258  registers_ = LoadStubCompiler::registers();
1259  } else if (kind_ == Code::KEYED_LOAD_IC) {
1260  registers_ = KeyedLoadStubCompiler::registers();
1261  } else if (kind_ == Code::STORE_IC) {
1262  registers_ = StoreStubCompiler::registers();
1263  } else {
1264  registers_ = KeyedStoreStubCompiler::registers();
1265  }
1266 }
1267 
1268 
// Finalizes an IC Code object: computes IC flags from |kind|, |state| and
// the compiler's extra state, emits profiler and GDB-JIT events, and
// returns the code.
Handle<Code> BaseLoadStoreStubCompiler::GetICCode(Code::Kind kind,
                                                  Code::StubType type,
                                                  Handle<Name> name,
                                                  InlineCacheState state) {
  Code::Flags flags = Code::ComputeFlags(kind, state, extra_state(), type);
  Handle<Code> code = GetCodeWithFlags(flags, name);
  PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
  JitEvent(name, code);
  return code;
}
1279 
1280 
// Finalizes a handler Code object (as opposed to an IC; handlers carry no
// extra IC state). Emits profiler and GDB-JIT events and returns the code.
Handle<Code> BaseLoadStoreStubCompiler::GetCode(Code::Kind kind,
                                                Code::StubType type,
                                                Handle<Name> name) {
  ASSERT_EQ(kNoExtraICState, extra_state());
  Code::Flags flags = Code::ComputeHandlerFlags(kind, type, cache_holder_);
  Handle<Code> code = GetCodeWithFlags(flags, name);
  PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
  JitEvent(name, code);
  return code;
}
1291 
1292 
// For each map in |receiver_maps|, appends the monomorphic keyed-load
// handler matching that map's instance type / elements kind to |handlers|
// (the output list is parallel to |receiver_maps|).
void KeyedLoadStubCompiler::CompileElementHandlers(MapHandleList* receiver_maps,
                                                   CodeHandleList* handlers) {
  for (int i = 0; i < receiver_maps->length(); ++i) {
    Handle<Map> receiver_map = receiver_maps->at(i);
    Handle<Code> cached_stub;

    if ((receiver_map->instance_type() & kNotStringTag) == 0) {
      // String receivers share a single builtin handler.
      cached_stub = isolate()->builtins()->KeyedLoadIC_String();
    } else if (receiver_map->instance_type() < FIRST_JS_RECEIVER_TYPE) {
      // Non-string primitives go through the slow builtin.
      cached_stub = isolate()->builtins()->KeyedLoadIC_Slow();
    } else {
      bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
      ElementsKind elements_kind = receiver_map->elements_kind();

      if (IsFastElementsKind(elements_kind) ||
          IsExternalArrayElementsKind(elements_kind) ||
          IsFixedTypedArrayElementsKind(elements_kind)) {
        cached_stub =
            KeyedLoadFastElementStub(is_js_array,
                                     elements_kind).GetCode(isolate());
      } else if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
        cached_stub = isolate()->builtins()->KeyedLoadIC_SloppyArguments();
      } else {
        // Only dictionary elements remain at this point.
        ASSERT(elements_kind == DICTIONARY_ELEMENTS);
        cached_stub = KeyedLoadDictionaryElementStub().GetCode(isolate());
      }
    }

    handlers->Add(cached_stub);
  }
}
1324 
1325 
// Compiles a polymorphic keyed-store stub for |receiver_maps|: one
// monomorphic handler per map, using an elements-transition stub when a
// more general transitioned map exists among the input maps.
Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic(
    MapHandleList* receiver_maps) {
  // Collect MONOMORPHIC stubs for all |receiver_maps|.
  CodeHandleList handlers(receiver_maps->length());
  MapHandleList transitioned_maps(receiver_maps->length());
  for (int i = 0; i < receiver_maps->length(); ++i) {
    Handle<Map> receiver_map(receiver_maps->at(i));
    Handle<Code> cached_stub;
    Handle<Map> transitioned_map =
        receiver_map->FindTransitionedMap(receiver_maps);

    // TODO(mvstanton): The code below is doing pessimistic elements
    // transitions. I would like to stop doing that and rely on Allocation Site
    // Tracking to do a better job of ensuring the data types are what they need
    // to be. Not all the elements are in place yet, pessimistic elements
    // transitions are still important for performance.
    bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
    ElementsKind elements_kind = receiver_map->elements_kind();
    if (!transitioned_map.is_null()) {
      // Transition to the more general map, then store.
      cached_stub = ElementsTransitionAndStoreStub(
          elements_kind,
          transitioned_map->elements_kind(),
          is_js_array,
          store_mode()).GetCode(isolate());
    } else if (receiver_map->instance_type() < FIRST_JS_RECEIVER_TYPE) {
      cached_stub = isolate()->builtins()->KeyedStoreIC_Slow();
    } else {
      if (receiver_map->has_fast_elements() ||
          receiver_map->has_external_array_elements() ||
          receiver_map->has_fixed_typed_array_elements()) {
        cached_stub = KeyedStoreFastElementStub(
            is_js_array,
            elements_kind,
            store_mode()).GetCode(isolate());
      } else {
        cached_stub = KeyedStoreElementStub(
            is_js_array,
            elements_kind,
            store_mode()).GetCode(isolate());
      }
    }
    ASSERT(!cached_stub.is_null());
    handlers.Add(cached_stub);
    transitioned_maps.Add(transitioned_map);
  }
  Handle<Code> code =
      CompileStorePolymorphic(receiver_maps, &handlers, &transitioned_maps);
  isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
  PROFILE(isolate(),
          CodeCreateEvent(Logger::KEYED_STORE_POLYMORPHIC_IC_TAG, *code, 0));
  return code;
}
1378 
1379 
// Dictionary-mode (slow) element stores are delegated to the generic
// KeyedStoreIC slow path.
void KeyedStoreStubCompiler::GenerateStoreDictionaryElement(
    MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}
1384 
1385 
1386 CallOptimization::CallOptimization(LookupResult* lookup) {
1387  if (lookup->IsFound() &&
1388  lookup->IsCacheable() &&
1389  lookup->IsConstantFunction()) {
1390  // We only optimize constant function calls.
1391  Initialize(Handle<JSFunction>(lookup->GetConstantFunction()));
1392  } else {
1393  Initialize(Handle<JSFunction>::null());
1394  }
1395 }
1396 
1397 
// Builds a CallOptimization for a known target function; the API-call
// analysis happens in Initialize().
CallOptimization::CallOptimization(Handle<JSFunction> function) {
  Initialize(function);
}
1401 
1402 
// Walks |object_map|'s hidden-prototype chain looking for an object whose
// map satisfies the API call's expected receiver template. Sets
// |holder_lookup| to:
//  - kHolderIsReceiver when the receiver itself matches (or there is no
//    receiver restriction); returns null,
//  - kHolderFound when a hidden prototype matches; returns that prototype,
//  - kHolderNotFound otherwise (including non-JSObject maps); returns null.
Handle<JSObject> CallOptimization::LookupHolderOfExpectedType(
    Handle<Map> object_map,
    HolderLookup* holder_lookup) const {
  ASSERT(is_simple_api_call());
  if (!object_map->IsJSObjectMap()) {
    *holder_lookup = kHolderNotFound;
    return Handle<JSObject>::null();
  }
  if (expected_receiver_type_.is_null() ||
      expected_receiver_type_->IsTemplateFor(*object_map)) {
    *holder_lookup = kHolderIsReceiver;
    return Handle<JSObject>::null();
  }
  while (true) {
    // Only hidden prototypes are considered part of the receiver.
    if (!object_map->prototype()->IsJSObject()) break;
    Handle<JSObject> prototype(JSObject::cast(object_map->prototype()));
    if (!prototype->map()->is_hidden_prototype()) break;
    object_map = handle(prototype->map());
    if (expected_receiver_type_->IsTemplateFor(*object_map)) {
      *holder_lookup = kHolderFound;
      return prototype;
    }
  }
  *holder_lookup = kHolderNotFound;
  return Handle<JSObject>::null();
}
1429 
1430 
// Returns true when |receiver| can be used for this simple API call with
// the accessor living on |holder|: either the receiver itself satisfies
// the expected receiver template, or the matching hidden-prototype holder
// has |holder| somewhere on its prototype chain.
bool CallOptimization::IsCompatibleReceiver(Handle<Object> receiver,
                                            Handle<JSObject> holder) const {
  ASSERT(is_simple_api_call());
  if (!receiver->IsJSObject()) return false;
  Handle<Map> map(JSObject::cast(*receiver)->map());
  HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      LookupHolderOfExpectedType(map, &holder_lookup);
  switch (holder_lookup) {
    case kHolderNotFound:
      return false;
    case kHolderIsReceiver:
      return true;
    case kHolderFound:
      if (api_holder.is_identical_to(holder)) return true;
      // Check if holder is in prototype chain of api_holder.
      {
        JSObject* object = *api_holder;
        while (true) {
          Object* prototype = object->map()->prototype();
          if (!prototype->IsJSObject()) return false;
          if (prototype == *holder) return true;
          object = JSObject::cast(prototype);
        }
      }
      break;
  }
  UNREACHABLE();
  return false;
}
1461 
1462 
1463 void CallOptimization::Initialize(Handle<JSFunction> function) {
1464  constant_function_ = Handle<JSFunction>::null();
1465  is_simple_api_call_ = false;
1466  expected_receiver_type_ = Handle<FunctionTemplateInfo>::null();
1467  api_call_info_ = Handle<CallHandlerInfo>::null();
1468 
1469  if (function.is_null() || !function->is_compiled()) return;
1470 
1471  constant_function_ = function;
1472  AnalyzePossibleApiFunction(function);
1473 }
1474 
1475 
// Records whether |function| is a "simple" API call: an API function with
// a C++ callback whose signature restricts at most the receiver (no
// argument restrictions). Fills in api_call_info_ and, if present,
// expected_receiver_type_ as side effects.
void CallOptimization::AnalyzePossibleApiFunction(Handle<JSFunction> function) {
  if (!function->shared()->IsApiFunction()) return;
  Handle<FunctionTemplateInfo> info(function->shared()->get_api_func_data());

  // Require a C++ callback.
  if (info->call_code()->IsUndefined()) return;
  api_call_info_ =
      Handle<CallHandlerInfo>(CallHandlerInfo::cast(info->call_code()));

  // Accept signatures that either have no restrictions at all or
  // only have restrictions on the receiver.
  if (!info->signature()->IsUndefined()) {
    Handle<SignatureInfo> signature =
        Handle<SignatureInfo>(SignatureInfo::cast(info->signature()));
    if (!signature->args()->IsUndefined()) return;
    if (!signature->receiver()->IsUndefined()) {
      expected_receiver_type_ =
          Handle<FunctionTemplateInfo>(
              FunctionTemplateInfo::cast(signature->receiver()));
    }
  }

  is_simple_api_call_ = true;
}
1500 
1501 
1502 } } // namespace v8::internal
byte * Address
Definition: globals.h:186
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Isolate * isolate() const
Definition: assembler.h:62
bool IsExternalArrayElementsKind(ElementsKind kind)
#define PROFILE(IsolateGetter, Call)
Definition: cpu-profiler.h:194
#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)
Definition: isolate.h:120
Handle< Code > CompileMonomorphicIC(Handle< HeapType > type, Handle< Code > handler, Handle< Name > name)
Definition: stub-cache.cc:1019
#define LOG(isolate, Call)
Definition: log.h:86
static Handle< String > cast(Handle< S > that)
Definition: handles.h:75
#define __
Definition: stub-cache.cc:787
kSerializedDataOffset Object
Definition: objects-inl.h:5016
T & at(int i) const
Definition: list.h:90
KeyedAccessStoreMode
Definition: objects.h:164
virtual ExtraICState GetExtraICState()
Definition: code-stubs.h:1436
void Add(Handle< Map > map_to_find, Handle< Object > obj_to_replace)
Definition: objects-inl.h:4697
#define RUNTIME_FUNCTION(Type, Name)
Definition: arguments.h:305
uint32_t Flags
Definition: objects.h:5184
#define ASSERT(condition)
Definition: checks.h:329
void(* AccessorSetterCallback)(Local< String > property, Local< Value > value, const PropertyCallbackInfo< void > &info)
Definition: v8.h:2146
#define RETURN_IF_EMPTY_HANDLE(isolate, call)
Definition: isolate.h:151
bool IsFastElementsKind(ElementsKind kind)
PropertyAttributes
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in only print modified registers Don t break for ASM_UNIMPLEMENTED_BREAK macros print stack trace when an illegal exception is thrown randomize hashes to avoid predictable hash 
Fixed seed to use to hash property Print the time it takes to deserialize the snapshot testing_bool_flag testing_int_flag string flag tmp file in which to serialize heap Print the time it takes to lazily compile hydrogen code stubs concurrent_recompilation concurrent_sweeping Print usage including flags
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
Object * ValueAt(int entry)
Definition: objects.h:3930
const uint32_t kNotStringTag
Definition: objects.h:599
Code * value
Definition: stub-cache.h:74
#define UNREACHABLE()
Definition: checks.h:52
Local< Value > GetPrototype()
Definition: api.cc:3192
Definition: stub-cache.h:72
Map * map
Definition: stub-cache.h:75
Handle< Code > CompilePolymorphicIC(TypeHandleList *types, CodeHandleList *handlers, Handle< Name > name, Code::StubType type, IcCheckType check)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Handle< Code > CompileStoreElement(Handle< Map > receiver_map)
Definition: stub-cache.cc:1203
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
bool IsPowerOf2(T x)
Definition: utils.h:51
Handle< Code > CompileLoadNonexistent(Handle< HeapType > type, Handle< JSObject > last, Handle< Name > name)
V8_INLINE bool IsUndefined() const
Definition: v8.h:6229
static v8::internal::Handle< To > OpenHandle(v8::Local< From > handle)
Definition: api.h:308
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
#define GDBJIT(action)
Definition: gdb-jit.h:137
#define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T)
Definition: isolate.h:128
bool is_null() const
Definition: handles.h:81
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
V8_INLINE bool IsEmpty() const
Definition: v8.h:248
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
void(* NamedPropertyGetterCallback)(Local< String > property, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:3303
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
IN DWORD64 OUT PDWORD64 OUT PIMAGEHLP_SYMBOL64 Symbol
bool IsCompatibleReceiver(Object *receiver)
Definition: objects-inl.h:6427
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:39
int ExtraICState
Definition: objects.h:310
Vector< Handle< Object > > HandleVector(v8::internal::Handle< T > *elms, int length)
Definition: v8utils.h:118
#define STATIC_ASSERT(test)
Definition: checks.h:341
Handle< Code > CompileStoreElementPolymorphic(MapHandleList *receiver_maps)
Definition: stub-cache.cc:1326
Name * key
Definition: stub-cache.h:73
Handle< Code > CompileLoadElement(Handle< Map > receiver_map)
Definition: stub-cache.cc:1179