v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
objects.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "accessors.h"
31 #include "allocation-site-scopes.h"
32 #include "api.h"
33 #include "arguments.h"
34 #include "bootstrapper.h"
35 #include "codegen.h"
36 #include "code-stubs.h"
37 #include "cpu-profiler.h"
38 #include "debug.h"
39 #include "deoptimizer.h"
40 #include "date.h"
41 #include "elements.h"
42 #include "execution.h"
43 #include "full-codegen.h"
44 #include "hydrogen.h"
45 #include "isolate-inl.h"
46 #include "log.h"
47 #include "objects-inl.h"
48 #include "objects-visiting-inl.h"
49 #include "macro-assembler.h"
50 #include "mark-compact.h"
51 #include "safepoint-table.h"
52 #include "string-stream.h"
53 #include "utils.h"
54 
55 #ifdef ENABLE_DISASSEMBLER
56 #include "disasm.h"
57 #include "disassembler.h"
58 #endif
59 
60 namespace v8 {
61 namespace internal {
62 
63 
64 MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
65  Object* value) {
66  Object* result;
67  { MaybeObject* maybe_result =
68  constructor->GetHeap()->AllocateJSObject(constructor);
69  if (!maybe_result->ToObject(&result)) return maybe_result;
70  }
71  JSValue::cast(result)->set_value(value);
72  return result;
73 }
74 
75 
76 MaybeObject* Object::ToObject(Context* native_context) {
77  if (IsNumber()) {
78  return CreateJSValue(native_context->number_function(), this);
79  } else if (IsBoolean()) {
80  return CreateJSValue(native_context->boolean_function(), this);
81  } else if (IsString()) {
82  return CreateJSValue(native_context->string_function(), this);
83  } else if (IsSymbol()) {
84  return CreateJSValue(native_context->symbol_function(), this);
85  }
86  ASSERT(IsJSObject());
87  return this;
88 }
89 
90 
91 MaybeObject* Object::ToObject(Isolate* isolate) {
92  if (IsJSReceiver()) {
93  return this;
94  } else if (IsNumber()) {
95  Context* native_context = isolate->context()->native_context();
96  return CreateJSValue(native_context->number_function(), this);
97  } else if (IsBoolean()) {
98  Context* native_context = isolate->context()->native_context();
99  return CreateJSValue(native_context->boolean_function(), this);
100  } else if (IsString()) {
101  Context* native_context = isolate->context()->native_context();
102  return CreateJSValue(native_context->string_function(), this);
103  } else if (IsSymbol()) {
104  Context* native_context = isolate->context()->native_context();
105  return CreateJSValue(native_context->symbol_function(), this);
106  }
107 
108  // Throw a type error.
109  return Failure::InternalError();
110 }
111 
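// Usage sketch (illustrative, not part of the original source): callers of
// ToObject() follow the MaybeObject unwrap protocol used throughout this file:
//
//   MaybeObject* maybe = obj->ToObject(isolate);
//   Object* wrapped;
//   if (!maybe->ToObject(&wrapped)) return maybe;  // propagate the failure
//
// On success `wrapped` is a JSValue holding the primitive, mirroring the
// JavaScript-level wrapping done by Object(42) or Object("s").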
112 
113 bool Object::BooleanValue() {
114  if (IsBoolean()) return IsTrue();
115  if (IsSmi()) return Smi::cast(this)->value() != 0;
116  if (IsUndefined() || IsNull()) return false;
117  if (IsUndetectableObject()) return false; // Undetectable object is false.
118  if (IsString()) return String::cast(this)->length() != 0;
119  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
120  return true;
121 }
122 
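// Illustrative summary (not part of the original source) of how BooleanValue()
// maps onto ECMAScript ToBoolean:
//   undefined, null          -> false
//   true / false             -> themselves
//   "" (length 0)            -> false, any non-empty string -> true
//   +0, -0, NaN              -> false (via HeapNumberBooleanValue)
//   undetectable objects     -> false, all other objects -> true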
123 
124 bool Object::IsCallable() {
125  Object* fun = this;
126  while (fun->IsJSFunctionProxy()) {
127  fun = JSFunctionProxy::cast(fun)->call_trap();
128  }
129  return fun->IsJSFunction() ||
130  (fun->IsHeapObject() &&
131  HeapObject::cast(fun)->map()->has_instance_call_handler());
132 }
133 
134 
135 void Object::Lookup(Name* name, LookupResult* result) {
136  Object* holder = NULL;
137  if (IsJSReceiver()) {
138  holder = this;
139  } else {
140  Context* native_context = result->isolate()->context()->native_context();
141  if (IsNumber()) {
142  holder = native_context->number_function()->instance_prototype();
143  } else if (IsString()) {
144  holder = native_context->string_function()->instance_prototype();
145  } else if (IsSymbol()) {
146  holder = native_context->symbol_function()->instance_prototype();
147  } else if (IsBoolean()) {
148  holder = native_context->boolean_function()->instance_prototype();
149  } else {
150  result->isolate()->PushStackTraceAndDie(
151  0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
152  }
153  }
154  ASSERT(holder != NULL); // Cannot handle null or undefined.
155  JSReceiver::cast(holder)->Lookup(name, result);
156 }
157 
158 
159 Handle<Object> Object::GetPropertyWithReceiver(
160  Handle<Object> object,
161  Handle<Object> receiver,
162  Handle<Name> name,
163  PropertyAttributes* attributes) {
164  LookupResult lookup(name->GetIsolate());
165  object->Lookup(*name, &lookup);
166  Handle<Object> result =
167  GetProperty(object, receiver, &lookup, name, attributes);
168  ASSERT(*attributes <= ABSENT);
169  return result;
170 }
171 
172 
173 MaybeObject* Object::GetPropertyWithReceiver(Object* receiver,
174  Name* name,
175  PropertyAttributes* attributes) {
176  LookupResult result(name->GetIsolate());
177  Lookup(name, &result);
178  MaybeObject* value = GetProperty(receiver, &result, name, attributes);
179  ASSERT(*attributes <= ABSENT);
180  return value;
181 }
182 
183 
184 bool Object::ToInt32(int32_t* value) {
185  if (IsSmi()) {
186  *value = Smi::cast(this)->value();
187  return true;
188  }
189  if (IsHeapNumber()) {
190  double num = HeapNumber::cast(this)->value();
191  if (FastI2D(FastD2I(num)) == num) {
192  *value = FastD2I(num);
193  return true;
194  }
195  }
196  return false;
197 }
198 
199 
200 bool Object::ToUint32(uint32_t* value) {
201  if (IsSmi()) {
202  int num = Smi::cast(this)->value();
203  if (num >= 0) {
204  *value = static_cast<uint32_t>(num);
205  return true;
206  }
207  }
208  if (IsHeapNumber()) {
209  double num = HeapNumber::cast(this)->value();
210  if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
211  *value = FastD2UI(num);
212  return true;
213  }
214  }
215  return false;
216 }
217 
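// Illustrative examples (not part of the original source): the
// FastI2D(FastD2I(num)) == num round trip only accepts doubles that are exact
// 32-bit integers:
//   ToInt32 on HeapNumber(3.0)   -> true,  *value == 3
//   ToInt32 on HeapNumber(3.5)   -> false  (3.5 does not survive the round trip)
//   ToUint32 on HeapNumber(-1.0) -> false  (negative values are rejected)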
218 
219 bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
220  if (!object->IsHeapObject()) return false;
221  return IsTemplateFor(HeapObject::cast(object)->map());
222 }
223 
224 
225 bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
226  // There is a constraint on the object; check.
227  if (!map->IsJSObjectMap()) return false;
228  // Fetch the constructor function of the object.
229  Object* cons_obj = map->constructor();
230  if (!cons_obj->IsJSFunction()) return false;
231  JSFunction* fun = JSFunction::cast(cons_obj);
232  // Iterate through the chain of inheriting function templates to
233  // see if the required one occurs.
234  for (Object* type = fun->shared()->function_data();
235  type->IsFunctionTemplateInfo();
236  type = FunctionTemplateInfo::cast(type)->parent_template()) {
237  if (type == this) return true;
238  }
239  // Didn't find the required type in the inheritance chain.
240  return false;
241 }
242 
243 
244 template<typename To>
245 static inline To* CheckedCast(void *from) {
246  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
247  ASSERT(temp % sizeof(To) == 0);
248  return reinterpret_cast<To*>(temp);
249 }
250 
251 
252 static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor,
253  char* ptr,
254  Heap* heap) {
255  uint32_t bitmask = descriptor.bitmask;
256  uint32_t compare_value = descriptor.compare_value;
257  uint32_t value;
258  switch (descriptor.size) {
259  case 1:
260  value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
261  compare_value &= 0xff;
262  bitmask &= 0xff;
263  break;
264  case 2:
265  value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
266  compare_value &= 0xffff;
267  bitmask &= 0xffff;
268  break;
269  case 4:
270  value = *CheckedCast<uint32_t>(ptr);
271  break;
272  default:
273  UNREACHABLE();
274  return NULL;
275  }
276  return heap->ToBoolean((bitmask & value) == (bitmask & compare_value));
277 }
278 
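// Illustrative example (not part of the original source): with size == 1,
// bitmask == 0x0f and compare_value == 0x03, a stored byte of 0xa3 compares
// equal (0xa3 & 0x0f == 0x03 & 0x0f), while 0xa4 does not. Any size other
// than 1, 2 or 4 bytes hits UNREACHABLE() above.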
279 
280 static MaybeObject* PerformCompare(const PointerCompareDescriptor& descriptor,
281  char* ptr,
282  Heap* heap) {
283  uintptr_t compare_value =
284  reinterpret_cast<uintptr_t>(descriptor.compare_value);
285  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
286  return heap->ToBoolean(compare_value == value);
287 }
288 
289 
290 static MaybeObject* GetPrimitiveValue(
291  const PrimitiveValueDescriptor& descriptor,
292  char* ptr,
293  Heap* heap) {
294  int32_t int32_value = 0;
295  switch (descriptor.data_type) {
296  case kDescriptorInt8Type:
297  int32_value = *CheckedCast<int8_t>(ptr);
298  break;
299  case kDescriptorUint8Type:
300  int32_value = *CheckedCast<uint8_t>(ptr);
301  break;
302  case kDescriptorInt16Type:
303  int32_value = *CheckedCast<int16_t>(ptr);
304  break;
305  case kDescriptorUint16Type:
306  int32_value = *CheckedCast<uint16_t>(ptr);
307  break;
308  case kDescriptorInt32Type:
309  int32_value = *CheckedCast<int32_t>(ptr);
310  break;
311  case kDescriptorUint32Type: {
312  uint32_t value = *CheckedCast<uint32_t>(ptr);
313  return heap->NumberFromUint32(value);
314  }
315  case kDescriptorBoolType: {
316  uint8_t byte = *CheckedCast<uint8_t>(ptr);
317  return heap->ToBoolean(byte & (0x1 << descriptor.bool_offset));
318  }
319  case kDescriptorFloatType: {
320  float value = *CheckedCast<float>(ptr);
321  return heap->NumberFromDouble(value);
322  }
323  case kDescriptorDoubleType: {
324  double value = *CheckedCast<double>(ptr);
325  return heap->NumberFromDouble(value);
326  }
327  }
328  return heap->NumberFromInt32(int32_value);
329 }
330 
331 
332 static MaybeObject* GetDeclaredAccessorProperty(Object* receiver,
333  DeclaredAccessorInfo* info,
334  Isolate* isolate) {
335  char* current = reinterpret_cast<char*>(receiver);
336  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
337  while (true) {
338  const DeclaredAccessorDescriptorData* data = iterator.Next();
339  switch (data->type) {
340  case kDescriptorReturnObject: {
341  ASSERT(iterator.Complete());
342  current = *CheckedCast<char*>(current);
343  return *CheckedCast<Object*>(current);
344  }
345  case kDescriptorPointerDereference:
346  ASSERT(!iterator.Complete());
347  current = *reinterpret_cast<char**>(current);
348  break;
349  case kDescriptorPointerShift:
350  ASSERT(!iterator.Complete());
351  current += data->pointer_shift_descriptor.byte_offset;
352  break;
353  case kDescriptorObjectDereference: {
354  ASSERT(!iterator.Complete());
355  Object* object = CheckedCast<Object>(current);
356  int field = data->object_dereference_descriptor.internal_field;
357  Object* smi = JSObject::cast(object)->GetInternalField(field);
358  ASSERT(smi->IsSmi());
359  current = reinterpret_cast<char*>(smi);
360  break;
361  }
362  case kDescriptorBitmaskCompare:
363  ASSERT(iterator.Complete());
364  return PerformCompare(data->bitmask_compare_descriptor,
365  current,
366  isolate->heap());
367  case kDescriptorPointerCompare:
368  ASSERT(iterator.Complete());
369  return PerformCompare(data->pointer_compare_descriptor,
370  current,
371  isolate->heap());
372  case kDescriptorPrimitiveValue:
373  ASSERT(iterator.Complete());
374  return GetPrimitiveValue(data->primitive_value_descriptor,
375  current,
376  isolate->heap());
377  }
378  }
379  UNREACHABLE();
380  return NULL;
381 }
382 
383 
384 Handle<FixedArray> JSObject::EnsureWritableFastElements(
385  Handle<JSObject> object) {
386  CALL_HEAP_FUNCTION(object->GetIsolate(),
387  object->EnsureWritableFastElements(),
388  FixedArray);
389 }
390 
391 
392 Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object,
393  Handle<Object> receiver,
394  Handle<Object> structure,
395  Handle<Name> name) {
396  Isolate* isolate = name->GetIsolate();
397  // To accommodate both the old and the new api we switch on the
398  // data structure used to store the callbacks. Eventually foreign
399  // callbacks should be phased out.
400  if (structure->IsForeign()) {
401  AccessorDescriptor* callback =
402  reinterpret_cast<AccessorDescriptor*>(
403  Handle<Foreign>::cast(structure)->foreign_address());
404  CALL_HEAP_FUNCTION(isolate,
405  (callback->getter)(isolate, *receiver, callback->data),
406  Object);
407  }
408 
409  // api style callbacks.
410  if (structure->IsAccessorInfo()) {
411  Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
412  if (!accessor_info->IsCompatibleReceiver(*receiver)) {
413  Handle<Object> args[2] = { name, receiver };
414  Handle<Object> error =
415  isolate->factory()->NewTypeError("incompatible_method_receiver",
416  HandleVector(args,
417  ARRAY_SIZE(args)));
418  isolate->Throw(*error);
419  return Handle<Object>::null();
420  }
421  // TODO(rossberg): Handling symbols in the API requires changing the API,
422  // so we do not support it for now.
423  if (name->IsSymbol()) return isolate->factory()->undefined_value();
424  if (structure->IsDeclaredAccessorInfo()) {
425  CALL_HEAP_FUNCTION(
426  isolate,
427  GetDeclaredAccessorProperty(*receiver,
428  DeclaredAccessorInfo::cast(*structure),
429  isolate),
430  Object);
431  }
432 
433  Handle<ExecutableAccessorInfo> data =
434  Handle<ExecutableAccessorInfo>::cast(structure);
435  v8::AccessorGetterCallback call_fun =
436  v8::ToCData<v8::AccessorGetterCallback>(data->getter());
437  if (call_fun == NULL) return isolate->factory()->undefined_value();
438 
439  HandleScope scope(isolate);
440  Handle<JSObject> self = Handle<JSObject>::cast(receiver);
441  Handle<String> key = Handle<String>::cast(name);
442  LOG(isolate, ApiNamedPropertyAccess("load", *self, *name));
443  PropertyCallbackArguments args(isolate, data->data(), *self, *object);
444  v8::Handle<v8::Value> result =
445  args.Call(call_fun, v8::Utils::ToLocal(key));
447  if (result.IsEmpty()) {
448  return isolate->factory()->undefined_value();
449  }
450  Handle<Object> return_value = v8::Utils::OpenHandle(*result);
451  return_value->VerifyApiCallResultType();
452  return scope.CloseAndEscape(return_value);
453  }
454 
455  // __defineGetter__ callback
456  Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
457  isolate);
458  if (getter->IsSpecFunction()) {
459  // TODO(rossberg): nicer would be to cast to some JSCallable here...
460  CALL_HEAP_FUNCTION(
461  isolate,
462  object->GetPropertyWithDefinedGetter(*receiver,
463  JSReceiver::cast(*getter)),
464  Object);
465  }
466  // Getter is not a function.
467  return isolate->factory()->undefined_value();
468 }
469 
470 
471 MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw,
472  Name* name_raw) {
473  Isolate* isolate = GetIsolate();
474  HandleScope scope(isolate);
475  Handle<Object> receiver(receiver_raw, isolate);
476  Handle<Object> name(name_raw, isolate);
477 
478  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
479  if (name->IsSymbol()) return isolate->heap()->undefined_value();
480 
481  Handle<Object> args[] = { receiver, name };
482  Handle<Object> result = CallTrap(
483  "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
484  if (isolate->has_pending_exception()) return Failure::Exception();
485 
486  return *result;
487 }
488 
489 
490 Handle<Object> Object::GetProperty(Handle<Object> object,
491  Handle<Name> name) {
492  // TODO(rossberg): The index test should not be here but in the GetProperty
493  // method (or somewhere else entirely). Needs more global clean-up.
494  uint32_t index;
495  Isolate* isolate = name->GetIsolate();
496  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
497  CALL_HEAP_FUNCTION(isolate, object->GetProperty(*name), Object);
498 }
499 
500 
501 MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
502  uint32_t index) {
503  String* name;
504  MaybeObject* maybe = GetHeap()->Uint32ToString(index);
505  if (!maybe->To<String>(&name)) return maybe;
506  return GetPropertyWithHandler(receiver, name);
507 }
508 
509 
510 Handle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
511  Handle<JSReceiver> receiver,
512  uint32_t index,
513  Handle<Object> value,
514  StrictMode strict_mode) {
515  Isolate* isolate = proxy->GetIsolate();
516  Handle<String> name = isolate->factory()->Uint32ToString(index);
517  return SetPropertyWithHandler(
518  proxy, receiver, name, value, NONE, strict_mode);
519 }
520 
521 
522 bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
523  Isolate* isolate = proxy->GetIsolate();
524  Handle<String> name = isolate->factory()->Uint32ToString(index);
525  return HasPropertyWithHandler(proxy, name);
526 }
527 
528 
529 MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
530  JSReceiver* getter) {
531  Isolate* isolate = getter->GetIsolate();
532  HandleScope scope(isolate);
533  Handle<JSReceiver> fun(getter);
534  Handle<Object> self(receiver, isolate);
535 #ifdef ENABLE_DEBUGGER_SUPPORT
536  Debug* debug = isolate->debug();
537  // Handle stepping into a getter if step into is active.
538  // TODO(rossberg): should this apply to getters that are function proxies?
539  if (debug->StepInActive() && fun->IsJSFunction()) {
540  debug->HandleStepIn(
541  Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false);
542  }
543 #endif
544 
545  bool has_pending_exception;
546  Handle<Object> result = Execution::Call(
547  isolate, fun, self, 0, NULL, &has_pending_exception, true);
548  // Check for pending exception and return the result.
549  if (has_pending_exception) return Failure::Exception();
550  return *result;
551 }
552 
553 
554 // Only deal with CALLBACKS and INTERCEPTOR
555 Handle<Object> JSObject::GetPropertyWithFailedAccessCheck(
556  Handle<JSObject> object,
557  Handle<Object> receiver,
558  LookupResult* result,
559  Handle<Name> name,
560  PropertyAttributes* attributes) {
561  Isolate* isolate = name->GetIsolate();
562  if (result->IsProperty()) {
563  switch (result->type()) {
564  case CALLBACKS: {
565  // Only allow API accessors.
566  Handle<Object> callback_obj(result->GetCallbackObject(), isolate);
567  if (callback_obj->IsAccessorInfo()) {
568  if (!AccessorInfo::cast(*callback_obj)->all_can_read()) break;
569  *attributes = result->GetAttributes();
570  // Fall through to GetPropertyWithCallback.
571  } else if (callback_obj->IsAccessorPair()) {
572  if (!AccessorPair::cast(*callback_obj)->all_can_read()) break;
573  // Fall through to GetPropertyWithCallback.
574  } else {
575  break;
576  }
577  Handle<JSObject> holder(result->holder(), isolate);
578  return GetPropertyWithCallback(holder, receiver, callback_obj, name);
579  }
580  case NORMAL:
581  case FIELD:
582  case CONSTANT: {
583  // Search ALL_CAN_READ accessors in prototype chain.
584  LookupResult r(isolate);
585  result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r);
586  if (r.IsProperty()) {
587  return GetPropertyWithFailedAccessCheck(
588  object, receiver, &r, name, attributes);
589  }
590  break;
591  }
592  case INTERCEPTOR: {
593  // If the object has an interceptor, try real named properties.
594  // No access check in GetPropertyAttributeWithInterceptor.
595  LookupResult r(isolate);
596  result->holder()->LookupRealNamedProperty(*name, &r);
597  if (r.IsProperty()) {
598  return GetPropertyWithFailedAccessCheck(
599  object, receiver, &r, name, attributes);
600  }
601  break;
602  }
603  default:
604  UNREACHABLE();
605  }
606  }
607 
608  // No accessible property found.
609  *attributes = ABSENT;
610  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_GET);
611  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
612  return isolate->factory()->undefined_value();
613 }
614 
615 
616 PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
617  Handle<JSObject> object,
618  LookupResult* result,
619  Handle<Name> name,
620  bool continue_search) {
621  if (result->IsProperty()) {
622  switch (result->type()) {
623  case CALLBACKS: {
624  // Only allow API accessors.
625  Handle<Object> obj(result->GetCallbackObject(), object->GetIsolate());
626  if (obj->IsAccessorInfo()) {
627  Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(obj);
628  if (info->all_can_read()) {
629  return result->GetAttributes();
630  }
631  } else if (obj->IsAccessorPair()) {
632  Handle<AccessorPair> pair = Handle<AccessorPair>::cast(obj);
633  if (pair->all_can_read()) {
634  return result->GetAttributes();
635  }
636  }
637  break;
638  }
639 
640  case NORMAL:
641  case FIELD:
642  case CONSTANT: {
643  if (!continue_search) break;
644  // Search ALL_CAN_READ accessors in prototype chain.
645  LookupResult r(object->GetIsolate());
646  result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r);
647  if (r.IsProperty()) {
648  return GetPropertyAttributeWithFailedAccessCheck(
649  object, &r, name, continue_search);
650  }
651  break;
652  }
653 
654  case INTERCEPTOR: {
655  // If the object has an interceptor, try real named properties.
656  // No access check in GetPropertyAttributeWithInterceptor.
657  LookupResult r(object->GetIsolate());
658  if (continue_search) {
659  result->holder()->LookupRealNamedProperty(*name, &r);
660  } else {
661  result->holder()->LocalLookupRealNamedProperty(*name, &r);
662  }
663  if (!r.IsFound()) break;
664  return GetPropertyAttributeWithFailedAccessCheck(
665  object, &r, name, continue_search);
666  }
667 
668  case HANDLER:
669  case TRANSITION:
670  case NONEXISTENT:
671  UNREACHABLE();
672  }
673  }
674 
675  object->GetIsolate()->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
676  return ABSENT;
677 }
678 
679 
680 Object* JSObject::GetNormalizedProperty(const LookupResult* result) {
681  ASSERT(!HasFastProperties());
682  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
683  if (IsGlobalObject()) {
684  value = PropertyCell::cast(value)->value();
685  }
686  ASSERT(!value->IsPropertyCell() && !value->IsCell());
687  return value;
688 }
689 
690 
691 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
692  const LookupResult* result,
693  Handle<Object> value) {
694  ASSERT(!object->HasFastProperties());
695  NameDictionary* property_dictionary = object->property_dictionary();
696  if (object->IsGlobalObject()) {
697  Handle<PropertyCell> cell(PropertyCell::cast(
698  property_dictionary->ValueAt(result->GetDictionaryEntry())));
699  PropertyCell::SetValueInferType(cell, value);
700  } else {
701  property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
702  }
703 }
704 
705 
706 // TODO(mstarzinger): Temporary wrapper until handlified.
707 static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict,
708  Handle<Name> name,
709  Handle<Object> value,
710  PropertyDetails details) {
711  CALL_HEAP_FUNCTION(dict->GetIsolate(),
712  dict->Add(*name, *value, details),
713  NameDictionary);
714 }
715 
716 
717 void JSObject::SetNormalizedProperty(Handle<JSObject> object,
718  Handle<Name> name,
719  Handle<Object> value,
720  PropertyDetails details) {
721  ASSERT(!object->HasFastProperties());
722  Handle<NameDictionary> property_dictionary(object->property_dictionary());
723 
724  if (!name->IsUniqueName()) {
725  name = object->GetIsolate()->factory()->InternalizeString(
726  Handle<String>::cast(name));
727  }
728 
729  int entry = property_dictionary->FindEntry(*name);
730  if (entry == NameDictionary::kNotFound) {
731  Handle<Object> store_value = value;
732  if (object->IsGlobalObject()) {
733  store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
734  }
735 
736  property_dictionary =
737  NameDictionaryAdd(property_dictionary, name, store_value, details);
738  object->set_properties(*property_dictionary);
739  return;
740  }
741 
742  PropertyDetails original_details = property_dictionary->DetailsAt(entry);
743  int enumeration_index;
744  // Preserve the enumeration index unless the property was deleted.
745  if (original_details.IsDeleted()) {
746  enumeration_index = property_dictionary->NextEnumerationIndex();
747  property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
748  } else {
749  enumeration_index = original_details.dictionary_index();
750  ASSERT(enumeration_index > 0);
751  }
752 
753  details = PropertyDetails(
754  details.attributes(), details.type(), enumeration_index);
755 
756  if (object->IsGlobalObject()) {
757  Handle<PropertyCell> cell(PropertyCell::cast(
758  property_dictionary->ValueAt(entry)));
759  PropertyCell::SetValueInferType(cell, value);
760  // Please note we have to update the property details.
761  property_dictionary->DetailsAtPut(entry, details);
762  } else {
763  property_dictionary->SetEntry(entry, *name, *value, details);
764  }
765 }
766 
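// Illustrative note (not part of the original source): re-adding a property
// whose dictionary entry was deleted gets a fresh enumeration index, while
// overwriting a live entry keeps its old index, so for-in enumeration order
// is preserved across plain value updates.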
767 
768 // TODO(mstarzinger): Temporary wrapper until target is handlified.
769 static Handle<NameDictionary> NameDictionaryShrink(Handle<NameDictionary> dict,
770  Handle<Name> name) {
771  CALL_HEAP_FUNCTION(dict->GetIsolate(), dict->Shrink(*name), NameDictionary);
772 }
773 
774 
775 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
776  Handle<Name> name,
777  DeleteMode mode) {
778  ASSERT(!object->HasFastProperties());
779  Isolate* isolate = object->GetIsolate();
780  Handle<NameDictionary> dictionary(object->property_dictionary());
781  int entry = dictionary->FindEntry(*name);
782  if (entry != NameDictionary::kNotFound) {
783  // If we have a global object set the cell to the hole.
784  if (object->IsGlobalObject()) {
785  PropertyDetails details = dictionary->DetailsAt(entry);
786  if (details.IsDontDelete()) {
787  if (mode != FORCE_DELETION) return isolate->factory()->false_value();
788  // When forced to delete global properties, we have to make a
789  // map change to invalidate any ICs that think they can load
790  // from the DontDelete cell without checking if it contains
791  // the hole value.
792  Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
793  ASSERT(new_map->is_dictionary_map());
794  object->set_map(*new_map);
795  }
796  Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
797  Handle<Object> value = isolate->factory()->the_hole_value();
798  PropertyCell::SetValueInferType(cell, value);
799  dictionary->DetailsAtPut(entry, details.AsDeleted());
800  } else {
801  Handle<Object> deleted(dictionary->DeleteProperty(entry, mode), isolate);
802  if (*deleted == isolate->heap()->true_value()) {
803  Handle<NameDictionary> new_properties =
804  NameDictionaryShrink(dictionary, name);
805  object->set_properties(*new_properties);
806  }
807  return deleted;
808  }
809  }
810  return isolate->factory()->true_value();
811 }
812 
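// Illustrative note (not part of the original source): for global objects the
// dictionary entry is kept and its PropertyCell is set to the_hole instead of
// being removed; a forced delete additionally installs a fresh map so that
// ICs holding the old cell cannot keep loading from it without a hole check.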
813 
814 bool JSObject::IsDirty() {
815  Object* cons_obj = map()->constructor();
816  if (!cons_obj->IsJSFunction())
817  return true;
818  JSFunction* fun = JSFunction::cast(cons_obj);
819  if (!fun->shared()->IsApiFunction())
820  return true;
821  // If the object is fully fast case and has the same map it was
822  // created with then no changes can have been made to it.
823  return map() != fun->initial_map()
824  || !HasFastObjectElements()
825  || !HasFastProperties();
826 }
827 
828 
829 Handle<Object> Object::GetProperty(Handle<Object> object,
830  Handle<Object> receiver,
831  LookupResult* result,
832  Handle<Name> key,
833  PropertyAttributes* attributes) {
834  Isolate* isolate = result->isolate();
835  CALL_HEAP_FUNCTION(
836  isolate,
837  object->GetProperty(*receiver, result, *key, attributes),
838  Object);
839 }
840 
841 
842 MaybeObject* Object::GetPropertyOrFail(Handle<Object> object,
843  Handle<Object> receiver,
844  LookupResult* result,
845  Handle<Name> key,
846  PropertyAttributes* attributes) {
847  Isolate* isolate = result->isolate();
848  CALL_HEAP_FUNCTION_PASS_EXCEPTION(
849  isolate,
850  object->GetProperty(*receiver, result, *key, attributes));
851 }
852 
853 
854 // TODO(yangguo): handlify this and get rid of.
855 MaybeObject* Object::GetProperty(Object* receiver,
856  LookupResult* result,
857  Name* name,
858  PropertyAttributes* attributes) {
859  Isolate* isolate = name->GetIsolate();
860  Heap* heap = isolate->heap();
861 
862 #ifdef DEBUG
863  // TODO(mstarzinger): Only because of the AssertNoContextChange, drop as soon
864  // as this method has been fully handlified.
865  HandleScope scope(isolate);
866 #endif
867 
868  // Make sure that the top context does not change when doing
869  // callbacks or interceptor calls.
870  AssertNoContextChange ncc(isolate);
871 
872  // Traverse the prototype chain from the current object (this) to
873  // the holder and check for access rights. This avoids traversing the
874  // objects more than once in case of interceptors, because the
875  // holder will always be the interceptor holder and the search may
876  // only continue with a current object just after the interceptor
877  // holder in the prototype chain.
878  // Proxy handlers do not use the proxy's prototype, so we can skip this.
879  if (!result->IsHandler()) {
880  Object* last = result->IsProperty()
881  ? result->holder()
882  : Object::cast(heap->null_value());
883  ASSERT(this != this->GetPrototype(isolate));
884  for (Object* current = this;
885  true;
886  current = current->GetPrototype(isolate)) {
887  if (current->IsAccessCheckNeeded()) {
888  // Check if we're allowed to read from the current object. Note
889  // that even though we may not actually end up loading the named
890  // property from the current object, we still check that we have
891  // access to it.
892  JSObject* checked = JSObject::cast(current);
893  if (!isolate->MayNamedAccess(checked, name, v8::ACCESS_GET)) {
894  HandleScope scope(isolate);
895  Handle<Object> value = JSObject::GetPropertyWithFailedAccessCheck(
896  handle(checked, isolate),
897  handle(receiver, isolate),
898  result,
899  handle(name, isolate),
900  attributes);
901  RETURN_IF_EMPTY_HANDLE(isolate, value);
902  return *value;
903  }
904  }
905  // Stop traversing the chain once we reach the last object in the
906  // chain; either the holder of the result or null in case of an
907  // absent property.
908  if (current == last) break;
909  }
910  }
911 
912  if (!result->IsProperty()) {
913  *attributes = ABSENT;
914  return heap->undefined_value();
915  }
916  *attributes = result->GetAttributes();
917  Object* value;
918  switch (result->type()) {
919  case NORMAL:
920  value = result->holder()->GetNormalizedProperty(result);
921  ASSERT(!value->IsTheHole() || result->IsReadOnly());
922  return value->IsTheHole() ? heap->undefined_value() : value;
923  case FIELD: {
924  MaybeObject* maybe_result = result->holder()->FastPropertyAt(
925  result->representation(),
926  result->GetFieldIndex().field_index());
927  if (!maybe_result->To(&value)) return maybe_result;
928  ASSERT(!value->IsTheHole() || result->IsReadOnly());
929  return value->IsTheHole() ? heap->undefined_value() : value;
930  }
931  case CONSTANT:
932  return result->GetConstant();
933  case CALLBACKS: {
934  HandleScope scope(isolate);
935  Handle<Object> value = JSObject::GetPropertyWithCallback(
936  handle(result->holder(), isolate),
937  handle(receiver, isolate),
938  handle(result->GetCallbackObject(), isolate),
939  handle(name, isolate));
940  RETURN_IF_EMPTY_HANDLE(isolate, value);
941  return *value;
942  }
943  case HANDLER:
944  return result->proxy()->GetPropertyWithHandler(receiver, name);
945  case INTERCEPTOR: {
946  HandleScope scope(isolate);
947  Handle<Object> value = JSObject::GetPropertyWithInterceptor(
948  handle(result->holder(), isolate),
949  handle(receiver, isolate),
950  handle(name, isolate),
951  attributes);
952  RETURN_IF_EMPTY_HANDLE(isolate, value);
953  return *value;
954  }
955  case TRANSITION:
956  case NONEXISTENT:
957  UNREACHABLE();
958  break;
959  }
960  UNREACHABLE();
961  return NULL;
962 }
963 
964 
965 Handle<Object> Object::GetElementWithReceiver(Isolate* isolate,
966  Handle<Object> object,
967  Handle<Object> receiver,
968  uint32_t index) {
969  Handle<Object> holder;
970 
971  // Iterate up the prototype chain until an element is found or the null
972  // prototype is encountered.
973  for (holder = object;
974  !holder->IsNull();
975  holder = Handle<Object>(holder->GetPrototype(isolate), isolate)) {
976  if (!holder->IsJSObject()) {
977  Context* native_context = isolate->context()->native_context();
978  if (holder->IsNumber()) {
979  holder = Handle<Object>(
980  native_context->number_function()->instance_prototype(), isolate);
981  } else if (holder->IsString()) {
982  holder = Handle<Object>(
983  native_context->string_function()->instance_prototype(), isolate);
984  } else if (holder->IsSymbol()) {
985  holder = Handle<Object>(
986  native_context->symbol_function()->instance_prototype(), isolate);
987  } else if (holder->IsBoolean()) {
988  holder = Handle<Object>(
989  native_context->boolean_function()->instance_prototype(), isolate);
990  } else if (holder->IsJSProxy()) {
991  CALL_HEAP_FUNCTION(isolate,
992  Handle<JSProxy>::cast(holder)->GetElementWithHandler(
993  *receiver, index),
994  Object);
995  } else {
996  // Undefined and null have no indexed properties.
997  ASSERT(holder->IsUndefined() || holder->IsNull());
998  return isolate->factory()->undefined_value();
999  }
1000  }
1001 
1002  // Inline the case for JSObjects. Doing so significantly improves the
1003  // performance of fetching elements where checking the prototype chain is
1004  // necessary.
1005  Handle<JSObject> js_object = Handle<JSObject>::cast(holder);
1006 
1007  // Check access rights if needed.
1008  if (js_object->IsAccessCheckNeeded()) {
1009  if (!isolate->MayIndexedAccessWrapper(js_object, index, v8::ACCESS_GET)) {
1010  isolate->ReportFailedAccessCheckWrapper(js_object, v8::ACCESS_GET);
1012  return isolate->factory()->undefined_value();
1013  }
1014  }
1015 
1016  if (js_object->HasIndexedInterceptor()) {
1017  return JSObject::GetElementWithInterceptor(js_object, receiver, index);
1018  }
1019 
1020  if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
1021  Handle<Object> result = js_object->GetElementsAccessor()->Get(
1022  receiver, js_object, index);
1023  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
1024  if (!result->IsTheHole()) return result;
1025  }
1026  }
1027 
1028  return isolate->factory()->undefined_value();
1029 }
1030 
1031 
1032 Object* Object::GetPrototype(Isolate* isolate) {
1033  if (IsSmi()) {
1034  Context* context = isolate->context()->native_context();
1035  return context->number_function()->instance_prototype();
1036  }
1037 
1038  HeapObject* heap_object = HeapObject::cast(this);
1039 
1040  // The object is either a number, a string, a boolean,
1041  // a real JS object, or a Harmony proxy.
1042  if (heap_object->IsJSReceiver()) {
1043  return heap_object->map()->prototype();
1044  }
1045  Context* context = isolate->context()->native_context();
1046 
1047  if (heap_object->IsHeapNumber()) {
1048  return context->number_function()->instance_prototype();
1049  }
1050  if (heap_object->IsString()) {
1051  return context->string_function()->instance_prototype();
1052  }
1053  if (heap_object->IsSymbol()) {
1054  return context->symbol_function()->instance_prototype();
1055  }
1056  if (heap_object->IsBoolean()) {
1057  return context->boolean_function()->instance_prototype();
1058  } else {
1059  return isolate->heap()->null_value();
1060  }
1061 }
1062 
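// Illustrative example (not part of the original source): this lookup is what
// makes property access on primitives work, e.g. (42).toString resolves
// through number_function()->instance_prototype(), i.e. Number.prototype,
// while the Smi/HeapNumber receiver itself stays unboxed.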
1063 
1064 Map* Object::GetMarkerMap(Isolate* isolate) {
1065  if (IsSmi()) return isolate->heap()->heap_number_map();
1066  return HeapObject::cast(this)->map();
1067 }
1068 
1069 
1070 Object* Object::GetHash() {
1071  // The object is either a number, a name, an odd-ball,
1072  // a real JS object, or a Harmony proxy.
1073  if (IsNumber()) {
1074  uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
1075  return Smi::FromInt(hash & Smi::kMaxValue);
1076  }
1077  if (IsName()) {
1078  uint32_t hash = Name::cast(this)->Hash();
1079  return Smi::FromInt(hash);
1080  }
1081  if (IsOddball()) {
1082  uint32_t hash = Oddball::cast(this)->to_string()->Hash();
1083  return Smi::FromInt(hash);
1084  }
1085 
1086  ASSERT(IsJSReceiver());
1087  return JSReceiver::cast(this)->GetIdentityHash();
1088 }
1089 
1090 
1091 Handle<Object> Object::GetOrCreateHash(Handle<Object> object,
1092  Isolate* isolate) {
1093  Handle<Object> hash(object->GetHash(), isolate);
1094  if (hash->IsSmi())
1095  return hash;
1096 
1097  ASSERT(object->IsJSReceiver());
1098  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
1099 }
1100 
1101 
1102 bool Object::SameValue(Object* other) {
1103  if (other == this) return true;
1104 
1105  // The object is either a number, a name, an odd-ball,
1106  // a real JS object, or a Harmony proxy.
1107  if (IsNumber() && other->IsNumber()) {
1108  double this_value = Number();
1109  double other_value = other->Number();
1110  bool equal = this_value == other_value;
1111  // SameValue(NaN, NaN) is true.
1112  if (!equal) return std::isnan(this_value) && std::isnan(other_value);
1113  // SameValue(0.0, -0.0) is false.
1114  return (this_value != 0) || ((1 / this_value) == (1 / other_value));
1115  }
1116  if (IsString() && other->IsString()) {
1117  return String::cast(this)->Equals(String::cast(other));
1118  }
1119  return false;
1120 }
1121 
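// Illustrative results (not part of the original source), per ES5 9.12
// SameValue and the checks above:
//   SameValue(NaN, NaN) -> true   (unlike ==, which is false for NaN)
//   SameValue(+0, -0)   -> false  (1/+0 and 1/-0 have different signs)
//   SameValue("a", "a") -> true   (content equality via String::Equals)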
1122 
1123 void Object::ShortPrint(FILE* out) {
1124  HeapStringAllocator allocator;
1125  StringStream accumulator(&allocator);
1126  ShortPrint(&accumulator);
1127  accumulator.OutputToFile(out);
1128 }
1129 
1130 
1131 void Object::ShortPrint(StringStream* accumulator) {
1132  if (IsSmi()) {
1133  Smi::cast(this)->SmiPrint(accumulator);
1134  } else if (IsFailure()) {
1135  Failure::cast(this)->FailurePrint(accumulator);
1136  } else {
1137  HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
1138  }
1139 }
1140 
1141 
1142 void Smi::SmiPrint(FILE* out) {
1143  PrintF(out, "%d", value());
1144 }
1145 
1146 
1147 void Smi::SmiPrint(StringStream* accumulator) {
1148  accumulator->Add("%d", value());
1149 }
1150 
1151 
1152 void Failure::FailurePrint(StringStream* accumulator) {
1153  accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value()));
1154 }
1155 
1156 
1157 void Failure::FailurePrint(FILE* out) {
1158  PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
1159 }
1160 
1161 
1162 // Should a word be prefixed by 'a' or 'an' in order to read naturally in
1163 // English? Returns false for non-ASCII or words that don't start with
1164 // a capital letter. The a/an rule follows pronunciation in English.
1165 // We don't use the BBC's overcorrect "an historic occasion" though if
1166 // you speak a dialect you may well say "an 'istoric occasion".
1167 static bool AnWord(String* str) {
1168  if (str->length() == 0) return false; // A nothing.
1169  int c0 = str->Get(0);
1170  int c1 = str->length() > 1 ? str->Get(1) : 0;
1171  if (c0 == 'U') {
1172  if (c1 > 'Z') {
1173  return true; // An Umpire, but a UTF8String, a U.
1174  }
1175  } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
1176  return true; // An Ape, an ABCBook.
1177  } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
1178  (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
1179  c0 == 'S' || c0 == 'X')) {
1180  return true; // An MP3File, an M.
1181  }
1182  return false;
1183 }
1184 
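// Illustrative results (not part of the original source), following the rules
// above:
//   AnWord("Object")   -> true   ("an Object": leading vowel)
//   AnWord("MP3File")  -> true   ("an MP3File": consonant-letter initialism)
//   AnWord("String")   -> false  ("a String": lower-case second letter)
//   AnWord("UTF8File") -> false  ("a UTF8File": 'U' followed by another capital)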
1185 
1186 MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
1187 #ifdef DEBUG
1188  // Do not attempt to flatten in debug mode when allocation is not
1189  // allowed. This is to avoid an assertion failure when allocating.
1190  // Flattening strings is the only case where we always allow
1191  // allocation because no GC is performed if the allocation fails.
1192  if (!AllowHeapAllocation::IsAllowed()) return this;
1193 #endif
1194 
1195  Heap* heap = GetHeap();
1196  switch (StringShape(this).representation_tag()) {
1197  case kConsStringTag: {
1198  ConsString* cs = ConsString::cast(this);
1199  if (cs->second()->length() == 0) {
1200  return cs->first();
1201  }
1202  // There's little point in putting the flat string in new space if the
1203  // cons string is in old space. It can never get GCed until there is
1204  // an old space GC.
1205  PretenureFlag tenure = heap->InNewSpace(this) ? pretenure : TENURED;
1206  int len = length();
1207  Object* object;
1208  String* result;
1209  if (IsOneByteRepresentation()) {
1210  { MaybeObject* maybe_object =
1211  heap->AllocateRawOneByteString(len, tenure);
1212  if (!maybe_object->ToObject(&object)) return maybe_object;
1213  }
1214  result = String::cast(object);
1215  String* first = cs->first();
1216  int first_length = first->length();
1217  uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
1218  WriteToFlat(first, dest, 0, first_length);
1219  String* second = cs->second();
1220  WriteToFlat(second,
1221  dest + first_length,
1222  0,
1223  len - first_length);
1224  } else {
1225  { MaybeObject* maybe_object =
1226  heap->AllocateRawTwoByteString(len, tenure);
1227  if (!maybe_object->ToObject(&object)) return maybe_object;
1228  }
1229  result = String::cast(object);
1230  uc16* dest = SeqTwoByteString::cast(result)->GetChars();
1231  String* first = cs->first();
1232  int first_length = first->length();
1233  WriteToFlat(first, dest, 0, first_length);
1234  String* second = cs->second();
1235  WriteToFlat(second,
1236  dest + first_length,
1237  0,
1238  len - first_length);
1239  }
1240  cs->set_first(result);
1241  cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER);
1242  return result;
1243  }
1244  default:
1245  return this;
1246  }
1247 }
1248 
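// Illustrative example (not part of the original source): flattening a
// ConsString copies both halves into one sequential string and then
// short-circuits the cons cell, e.g. for cs = "foo" + "bar":
//   before: cs->first() == "foo",    cs->second() == "bar"
//   after:  cs->first() == "foobar", cs->second() == "" (empty_string)
// so subsequent character access is O(1) instead of walking the tree.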
1249 
1250 bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
1251  // Externalizing twice leaks the external resource, so it's
1252  // prohibited by the API.
1253  ASSERT(!this->IsExternalString());
1254 #ifdef ENABLE_SLOW_ASSERTS
1255  if (FLAG_enable_slow_asserts) {
1256  // Assert that the resource and the string are equivalent.
1257  ASSERT(static_cast<size_t>(this->length()) == resource->length());
1258  ScopedVector<uc16> smart_chars(this->length());
1259  String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1260  ASSERT(memcmp(smart_chars.start(),
1261  resource->data(),
1262  resource->length() * sizeof(smart_chars[0])) == 0);
1263  }
1264 #endif // DEBUG
1265  Heap* heap = GetHeap();
1266  int size = this->Size(); // Byte size of the original string.
1267  if (size < ExternalString::kShortSize) {
1268  return false;
1269  }
1270  bool is_ascii = this->IsOneByteRepresentation();
1271  bool is_internalized = this->IsInternalizedString();
1272 
1273  // Morph the string to an external string by replacing the map and
1274  // reinitializing the fields. This won't work if
1275  // - the space the existing string occupies is too small for a regular
1276  // external string.
1277  // - the existing string is in old pointer space and the backing store of
1278  // the external string is not aligned. The GC cannot deal with a field
1279  // containing a possibly unaligned address to outside of V8's heap.
1280  // In either case we resort to a short external string instead, omitting
1281  // the field caching the address of the backing store. When we encounter
1282  // short external strings in generated code, we need to bailout to runtime.
1283  if (size < ExternalString::kSize ||
1284  heap->old_pointer_space()->Contains(this)) {
1285  this->set_map_no_write_barrier(
1286  is_internalized
1287  ? (is_ascii
1288  ? heap->
1289  short_external_internalized_string_with_one_byte_data_map()
1290  : heap->short_external_internalized_string_map())
1291  : (is_ascii
1292  ? heap->short_external_string_with_one_byte_data_map()
1293  : heap->short_external_string_map()));
1294  } else {
1295  this->set_map(
1296  is_internalized
1297  ? (is_ascii
1298  ? heap->external_internalized_string_with_one_byte_data_map()
1299  : heap->external_internalized_string_map())
1300  : (is_ascii
1301  ? heap->external_string_with_one_byte_data_map()
1302  : heap->external_string_map()));
1303  }
1304  ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
1305  self->set_resource(resource);
1306  if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1307 
1308  // Fill the remainder of the string with dead wood.
1309  int new_size = this->Size(); // Byte size of the external String object.
1310  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1311  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
1312  return true;
1313 }
1314 
1315 
1316 bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
1317 #ifdef ENABLE_SLOW_ASSERTS
1318  if (FLAG_enable_slow_asserts) {
1319  // Assert that the resource and the string are equivalent.
1320  ASSERT(static_cast<size_t>(this->length()) == resource->length());
1321  if (this->IsTwoByteRepresentation()) {
1322  ScopedVector<uint16_t> smart_chars(this->length());
1323  String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1324  ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
1325  }
1326  ScopedVector<char> smart_chars(this->length());
1327  String::WriteToFlat(this, smart_chars.start(), 0, this->length());
1328  ASSERT(memcmp(smart_chars.start(),
1329  resource->data(),
1330  resource->length() * sizeof(smart_chars[0])) == 0);
1331  }
1332 #endif // DEBUG
1333  Heap* heap = GetHeap();
1334  int size = this->Size(); // Byte size of the original string.
1335  if (size < ExternalString::kShortSize) {
1336  return false;
1337  }
1338  bool is_internalized = this->IsInternalizedString();
1339 
1340  // Morph the string to an external string by replacing the map and
1341  // reinitializing the fields. This won't work if
1342  // - the space the existing string occupies is too small for a regular
1343  // external string.
1344  // - the existing string is in old pointer space and the backing store of
1345  // the external string is not aligned. The GC cannot deal with a field
1346  // containing a possibly unaligned address to outside of V8's heap.
1347  // In either case we resort to a short external string instead, omitting
1348  // the field caching the address of the backing store. When we encounter
1349  // short external strings in generated code, we need to bailout to runtime.
1350  if (size < ExternalString::kSize ||
1351  heap->old_pointer_space()->Contains(this)) {
1352  this->set_map_no_write_barrier(
1353  is_internalized ? heap->short_external_ascii_internalized_string_map()
1354  : heap->short_external_ascii_string_map());
1355  } else {
1356  this->set_map(
1357  is_internalized ? heap->external_ascii_internalized_string_map()
1358  : heap->external_ascii_string_map());
1359  }
1360  ExternalAsciiString* self = ExternalAsciiString::cast(this);
1361  self->set_resource(resource);
1362  if (is_internalized) self->Hash(); // Force regeneration of the hash value.
1363 
1364  // Fill the remainder of the string with dead wood.
1365  int new_size = this->Size(); // Byte size of the external String object.
1366  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
1367  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
1368  return true;
1369 }
1370 
1371 
1372 void String::StringShortPrint(StringStream* accumulator) {
1373  int len = length();
1374  if (len > kMaxShortPrintLength) {
1375  accumulator->Add("<Very long string[%u]>", len);
1376  return;
1377  }
1378 
1379  if (!LooksValid()) {
1380  accumulator->Add("<Invalid String>");
1381  return;
1382  }
1383 
1384  ConsStringIteratorOp op;
1385  StringCharacterStream stream(this, &op);
1386 
1387  bool truncated = false;
1388  if (len > kMaxShortPrintLength) {
1389  len = kMaxShortPrintLength;
1390  truncated = true;
1391  }
1392  bool ascii = true;
1393  for (int i = 0; i < len; i++) {
1394  uint16_t c = stream.GetNext();
1395 
1396  if (c < 32 || c >= 127) {
1397  ascii = false;
1398  }
1399  }
1400  stream.Reset(this);
1401  if (ascii) {
1402  accumulator->Add("<String[%u]: ", length());
1403  for (int i = 0; i < len; i++) {
1404  accumulator->Put(static_cast<char>(stream.GetNext()));
1405  }
1406  accumulator->Put('>');
1407  } else {
1408  // Backslash indicates that the string contains control
1409  // characters and that backslashes are therefore escaped.
1410  accumulator->Add("<String[%u]\\: ", length());
1411  for (int i = 0; i < len; i++) {
1412  uint16_t c = stream.GetNext();
1413  if (c == '\n') {
1414  accumulator->Add("\\n");
1415  } else if (c == '\r') {
1416  accumulator->Add("\\r");
1417  } else if (c == '\\') {
1418  accumulator->Add("\\\\");
1419  } else if (c < 32 || c > 126) {
1420  accumulator->Add("\\x%02x", c);
1421  } else {
1422  accumulator->Put(static_cast<char>(c));
1423  }
1424  }
1425  if (truncated) {
1426  accumulator->Put('.');
1427  accumulator->Put('.');
1428  accumulator->Put('.');
1429  }
1430  accumulator->Put('>');
1431  }
1432  return;
1433 }
1434 
1435 
1437  switch (map()->instance_type()) {
1438  case JS_ARRAY_TYPE: {
1439  double length = JSArray::cast(this)->length()->IsUndefined()
1440  ? 0
1441  : JSArray::cast(this)->length()->Number();
1442  accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
1443  break;
1444  }
1445  case JS_WEAK_MAP_TYPE: {
1446  accumulator->Add("<JS WeakMap>");
1447  break;
1448  }
1449  case JS_WEAK_SET_TYPE: {
1450  accumulator->Add("<JS WeakSet>");
1451  break;
1452  }
1453  case JS_REGEXP_TYPE: {
1454  accumulator->Add("<JS RegExp>");
1455  break;
1456  }
1457  case JS_FUNCTION_TYPE: {
1458  JSFunction* function = JSFunction::cast(this);
1459  Object* fun_name = function->shared()->DebugName();
1460  bool printed = false;
1461  if (fun_name->IsString()) {
1462  String* str = String::cast(fun_name);
1463  if (str->length() > 0) {
1464  accumulator->Add("<JS Function ");
1465  accumulator->Put(str);
1466  printed = true;
1467  }
1468  }
1469  if (!printed) {
1470  accumulator->Add("<JS Function");
1471  }
1472  accumulator->Add(" (SharedFunctionInfo %p)",
1473  reinterpret_cast<void*>(function->shared()));
1474  accumulator->Put('>');
1475  break;
1476  }
1477  case JS_GENERATOR_OBJECT_TYPE: {
1478  accumulator->Add("<JS Generator>");
1479  break;
1480  }
1481  case JS_MODULE_TYPE: {
1482  accumulator->Add("<JS Module>");
1483  break;
1484  }
1485  // All other JSObjects are rather similar to each other (JSObject,
1486  // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
1487  default: {
1488  Map* map_of_this = map();
1489  Heap* heap = GetHeap();
1490  Object* constructor = map_of_this->constructor();
1491  bool printed = false;
1492  if (constructor->IsHeapObject() &&
1493  !heap->Contains(HeapObject::cast(constructor))) {
1494  accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
1495  } else {
1496  bool global_object = IsJSGlobalProxy();
1497  if (constructor->IsJSFunction()) {
1498  if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
1499  accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
1500  } else {
1501  Object* constructor_name =
1502  JSFunction::cast(constructor)->shared()->name();
1503  if (constructor_name->IsString()) {
1504  String* str = String::cast(constructor_name);
1505  if (str->length() > 0) {
1506  bool vowel = AnWord(str);
1507  accumulator->Add("<%sa%s ",
1508  global_object ? "Global Object: " : "",
1509  vowel ? "n" : "");
1510  accumulator->Put(str);
1511  accumulator->Add(" with %smap %p",
1512  map_of_this->is_deprecated() ? "deprecated " : "",
1513  map_of_this);
1514  printed = true;
1515  }
1516  }
1517  }
1518  }
1519  if (!printed) {
1520  accumulator->Add("<JS %sObject", global_object ? "Global " : "");
1521  }
1522  }
1523  if (IsJSValue()) {
1524  accumulator->Add(" value = ");
1525  JSValue::cast(this)->value()->ShortPrint(accumulator);
1526  }
1527  accumulator->Put('>');
1528  break;
1529  }
1530  }
1531 }
1532 
1533 
1534 void JSObject::PrintElementsTransition(
1535  FILE* file, Handle<JSObject> object,
1536  ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
1537  ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
1538  if (from_kind != to_kind) {
1539  PrintF(file, "elements transition [");
1540  PrintElementsKind(file, from_kind);
1541  PrintF(file, " -> ");
1542  PrintElementsKind(file, to_kind);
1543  PrintF(file, "] in ");
1544  JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
1545  PrintF(file, " for ");
1546  object->ShortPrint(file);
1547  PrintF(file, " from ");
1548  from_elements->ShortPrint(file);
1549  PrintF(file, " to ");
1550  to_elements->ShortPrint(file);
1551  PrintF(file, "\n");
1552  }
1553 }
1554 
1555 
1556 void Map::PrintGeneralization(FILE* file,
1557  const char* reason,
1558  int modify_index,
1559  int split,
1560  int descriptors,
1561  bool constant_to_field,
1562  Representation old_representation,
1563  Representation new_representation) {
1564  PrintF(file, "[generalizing ");
1565  constructor_name()->PrintOn(file);
1566  PrintF(file, "] ");
1567  Name* name = instance_descriptors()->GetKey(modify_index);
1568  if (name->IsString()) {
1569  String::cast(name)->PrintOn(file);
1570  } else {
1571  PrintF(file, "{symbol %p}", static_cast<void*>(name));
1572  }
1573  if (constant_to_field) {
1574  PrintF(file, ":c->f");
1575  } else {
1576  PrintF(file, ":%s->%s",
1577  old_representation.Mnemonic(),
1578  new_representation.Mnemonic());
1579  }
1580  PrintF(file, " (");
1581  if (strlen(reason) > 0) {
1582  PrintF(file, "%s", reason);
1583  } else {
1584  PrintF(file, "+%i maps", descriptors - split);
1585  }
1586  PrintF(file, ") [");
1587  JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
1588  PrintF(file, "]\n");
1589 }
1590 
1591 
1592 void JSObject::PrintInstanceMigration(FILE* file,
1593  Map* original_map,
1594  Map* new_map) {
1595  PrintF(file, "[migrating ");
1596  map()->constructor_name()->PrintOn(file);
1597  PrintF(file, "] ");
1598  DescriptorArray* o = original_map->instance_descriptors();
1599  DescriptorArray* n = new_map->instance_descriptors();
1600  for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
1601  Representation o_r = o->GetDetails(i).representation();
1602  Representation n_r = n->GetDetails(i).representation();
1603  if (!o_r.Equals(n_r)) {
1604  String::cast(o->GetKey(i))->PrintOn(file);
1605  PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
1606  } else if (o->GetDetails(i).type() == CONSTANT &&
1607  n->GetDetails(i).type() == FIELD) {
1608  Name* name = o->GetKey(i);
1609  if (name->IsString()) {
1610  String::cast(name)->PrintOn(file);
1611  } else {
1612  PrintF(file, "{symbol %p}", static_cast<void*>(name));
1613  }
1614  PrintF(file, " ");
1615  }
1616  }
1617  PrintF(file, "\n");
1618 }
1619 
1620 
1621 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
1622  Heap* heap = GetHeap();
1623  if (!heap->Contains(this)) {
1624  accumulator->Add("!!!INVALID POINTER!!!");
1625  return;
1626  }
1627  if (!heap->Contains(map())) {
1628  accumulator->Add("!!!INVALID MAP!!!");
1629  return;
1630  }
1631 
1632  accumulator->Add("%p ", this);
1633 
1634  if (IsString()) {
1635  String::cast(this)->StringShortPrint(accumulator);
1636  return;
1637  }
1638  if (IsJSObject()) {
1639  JSObject::cast(this)->JSObjectShortPrint(accumulator);
1640  return;
1641  }
1642  switch (map()->instance_type()) {
1643  case MAP_TYPE:
1644  accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
1645  break;
1646  case FIXED_ARRAY_TYPE:
1647  accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
1648  break;
1649  case FIXED_DOUBLE_ARRAY_TYPE:
1650  accumulator->Add("<FixedDoubleArray[%u]>",
1651  FixedDoubleArray::cast(this)->length());
1652  break;
1653  case BYTE_ARRAY_TYPE:
1654  accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
1655  break;
1656  case FREE_SPACE_TYPE:
1657  accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
1658  break;
1659 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \
1660  case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1661  accumulator->Add("<External" #Type "Array[%u]>", \
1662  External##Type##Array::cast(this)->length()); \
1663  break; \
1664  case FIXED_##TYPE##_ARRAY_TYPE: \
1665  accumulator->Add("<Fixed" #Type "Array[%u]>", \
1666  Fixed##Type##Array::cast(this)->length()); \
1667  break;
1668 
1669  TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
1670 #undef TYPED_ARRAY_SHORT_PRINT
1671 
1672  case SHARED_FUNCTION_INFO_TYPE: {
1673  SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1674  SmartArrayPointer<char> debug_name =
1675  shared->DebugName()->ToCString();
1676  if (debug_name[0] != 0) {
1677  accumulator->Add("<SharedFunctionInfo %s>", debug_name.get());
1678  } else {
1679  accumulator->Add("<SharedFunctionInfo>");
1680  }
1681  break;
1682  }
1683  case JS_MESSAGE_OBJECT_TYPE:
1684  accumulator->Add("<JSMessageObject>");
1685  break;
1686 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1687  case NAME##_TYPE: \
1688  accumulator->Put('<'); \
1689  accumulator->Add(#Name); \
1690  accumulator->Put('>'); \
1691  break;
1692  STRUCT_LIST(MAKE_STRUCT_CASE)
1693 #undef MAKE_STRUCT_CASE
1694  case CODE_TYPE:
1695  accumulator->Add("<Code>");
1696  break;
1697  case ODDBALL_TYPE: {
1698  if (IsUndefined())
1699  accumulator->Add("<undefined>");
1700  else if (IsTheHole())
1701  accumulator->Add("<the hole>");
1702  else if (IsNull())
1703  accumulator->Add("<null>");
1704  else if (IsTrue())
1705  accumulator->Add("<true>");
1706  else if (IsFalse())
1707  accumulator->Add("<false>");
1708  else
1709  accumulator->Add("<Odd Oddball>");
1710  break;
1711  }
1712  case SYMBOL_TYPE: {
1713  Symbol* symbol = Symbol::cast(this);
1714  accumulator->Add("<Symbol: %d", symbol->Hash());
1715  if (!symbol->name()->IsUndefined()) {
1716  accumulator->Add(" ");
1717  String::cast(symbol->name())->StringShortPrint(accumulator);
1718  }
1719  accumulator->Add(">");
1720  break;
1721  }
1722  case HEAP_NUMBER_TYPE:
1723  accumulator->Add("<Number: ");
1724  HeapNumber::cast(this)->HeapNumberPrint(accumulator);
1725  accumulator->Put('>');
1726  break;
1727  case JS_PROXY_TYPE:
1728  accumulator->Add("<JSProxy>");
1729  break;
1730  case JS_FUNCTION_PROXY_TYPE:
1731  accumulator->Add("<JSFunctionProxy>");
1732  break;
1733  case FOREIGN_TYPE:
1734  accumulator->Add("<Foreign>");
1735  break;
1736  case CELL_TYPE:
1737  accumulator->Add("Cell for ");
1738  Cell::cast(this)->value()->ShortPrint(accumulator);
1739  break;
1740  case PROPERTY_CELL_TYPE:
1741  accumulator->Add("PropertyCell for ");
1742  PropertyCell::cast(this)->value()->ShortPrint(accumulator);
1743  break;
1744  default:
1745  accumulator->Add("<Other heap object (%d)>", map()->instance_type());
1746  break;
1747  }
1748 }
1749 
1750 
1751 void HeapObject::Iterate(ObjectVisitor* v) {
1752  // Handle header
1753  IteratePointer(v, kMapOffset);
1754  // Handle object body
1755  Map* m = map();
1756  IterateBody(m->instance_type(), SizeFromMap(m), v);
1757 }
1758 
1759 
1760 void HeapObject::IterateBody(InstanceType type, int object_size,
1761  ObjectVisitor* v) {
1762  // Avoiding <Type>::cast(this) because it accesses the map pointer field.
1763  // During GC, the map pointer field is encoded.
1764  if (type < FIRST_NONSTRING_TYPE) {
1765  switch (type & kStringRepresentationMask) {
1766  case kSeqStringTag:
1767  break;
1768  case kConsStringTag:
1769  ConsString::BodyDescriptor::IterateBody(this, v);
1770  break;
1771  case kSlicedStringTag:
1772  SlicedString::BodyDescriptor::IterateBody(this, v);
1773  break;
1774  case kExternalStringTag:
1775  if ((type & kStringEncodingMask) == kOneByteStringTag) {
1776  reinterpret_cast<ExternalAsciiString*>(this)->
1777  ExternalAsciiStringIterateBody(v);
1778  } else {
1779  reinterpret_cast<ExternalTwoByteString*>(this)->
1780  ExternalTwoByteStringIterateBody(v);
1781  }
1782  break;
1783  }
1784  return;
1785  }
1786 
1787  switch (type) {
1788  case FIXED_ARRAY_TYPE:
1789  FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
1790  break;
1791  case CONSTANT_POOL_ARRAY_TYPE:
1792  reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
1793  break;
1794  case FIXED_DOUBLE_ARRAY_TYPE:
1795  break;
1796  case JS_OBJECT_TYPE:
1797  case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1798  case JS_GENERATOR_OBJECT_TYPE:
1799  case JS_MODULE_TYPE:
1800  case JS_VALUE_TYPE:
1801  case JS_DATE_TYPE:
1802  case JS_ARRAY_TYPE:
1803  case JS_ARRAY_BUFFER_TYPE:
1804  case JS_TYPED_ARRAY_TYPE:
1805  case JS_DATA_VIEW_TYPE:
1806  case JS_SET_TYPE:
1807  case JS_MAP_TYPE:
1808  case JS_WEAK_MAP_TYPE:
1809  case JS_WEAK_SET_TYPE:
1810  case JS_REGEXP_TYPE:
1811  case JS_GLOBAL_PROXY_TYPE:
1812  case JS_GLOBAL_OBJECT_TYPE:
1813  case JS_BUILTINS_OBJECT_TYPE:
1814  case JS_MESSAGE_OBJECT_TYPE:
1815  JSObject::BodyDescriptor::IterateBody(this, object_size, v);
1816  break;
1817  case JS_FUNCTION_TYPE:
1818  reinterpret_cast<JSFunction*>(this)
1819  ->JSFunctionIterateBody(object_size, v);
1820  break;
1821  case ODDBALL_TYPE:
1822  Oddball::BodyDescriptor::IterateBody(this, v);
1823  break;
1824  case JS_PROXY_TYPE:
1825  JSProxy::BodyDescriptor::IterateBody(this, v);
1826  break;
1827  case JS_FUNCTION_PROXY_TYPE:
1828  JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
1829  break;
1830  case FOREIGN_TYPE:
1831  reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
1832  break;
1833  case MAP_TYPE:
1834  Map::BodyDescriptor::IterateBody(this, v);
1835  break;
1836  case CODE_TYPE:
1837  reinterpret_cast<Code*>(this)->CodeIterateBody(v);
1838  break;
1839  case CELL_TYPE:
1840  Cell::BodyDescriptor::IterateBody(this, v);
1841  break;
1842  case PROPERTY_CELL_TYPE:
1843  PropertyCell::BodyDescriptor::IterateBody(this, v);
1844  break;
1845  case SYMBOL_TYPE:
1846  Symbol::BodyDescriptor::IterateBody(this, v);
1847  break;
1848 
1849  case HEAP_NUMBER_TYPE:
1850  case FILLER_TYPE:
1851  case BYTE_ARRAY_TYPE:
1852  case FREE_SPACE_TYPE:
1853  break;
1854 
1855 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
1856  case EXTERNAL_##TYPE##_ARRAY_TYPE: \
1857  case FIXED_##TYPE##_ARRAY_TYPE: \
1858  break;
1859 
1860  TYPED_ARRAYS(TYPED_ARRAY_CASE)
1861 #undef TYPED_ARRAY_CASE
1862 
1863  case SHARED_FUNCTION_INFO_TYPE: {
1864  SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
1865  break;
1866  }
1867 
1868 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1869  case NAME##_TYPE:
1871 #undef MAKE_STRUCT_CASE
1872  if (type == ALLOCATION_SITE_TYPE) {
1874  } else {
1875  StructBodyDescriptor::IterateBody(this, object_size, v);
1876  }
1877  break;
1878  default:
1879  PrintF("Unknown type: %d\n", type);
1880  UNREACHABLE();
1881  }
1882 }
1883 
1884 
1885 bool HeapNumber::HeapNumberBooleanValue() {
1886  // NaN, +0, and -0 should return the false object
1887 #if __BYTE_ORDER == __LITTLE_ENDIAN
1888  union IeeeDoubleLittleEndianArchType u;
1889 #elif __BYTE_ORDER == __BIG_ENDIAN
1890  union IeeeDoubleBigEndianArchType u;
1891 #endif
1892  u.d = value();
1893  if (u.bits.exp == 2047) {
1894  // Detect NaN for IEEE double precision floating point.
1895  if ((u.bits.man_low | u.bits.man_high) != 0) return false;
1896  }
1897  if (u.bits.exp == 0) {
1898  // Detect +0, and -0 for IEEE double precision floating point.
1899  if ((u.bits.man_low | u.bits.man_high) == 0) return false;
1900  }
1901  return true;
1902 }
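// Editorial note (not part of objects.cc): a minimal, standalone sketch of the
// same IEEE-754 bit test, assuming only <cstdint> and <cstring>. A double
// converts to false exactly when its 11 exponent bits are all ones with a
// non-zero mantissa (NaN), or when exponent and mantissa are both zero
// (+0 or -0); the sign bit is ignored, matching the function above.
#include <cstdint>
#include <cstring>

static bool DoubleToBoolean(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));                            // safe type-pun
  uint64_t exponent = (bits >> 52) & 0x7FF;                            // 11 exponent bits
  uint64_t mantissa = bits & ((static_cast<uint64_t>(1) << 52) - 1);   // 52 mantissa bits
  if (exponent == 0x7FF && mantissa != 0) return false;                // NaN
  if (exponent == 0 && mantissa == 0) return false;                    // +0 or -0
  return true;
}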
1903 
1904 
1905 void HeapNumber::HeapNumberPrint(FILE* out) {
1906  PrintF(out, "%.16g", Number());
1907 }
1908 
1909 
1910 void HeapNumber::HeapNumberPrint(StringStream* accumulator) {
1911  // The Windows version of vsnprintf can allocate when printing a %g string
1912  // into a buffer that may not be big enough. We don't want random memory
1913  // allocation when producing post-crash stack traces, so we print into a
1914  // buffer that is plenty big enough for any floating point number, then
1915  // print that using vsnprintf (which may truncate but never allocate if
1916  // there is no more space in the buffer).
1917  EmbeddedVector<char, 100> buffer;
1918  OS::SNPrintF(buffer, "%.16g", Number());
1919  accumulator->Add("%s", buffer.start());
1920 }
1921 
1922 
1923 String* JSReceiver::class_name() {
1924  if (IsJSFunction() || IsJSFunctionProxy()) {
1925  return GetHeap()->function_class_string();
1926  }
1927  if (map()->constructor()->IsJSFunction()) {
1928  JSFunction* constructor = JSFunction::cast(map()->constructor());
1929  return String::cast(constructor->shared()->instance_class_name());
1930  }
1931  // If the constructor is not present, return "Object".
1932  return GetHeap()->Object_string();
1933 }
1934 
1935 
1936 String* Map::constructor_name() {
1937  if (constructor()->IsJSFunction()) {
1938  JSFunction* constructor = JSFunction::cast(this->constructor());
1939  String* name = String::cast(constructor->shared()->name());
1940  if (name->length() > 0) return name;
1941  String* inferred_name = constructor->shared()->inferred_name();
1942  if (inferred_name->length() > 0) return inferred_name;
1943  Object* proto = prototype();
1944  if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
1945  }
1946  // TODO(rossberg): what about proxies?
1947  // If the constructor is not present, return "Object".
1948  return GetHeap()->Object_string();
1949 }
1950 
1951 
1952 String* JSReceiver::constructor_name() {
1953  return map()->constructor_name();
1954 }
1955 
1956 
1957 // TODO(mstarzinger): Temporary wrapper until handlified.
1958 static Handle<Object> NewStorageFor(Isolate* isolate,
1959  Handle<Object> object,
1960  Representation representation) {
1961  Heap* heap = isolate->heap();
1962  CALL_HEAP_FUNCTION(isolate,
1963  object->AllocateNewStorageFor(heap, representation),
1964  Object);
1965 }
1966 
1967 
1968 static MaybeObject* CopyAddFieldDescriptor(Map* map,
1969  Name* name,
1970  int index,
1971  PropertyAttributes attributes,
1972  Representation representation,
1973  TransitionFlag flag) {
1974  Map* new_map;
1975  FieldDescriptor new_field_desc(name, index, attributes, representation);
1976  MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag);
1977  if (!maybe_map->To(&new_map)) return maybe_map;
1978  int unused_property_fields = map->unused_property_fields() - 1;
1979  if (unused_property_fields < 0) {
1980  unused_property_fields += JSObject::kFieldsAdded;
1981  }
1982  new_map->set_unused_property_fields(unused_property_fields);
1983  return new_map;
1984 }
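// Editorial note (not part of objects.cc): the unused-field bookkeeping above
// in isolation. When the last free property slot is consumed, the counter
// wraps by kFieldsAdded, reflecting that the next instance migration grows the
// out-of-object property backing store by that many slots. The value 3 is an
// assumption standing in for JSObject::kFieldsAdded.
static const int kFieldsAddedSketch = 3;  // assumed value of JSObject::kFieldsAdded

static int NextUnusedPropertyFields(int unused) {
  int next = unused - 1;                     // one slot consumed by the new field
  if (next < 0) next += kFieldsAddedSketch;  // out of slack: a fresh chunk is added
  return next;
}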
1985 
1986 
1987 static Handle<Map> CopyAddFieldDescriptor(Handle<Map> map,
1988  Handle<Name> name,
1989  int index,
1990  PropertyAttributes attributes,
1991  Representation representation,
1992  TransitionFlag flag) {
1993  CALL_HEAP_FUNCTION(map->GetIsolate(),
1994  CopyAddFieldDescriptor(
1995  *map, *name, index, attributes, representation, flag),
1996  Map);
1997 }
1998 
1999 
2000 void JSObject::AddFastProperty(Handle<JSObject> object,
2001  Handle<Name> name,
2002  Handle<Object> value,
2003  PropertyAttributes attributes,
2004  StoreFromKeyed store_mode,
2005  ValueType value_type,
2006  TransitionFlag flag) {
2007  ASSERT(!object->IsJSGlobalProxy());
2008  ASSERT(DescriptorArray::kNotFound ==
2009  object->map()->instance_descriptors()->Search(
2010  *name, object->map()->NumberOfOwnDescriptors()));
2011 
2012  // Normalize the object if the name is an actual name (not the
2013  // hidden strings) and is not a real identifier.
2014  // Normalize the object if it will have too many fast properties.
2015  Isolate* isolate = object->GetIsolate();
2016  if (!name->IsCacheable(isolate) ||
2017  object->TooManyFastProperties(store_mode)) {
2018  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
2019  AddSlowProperty(object, name, value, attributes);
2020  return;
2021  }
2022 
2023  // Compute the new index for new field.
2024  int index = object->map()->NextFreePropertyIndex();
2025 
2026  // Allocate new instance descriptors with (name, index) added
2027  if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED;
2028  Representation representation = value->OptimalRepresentation(value_type);
2029  Handle<Map> new_map = CopyAddFieldDescriptor(
2030  handle(object->map()), name, index, attributes, representation, flag);
2031 
2032  JSObject::MigrateToMap(object, new_map);
2033 
2034  if (representation.IsDouble()) {
2035  // Nothing more to be done.
2036  if (value->IsUninitialized()) return;
2037  HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(index));
2038  box->set_value(value->Number());
2039  } else {
2040  object->FastPropertyAtPut(index, *value);
2041  }
2042 }
2043 
2044 
2045 static MaybeObject* CopyAddConstantDescriptor(Map* map,
2046  Name* name,
2047  Object* value,
2048  PropertyAttributes attributes,
2049  TransitionFlag flag) {
2050  ConstantDescriptor new_constant_desc(name, value, attributes);
2051  return map->CopyAddDescriptor(&new_constant_desc, flag);
2052 }
2053 
2054 
2055 static Handle<Map> CopyAddConstantDescriptor(Handle<Map> map,
2056  Handle<Name> name,
2057  Handle<Object> value,
2058  PropertyAttributes attributes,
2059  TransitionFlag flag) {
2060  CALL_HEAP_FUNCTION(map->GetIsolate(),
2061  CopyAddConstantDescriptor(
2062  *map, *name, *value, attributes, flag),
2063  Map);
2064 }
2065 
2066 
2067 void JSObject::AddConstantProperty(Handle<JSObject> object,
2068  Handle<Name> name,
2069  Handle<Object> constant,
2070  PropertyAttributes attributes,
2071  TransitionFlag initial_flag) {
2072  TransitionFlag flag =
2073  // Do not add transitions to global objects.
2074  (object->IsGlobalObject() ||
2075  // Don't add transitions to special properties with non-trivial
2076  // attributes.
2077  attributes != NONE)
2078  ? OMIT_TRANSITION
2079  : initial_flag;
2080 
2081  // Allocate new instance descriptors with (name, constant) added.
2082  Handle<Map> new_map = CopyAddConstantDescriptor(
2083  handle(object->map()), name, constant, attributes, flag);
2084 
2085  JSObject::MigrateToMap(object, new_map);
2086 }
2087 
2088 
2089 void JSObject::AddSlowProperty(Handle<JSObject> object,
2090  Handle<Name> name,
2091  Handle<Object> value,
2092  PropertyAttributes attributes) {
2093  ASSERT(!object->HasFastProperties());
2094  Isolate* isolate = object->GetIsolate();
2095  Handle<NameDictionary> dict(object->property_dictionary());
2096  if (object->IsGlobalObject()) {
2097  // In case name is an orphaned property reuse the cell.
2098  int entry = dict->FindEntry(*name);
2099  if (entry != NameDictionary::kNotFound) {
2100  Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
2101  PropertyCell::SetValueInferType(cell, value);
2102  // Assign an enumeration index to the property and update
2103  // SetNextEnumerationIndex.
2104  int index = dict->NextEnumerationIndex();
2105  PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
2106  dict->SetNextEnumerationIndex(index + 1);
2107  dict->SetEntry(entry, *name, *cell, details);
2108  return;
2109  }
2110  Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
2111  PropertyCell::SetValueInferType(cell, value);
2112  value = cell;
2113  }
2114  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
2115  Handle<NameDictionary> result = NameDictionaryAdd(dict, name, value, details);
2116  if (*dict != *result) object->set_properties(*result);
2117 }
2118 
2119 
2120 Handle<Object> JSObject::AddProperty(Handle<JSObject> object,
2121  Handle<Name> name,
2122  Handle<Object> value,
2123  PropertyAttributes attributes,
2124  StrictMode strict_mode,
2125  JSReceiver::StoreFromKeyed store_mode,
2126  ExtensibilityCheck extensibility_check,
2127  ValueType value_type,
2128  StoreMode mode,
2129  TransitionFlag transition_flag) {
2130  ASSERT(!object->IsJSGlobalProxy());
2131  Isolate* isolate = object->GetIsolate();
2132 
2133  if (!name->IsUniqueName()) {
2134  name = isolate->factory()->InternalizeString(
2135  Handle<String>::cast(name));
2136  }
2137 
2138  if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
2139  !object->map()->is_extensible()) {
2140  if (strict_mode == SLOPPY) {
2141  return value;
2142  } else {
2143  Handle<Object> args[1] = { name };
2144  Handle<Object> error = isolate->factory()->NewTypeError(
2145  "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
2146  isolate->Throw(*error);
2147  return Handle<Object>();
2148  }
2149  }
2150 
2151  if (object->HasFastProperties()) {
2152  // Ensure the descriptor array does not get too big.
2153  if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) {
2154  // TODO(verwaest): Support other constants.
2155  // if (mode == ALLOW_AS_CONSTANT &&
2156  // !value->IsTheHole() &&
2157  // !value->IsConsString()) {
2158  if (value->IsJSFunction()) {
2159  AddConstantProperty(object, name, value, attributes, transition_flag);
2160  } else {
2161  AddFastProperty(object, name, value, attributes, store_mode,
2162  value_type, transition_flag);
2163  }
2164  } else {
2165  // Normalize the object to prevent very large instance descriptors.
2166  // This eliminates unwanted N^2 allocation and lookup behavior.
2167  NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0);
2168  AddSlowProperty(object, name, value, attributes);
2169  }
2170  } else {
2171  AddSlowProperty(object, name, value, attributes);
2172  }
2173 
2174  if (object->map()->is_observed() &&
2175  *name != isolate->heap()->hidden_string()) {
2176  Handle<Object> old_value = isolate->factory()->the_hole_value();
2177  EnqueueChangeRecord(object, "add", name, old_value);
2178  }
2179 
2180  return value;
2181 }
2182 
2183 
2184 void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
2185  const char* type_str,
2186  Handle<Name> name,
2187  Handle<Object> old_value) {
2188  Isolate* isolate = object->GetIsolate();
2189  HandleScope scope(isolate);
2190  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
2191  if (object->IsJSGlobalObject()) {
2192  object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
2193  }
2194  Handle<Object> args[] = { type, object, name, old_value };
2195  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
2196  bool threw;
2197 
2198  Execution::Call(isolate,
2199  Handle<JSFunction>(isolate->observers_notify_change()),
2200  isolate->factory()->undefined_value(),
2201  argc, args,
2202  &threw);
2203  ASSERT(!threw);
2204 }
2205 
2206 
2207 Handle<Object> JSObject::SetPropertyPostInterceptor(
2208  Handle<JSObject> object,
2209  Handle<Name> name,
2210  Handle<Object> value,
2211  PropertyAttributes attributes,
2212  StrictMode strict_mode) {
2213  // Check local property, ignore interceptor.
2214  LookupResult result(object->GetIsolate());
2215  object->LocalLookupRealNamedProperty(*name, &result);
2216  if (!result.IsFound()) {
2217  object->map()->LookupTransition(*object, *name, &result);
2218  }
2219  if (result.IsFound()) {
2220  // An existing property or a map transition was found. Use set property to
2221  // handle all these cases.
2222  return SetPropertyForResult(object, &result, name, value, attributes,
2223  strict_mode, MAY_BE_STORE_FROM_KEYED);
2224  }
2225  bool done = false;
2226  Handle<Object> result_object = SetPropertyViaPrototypes(
2227  object, name, value, attributes, strict_mode, &done);
2228  if (done) return result_object;
2229  // Add a new real property.
2230  return AddProperty(object, name, value, attributes, strict_mode);
2231 }
2232 
2233 
2234 static void ReplaceSlowProperty(Handle<JSObject> object,
2235  Handle<Name> name,
2236  Handle<Object> value,
2237  PropertyAttributes attributes) {
2238  NameDictionary* dictionary = object->property_dictionary();
2239  int old_index = dictionary->FindEntry(*name);
2240  int new_enumeration_index = 0; // 0 means "Use the next available index."
2241  if (old_index != -1) {
2242  // All calls to ReplaceSlowProperty have had all transitions removed.
2243  new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
2244  }
2245 
2246  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
2247  JSObject::SetNormalizedProperty(object, name, value, new_details);
2248 }
2249 
2250 
2251 const char* Representation::Mnemonic() const {
2252  switch (kind_) {
2253  case kNone: return "v";
2254  case kTagged: return "t";
2255  case kSmi: return "s";
2256  case kDouble: return "d";
2257  case kInteger32: return "i";
2258  case kHeapObject: return "h";
2259  case kExternal: return "x";
2260  default:
2261  UNREACHABLE();
2262  return NULL;
2263  }
2264 }
2265 
2266 
2267 static void ZapEndOfFixedArray(Address new_end, int to_trim) {
2268  // If we are doing a big trim in old space then we zap the space.
2269  Object** zap = reinterpret_cast<Object**>(new_end);
2270  zap++; // Header of filler must be at least one word so skip that.
2271  for (int i = 1; i < to_trim; i++) {
2272  *zap++ = Smi::FromInt(0);
2273  }
2274 }
2275 
2276 
2277 template<Heap::InvocationMode mode>
2278 static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
2279  ASSERT(elms->map() != heap->fixed_cow_array_map());
2280  // For now this trick is only applied to fixed arrays in new and paged space.
2281  ASSERT(!heap->lo_space()->Contains(elms));
2282 
2283  const int len = elms->length();
2284 
2285  ASSERT(to_trim < len);
2286 
2287  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
2288 
2289  if (mode != Heap::FROM_GC || Heap::ShouldZapGarbage()) {
2290  ZapEndOfFixedArray(new_end, to_trim);
2291  }
2292 
2293  int size_delta = to_trim * kPointerSize;
2294 
2295  // Technically in new space this write might be omitted (except for
2296  // debug mode, which iterates through the heap), but to play it safe
2297  // we still do it.
2298  heap->CreateFillerObjectAt(new_end, size_delta);
2299 
2300  elms->set_length(len - to_trim);
2301 
2302  heap->AdjustLiveBytes(elms->address(), -size_delta, mode);
2303 
2304  // The array may not be moved during GC,
2305  // but its size still has to be adjusted.
2306  HeapProfiler* profiler = heap->isolate()->heap_profiler();
2307  if (profiler->is_tracking_allocations()) {
2308  profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
2309  }
2310 }
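// Editorial note (not part of objects.cc): the size arithmetic behind the trim
// above, checked standalone. The 8-byte pointer size and the two-word
// (map + length) FixedArray header are assumptions; the real constants come
// from the V8 build configuration.
#include <cassert>

static const int kPtrSketch = 8;                   // assumed kPointerSize
static const int kHeaderSketch = 2 * kPtrSketch;   // assumed map + length header

static int FixedArraySizeForSketch(int length) {
  return kHeaderSketch + length * kPtrSketch;
}

static void CheckTrimArithmetic(int len, int to_trim) {
  assert(to_trim < len);
  int old_size = FixedArraySizeForSketch(len);
  int new_size = FixedArraySizeForSketch(len - to_trim);
  int size_delta = to_trim * kPtrSketch;       // what becomes the filler object
  assert(old_size - new_size == size_delta);   // filler exactly covers the tail
}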
2311 
2312 
2313 bool Map::InstancesNeedRewriting(Map* target,
2314  int target_number_of_fields,
2315  int target_inobject,
2316  int target_unused) {
2317  // If fields were added (or removed), rewrite the instance.
2318  int number_of_fields = NumberOfFields();
2319  ASSERT(target_number_of_fields >= number_of_fields);
2320  if (target_number_of_fields != number_of_fields) return true;
2321 
2322  // If smi descriptors were replaced by double descriptors, rewrite.
2323  DescriptorArray* old_desc = instance_descriptors();
2324  DescriptorArray* new_desc = target->instance_descriptors();
2325  int limit = NumberOfOwnDescriptors();
2326  for (int i = 0; i < limit; i++) {
2327  if (new_desc->GetDetails(i).representation().IsDouble() &&
2328  !old_desc->GetDetails(i).representation().IsDouble()) {
2329  return true;
2330  }
2331  }
2332 
2333  // If no fields were added, and no inobject properties were removed, setting
2334  // the map is sufficient.
2335  if (target_inobject == inobject_properties()) return false;
2336  // In-object slack tracking may have reduced the object size of the new map.
2337  // In that case, succeed if all existing fields were inobject, and they still
2338  // fit within the new inobject size.
2339  ASSERT(target_inobject < inobject_properties());
2340  if (target_number_of_fields <= target_inobject) {
2341  ASSERT(target_number_of_fields + target_unused == target_inobject);
2342  return false;
2343  }
2344  // Otherwise, properties will need to be moved to the backing store.
2345  return true;
2346 }
2347 
2348 
2349 // To migrate an instance to a map:
2350 // - First check whether the instance needs to be rewritten. If not, simply
2351 // change the map.
2352 // - Otherwise, allocate a fixed array large enough to hold all fields, in
2353 // addition to unused space.
2354 // - Copy all existing properties in, in the following order: backing store
2355 // properties, unused fields, inobject properties.
2356 // - If all allocation succeeded, commit the state atomically:
2357 // * Copy inobject properties from the backing store back into the object.
2358 // * Trim the difference in instance size of the object. This also cleanly
2359 // frees inobject properties that moved to the backing store.
2360 // * If there are properties left in the backing store, trim off the space used
2361 // to temporarily store the inobject properties.
2362 // * If there are properties left in the backing store, install the backing
2363 // store.
2364 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
2365  Isolate* isolate = object->GetIsolate();
2366  Handle<Map> old_map(object->map());
2367  int number_of_fields = new_map->NumberOfFields();
2368  int inobject = new_map->inobject_properties();
2369  int unused = new_map->unused_property_fields();
2370 
2371  // Nothing to do if no functions were converted to fields and no smis were
2372  // converted to doubles.
2373  if (!old_map->InstancesNeedRewriting(
2374  *new_map, number_of_fields, inobject, unused)) {
2375  object->set_map(*new_map);
2376  return;
2377  }
2378 
2379  int total_size = number_of_fields + unused;
2380  int external = total_size - inobject;
2381  Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);
2382 
2383  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
2384  Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
2385  int old_nof = old_map->NumberOfOwnDescriptors();
2386  int new_nof = new_map->NumberOfOwnDescriptors();
2387 
2388  // This method only supports generalizing instances to at least the same
2389  // number of properties.
2390  ASSERT(old_nof <= new_nof);
2391 
2392  for (int i = 0; i < old_nof; i++) {
2393  PropertyDetails details = new_descriptors->GetDetails(i);
2394  if (details.type() != FIELD) continue;
2395  PropertyDetails old_details = old_descriptors->GetDetails(i);
2396  if (old_details.type() == CALLBACKS) {
2397  ASSERT(details.representation().IsTagged());
2398  continue;
2399  }
2400  ASSERT(old_details.type() == CONSTANT ||
2401  old_details.type() == FIELD);
2402  Object* raw_value = old_details.type() == CONSTANT
2403  ? old_descriptors->GetValue(i)
2404  : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
2405  Handle<Object> value(raw_value, isolate);
2406  if (!old_details.representation().IsDouble() &&
2407  details.representation().IsDouble()) {
2408  if (old_details.representation().IsNone()) {
2409  value = handle(Smi::FromInt(0), isolate);
2410  }
2411  value = NewStorageFor(isolate, value, details.representation());
2412  }
2413  ASSERT(!(details.representation().IsDouble() && value->IsSmi()));
2414  int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2415  if (target_index < 0) target_index += total_size;
2416  array->set(target_index, *value);
2417  }
2418 
2419  for (int i = old_nof; i < new_nof; i++) {
2420  PropertyDetails details = new_descriptors->GetDetails(i);
2421  if (details.type() != FIELD) continue;
2422  if (details.representation().IsDouble()) {
2423  int target_index = new_descriptors->GetFieldIndex(i) - inobject;
2424  if (target_index < 0) target_index += total_size;
2425  Handle<Object> box = isolate->factory()->NewHeapNumber(0);
2426  array->set(target_index, *box);
2427  }
2428  }
2429 
2430  // From here on we cannot fail and we shouldn't GC anymore.
2431  DisallowHeapAllocation no_allocation;
2432 
2433  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
2434  // avoid overwriting |one_pointer_filler_map|.
2435  int limit = Min(inobject, number_of_fields);
2436  for (int i = 0; i < limit; i++) {
2437  object->FastPropertyAtPut(i, array->get(external + i));
2438  }
2439 
2440  // Create filler object past the new instance size.
2441  int new_instance_size = new_map->instance_size();
2442  int instance_size_delta = old_map->instance_size() - new_instance_size;
2443  ASSERT(instance_size_delta >= 0);
2444  Address address = object->address() + new_instance_size;
2445  isolate->heap()->CreateFillerObjectAt(address, instance_size_delta);
2446 
2447  // If there are properties in the new backing store, trim it to the correct
2448  // size and install the backing store into the object.
2449  if (external > 0) {
2450  RightTrimFixedArray<Heap::FROM_MUTATOR>(isolate->heap(), *array, inobject);
2451  object->set_properties(*array);
2452  }
2453 
2454  object->set_map(*new_map);
2455 }
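// Editorial note (not part of objects.cc): the index remapping used above when
// fields are staged in the temporary array. The layout is
// [out-of-object fields | in-object fields], so an index below `inobject`
// wraps around to the tail; that is why the copy-back loop reads in-object
// property i from array->get(external + i). The concrete sizes below are
// made-up example parameters.
#include <cassert>

static int TargetIndexSketch(int field_index, int inobject, int total_size) {
  int target = field_index - inobject;    // out-of-object fields come first
  if (target < 0) target += total_size;   // in-object fields wrap to the tail
  return target;
}

static void CheckFieldRemapping() {
  const int inobject = 4, total_size = 10;  // assumed example layout
  const int external = total_size - inobject;
  for (int i = 0; i < inobject; ++i) {
    assert(TargetIndexSketch(i, inobject, total_size) == external + i);
  }
  for (int i = inobject; i < total_size; ++i) {
    assert(TargetIndexSketch(i, inobject, total_size) == i - inobject);
  }
}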
2456 
2457 
2458 Handle<TransitionArray> Map::AddTransition(Handle<Map> map,
2459  Handle<Name> key,
2460  Handle<Map> target,
2461  SimpleTransitionFlag flag) {
2462  CALL_HEAP_FUNCTION(map->GetIsolate(),
2463  map->AddTransition(*key, *target, flag),
2464  TransitionArray);
2465 }
2466 
2467 
2468 void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
2469  int modify_index,
2470  Representation new_representation,
2471  StoreMode store_mode) {
2472  Handle<Map> new_map = Map::GeneralizeRepresentation(
2473  handle(object->map()), modify_index, new_representation, store_mode);
2474  if (object->map() == *new_map) return;
2475  return MigrateToMap(object, new_map);
2476 }
2477 
2478 
2479 int Map::NumberOfFields() {
2480  DescriptorArray* descriptors = instance_descriptors();
2481  int result = 0;
2482  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
2483  if (descriptors->GetDetails(i).type() == FIELD) result++;
2484  }
2485  return result;
2486 }
2487 
2488 
2489 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
2490  int modify_index,
2491  StoreMode store_mode,
2492  PropertyAttributes attributes,
2493  const char* reason) {
2494  Handle<Map> new_map = Copy(map);
2495 
2496  DescriptorArray* descriptors = new_map->instance_descriptors();
2497  descriptors->InitializeRepresentations(Representation::Tagged());
2498 
2499  // Unless the instance is being migrated, ensure that modify_index is a field.
2500  PropertyDetails details = descriptors->GetDetails(modify_index);
2501  if (store_mode == FORCE_FIELD && details.type() != FIELD) {
2502  FieldDescriptor d(descriptors->GetKey(modify_index),
2503  new_map->NumberOfFields(),
2504  attributes,
2505  Representation::Tagged());
2506  d.SetSortedKeyIndex(details.pointer());
2507  descriptors->Set(modify_index, &d);
2508  int unused_property_fields = new_map->unused_property_fields() - 1;
2509  if (unused_property_fields < 0) {
2510  unused_property_fields += JSObject::kFieldsAdded;
2511  }
2512  new_map->set_unused_property_fields(unused_property_fields);
2513  }
2514 
2515  if (FLAG_trace_generalization) {
2516  map->PrintGeneralization(stdout, reason, modify_index,
2517  new_map->NumberOfOwnDescriptors(),
2518  new_map->NumberOfOwnDescriptors(),
2519  details.type() == CONSTANT && store_mode == FORCE_FIELD,
2520  Representation::Tagged(), Representation::Tagged());
2521  }
2522  return new_map;
2523 }
2524 
2525 
2526 void Map::DeprecateTransitionTree() {
2527  if (is_deprecated()) return;
2528  if (HasTransitionArray()) {
2529  TransitionArray* transitions = this->transitions();
2530  for (int i = 0; i < transitions->number_of_transitions(); i++) {
2531  transitions->GetTarget(i)->DeprecateTransitionTree();
2532  }
2533  }
2534  deprecate();
2535  dependent_code()->DeoptimizeDependentCodeGroup(
2536  GetIsolate(), DependentCode::kTransitionGroup);
2537  NotifyLeafMapLayoutChange();
2538 }
2539 
2540 
2541 // Invalidates a transition target at |key|, and installs |new_descriptors| over
2542 // the current instance_descriptors to ensure proper sharing of descriptor
2543 // arrays.
2544 void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
2545  if (HasTransitionArray()) {
2546  TransitionArray* transitions = this->transitions();
2547  int transition = transitions->Search(key);
2548  if (transition != TransitionArray::kNotFound) {
2549  transitions->GetTarget(transition)->DeprecateTransitionTree();
2550  }
2551  }
2552 
2553  // Don't overwrite the empty descriptor array.
2554  if (NumberOfOwnDescriptors() == 0) return;
2555 
2556  DescriptorArray* to_replace = instance_descriptors();
2557  Map* current = this;
2558  GetHeap()->incremental_marking()->RecordWrites(to_replace);
2559  while (current->instance_descriptors() == to_replace) {
2560  current->SetEnumLength(kInvalidEnumCacheSentinel);
2561  current->set_instance_descriptors(new_descriptors);
2562  Object* next = current->GetBackPointer();
2563  if (next->IsUndefined()) break;
2564  current = Map::cast(next);
2565  }
2566 
2567  set_owns_descriptors(false);
2568 }
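// Editorial note (not part of objects.cc): the back-pointer walk used above to
// swap one shared descriptor array on every map that still points at it,
// sketched with a made-up MiniMap stand-in for Map.
struct MiniMap {
  const void* descriptors;
  MiniMap* back_pointer;  // NULL at the root of the transition tree
};

static void ReplaceSharedDescriptors(MiniMap* map, const void* old_desc,
                                     const void* new_desc) {
  // Stop at the first map that owns a different descriptor array; maps further
  // up the back-pointer chain cannot share old_desc either.
  for (MiniMap* current = map;
       current != NULL && current->descriptors == old_desc;
       current = current->back_pointer) {
    current->descriptors = new_desc;
  }
}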
2569 
2570 
2571 Map* Map::FindRootMap() {
2572  Map* result = this;
2573  while (true) {
2574  Object* back = result->GetBackPointer();
2575  if (back->IsUndefined()) return result;
2576  result = Map::cast(back);
2577  }
2578 }
2579 
2580 
2581 // Returns NULL if the updated map is incompatible.
2582 Map* Map::FindUpdatedMap(int verbatim,
2583  int length,
2584  DescriptorArray* descriptors) {
2585  // This can only be called on roots of transition trees.
2586  ASSERT(GetBackPointer()->IsUndefined());
2587 
2588  Map* current = this;
2589 
2590  for (int i = verbatim; i < length; i++) {
2591  if (!current->HasTransitionArray()) break;
2592  Name* name = descriptors->GetKey(i);
2593  TransitionArray* transitions = current->transitions();
2594  int transition = transitions->Search(name);
2595  if (transition == TransitionArray::kNotFound) break;
2596  current = transitions->GetTarget(transition);
2597  PropertyDetails details = descriptors->GetDetails(i);
2598  PropertyDetails target_details =
2599  current->instance_descriptors()->GetDetails(i);
2600  if (details.attributes() != target_details.attributes()) return NULL;
2601  if (details.type() == CALLBACKS) {
2602  if (target_details.type() != CALLBACKS) return NULL;
2603  if (descriptors->GetValue(i) !=
2604  current->instance_descriptors()->GetValue(i)) {
2605  return NULL;
2606  }
2607  } else if (target_details.type() == CALLBACKS) {
2608  return NULL;
2609  }
2610  }
2611 
2612  return current;
2613 }
2614 
2615 
2616 Map* Map::FindLastMatchMap(int verbatim,
2617  int length,
2618  DescriptorArray* descriptors) {
2619  // This can only be called on roots of transition trees.
2620  ASSERT(GetBackPointer()->IsUndefined());
2621 
2622  Map* current = this;
2623 
2624  for (int i = verbatim; i < length; i++) {
2625  if (!current->HasTransitionArray()) break;
2626  Name* name = descriptors->GetKey(i);
2627  TransitionArray* transitions = current->transitions();
2628  int transition = transitions->Search(name);
2629  if (transition == TransitionArray::kNotFound) break;
2630 
2631  Map* next = transitions->GetTarget(transition);
2632  DescriptorArray* next_descriptors = next->instance_descriptors();
2633 
2634  if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break;
2635 
2636  PropertyDetails details = descriptors->GetDetails(i);
2637  PropertyDetails next_details = next_descriptors->GetDetails(i);
2638  if (details.type() != next_details.type()) break;
2639  if (details.attributes() != next_details.attributes()) break;
2640  if (!details.representation().Equals(next_details.representation())) break;
2641 
2642  current = next;
2643  }
2644  return current;
2645 }
2646 
2647 
2648 // Generalize the representation of the descriptor at |modify_index|.
2649 // This method rewrites the transition tree to reflect the new change. To avoid
2650 // high degrees of polymorphism, and to stabilize quickly, on every rewrite
2651 // the new type is deduced by merging the current type with any potential new
2652 // (partial) version of the type in the transition tree.
2653 // To do this, on each rewrite:
2654 // - Search the root of the transition tree using FindRootMap.
2655 // - Find |updated|, the newest matching version of this map using
2656 // FindUpdatedMap. This uses the keys in the own map's descriptor array to
2657 // walk the transition tree.
2658 // - Merge/generalize the descriptor array of the current map and |updated|.
2659 // - Generalize the |modify_index| descriptor using |new_representation|.
2660 // - Walk the tree again starting from the root towards |updated|. Stop at
2661 // |split_map|, the first map whose descriptor array does not match the merged
2662 // descriptor array.
2663 // - If |updated| == |split_map|, |updated| is in the expected state. Return it.
2664 // - Otherwise, invalidate the outdated transition target from |updated|, and
2665 // replace its transition tree with a new branch for the updated descriptors.
2666 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
2667  int modify_index,
2668  Representation new_representation,
2669  StoreMode store_mode) {
2670  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
2671  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
2672  Representation old_representation = old_details.representation();
2673 
2674  // It's fine to transition from None to anything but double without any
2675  // modification to the object, because the default uninitialized value for
2676  // representation None can be overwritten by both smi and tagged values.
2677  // Doubles, however, would require a box allocation.
2678  if (old_representation.IsNone() &&
2679  !new_representation.IsNone() &&
2680  !new_representation.IsDouble()) {
2681  old_descriptors->SetRepresentation(modify_index, new_representation);
2682  return old_map;
2683  }
2684 
2685  int descriptors = old_map->NumberOfOwnDescriptors();
2686  Handle<Map> root_map(old_map->FindRootMap());
2687 
2688  // Check the state of the root map.
2689  if (!old_map->EquivalentToForTransition(*root_map)) {
2690  return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2691  old_details.attributes(), "not equivalent");
2692  }
2693 
2694  int verbatim = root_map->NumberOfOwnDescriptors();
2695 
2696  if (store_mode != ALLOW_AS_CONSTANT && modify_index < verbatim) {
2697  return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2698  old_details.attributes(), "root modification");
2699  }
2700 
2701  Map* raw_updated = root_map->FindUpdatedMap(
2702  verbatim, descriptors, *old_descriptors);
2703  if (raw_updated == NULL) {
2704  return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
2705  old_details.attributes(), "incompatible");
2706  }
2707 
2708  Handle<Map> updated(raw_updated);
2709  Handle<DescriptorArray> updated_descriptors(updated->instance_descriptors());
2710 
2711  int valid = updated->NumberOfOwnDescriptors();
2712 
2713  // Directly change the map if the target map is more general. Ensure that the
2714  // target type of the modify_index is a FIELD, unless we are migrating.
2715  if (updated_descriptors->IsMoreGeneralThan(
2716  verbatim, valid, descriptors, *old_descriptors) &&
2717  (store_mode == ALLOW_AS_CONSTANT ||
2718  updated_descriptors->GetDetails(modify_index).type() == FIELD)) {
2719  Representation updated_representation =
2720  updated_descriptors->GetDetails(modify_index).representation();
2721  if (new_representation.fits_into(updated_representation)) return updated;
2722  }
2723 
2724  Handle<DescriptorArray> new_descriptors = DescriptorArray::Merge(
2725  updated_descriptors, verbatim, valid, descriptors, modify_index,
2726  store_mode, old_descriptors);
2727  ASSERT(store_mode == ALLOW_AS_CONSTANT ||
2728  new_descriptors->GetDetails(modify_index).type() == FIELD);
2729 
2730  old_representation =
2731  new_descriptors->GetDetails(modify_index).representation();
2732  Representation updated_representation =
2733  new_representation.generalize(old_representation);
2734  if (!updated_representation.Equals(old_representation)) {
2735  new_descriptors->SetRepresentation(modify_index, updated_representation);
2736  }
2737 
2738  Handle<Map> split_map(root_map->FindLastMatchMap(
2739  verbatim, descriptors, *new_descriptors));
2740 
2741  int split_descriptors = split_map->NumberOfOwnDescriptors();
2742  // This is shadowed by |updated_descriptors| being more general than
2743  // |old_descriptors|.
2744  ASSERT(descriptors != split_descriptors);
2745 
2746  int descriptor = split_descriptors;
2747  split_map->DeprecateTarget(
2748  old_descriptors->GetKey(descriptor), *new_descriptors);
2749 
2750  if (FLAG_trace_generalization) {
2751  old_map->PrintGeneralization(
2752  stdout, "", modify_index, descriptor, descriptors,
2753  old_descriptors->GetDetails(modify_index).type() == CONSTANT &&
2754  store_mode == FORCE_FIELD,
2755  old_representation, updated_representation);
2756  }
2757 
2758  // Add missing transitions.
2759  Handle<Map> new_map = split_map;
2760  for (; descriptor < descriptors; descriptor++) {
2761  new_map = Map::CopyInstallDescriptors(new_map, descriptor, new_descriptors);
2762  }
2763 
2764  new_map->set_owns_descriptors(true);
2765  return new_map;
2766 }
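// Editorial note (not part of objects.cc): representation generalization is in
// essence a join on a small lattice, None < Smi < Double/HeapObject < Tagged.
// The enum and ordering below are a simplified sketch, not V8's actual
// Representation class.
enum RepSketch { kRepNone, kRepSmi, kRepDouble, kRepHeapObject, kRepTagged };

static RepSketch GeneralizeSketch(RepSketch a, RepSketch b) {
  if (a == b) return a;
  if (a == kRepNone) return b;               // None fits into anything
  if (b == kRepNone) return a;
  if ((a == kRepSmi && b == kRepDouble) ||
      (a == kRepDouble && b == kRepSmi)) {
    return kRepDouble;                       // smis widen to doubles
  }
  return kRepTagged;                         // everything else joins at Tagged
}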
2767 
2768 
2769 // Generalize the representation of all FIELD descriptors.
2770 Handle<Map> Map::GeneralizeAllFieldRepresentations(
2771  Handle<Map> map,
2772  Representation new_representation) {
2773  Handle<DescriptorArray> descriptors(map->instance_descriptors());
2774  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
2775  PropertyDetails details = descriptors->GetDetails(i);
2776  if (details.type() == FIELD) {
2777  map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD);
2778  }
2779  }
2780  return map;
2781 }
2782 
2783 
2784 Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
2785  Handle<Map> proto_map(map);
2786  while (proto_map->prototype()->IsJSObject()) {
2787  Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2788  if (holder->map()->is_deprecated()) {
2789  JSObject::TryMigrateInstance(holder);
2790  }
2791  proto_map = Handle<Map>(holder->map());
2792  }
2793  return CurrentMapForDeprecatedInternal(map);
2794 }
2795 
2796 
2797 Handle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> map) {
2798  if (!map->is_deprecated()) return map;
2799 
2800  DisallowHeapAllocation no_allocation;
2801  DescriptorArray* old_descriptors = map->instance_descriptors();
2802 
2803  int descriptors = map->NumberOfOwnDescriptors();
2804  Map* root_map = map->FindRootMap();
2805 
2806  // Check the state of the root map.
2807  if (!map->EquivalentToForTransition(root_map)) return Handle<Map>();
2808  int verbatim = root_map->NumberOfOwnDescriptors();
2809 
2810  Map* updated = root_map->FindUpdatedMap(
2811  verbatim, descriptors, old_descriptors);
2812  if (updated == NULL) return Handle<Map>();
2813 
2814  DescriptorArray* updated_descriptors = updated->instance_descriptors();
2815  int valid = updated->NumberOfOwnDescriptors();
2816  if (!updated_descriptors->IsMoreGeneralThan(
2817  verbatim, valid, descriptors, old_descriptors)) {
2818  return Handle<Map>();
2819  }
2820 
2821  return handle(updated);
2822 }
2823 
2824 
2825 Handle<Object> JSObject::SetPropertyWithInterceptor(
2826  Handle<JSObject> object,
2827  Handle<Name> name,
2828  Handle<Object> value,
2829  PropertyAttributes attributes,
2830  StrictMode strict_mode) {
2831  // TODO(rossberg): Support symbols in the API.
2832  if (name->IsSymbol()) return value;
2833  Isolate* isolate = object->GetIsolate();
2834  Handle<String> name_string = Handle<String>::cast(name);
2835  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
2836  if (!interceptor->setter()->IsUndefined()) {
2837  LOG(isolate,
2838  ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
2839  PropertyCallbackArguments args(
2840  isolate, interceptor->data(), *object, *object);
2841  v8::NamedPropertySetterCallback setter =
2842  v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
2843  Handle<Object> value_unhole = value->IsTheHole()
2844  ? Handle<Object>(isolate->factory()->undefined_value()) : value;
2845  v8::Handle<v8::Value> result = args.Call(setter,
2846  v8::Utils::ToLocal(name_string),
2847  v8::Utils::ToLocal(value_unhole));
2848  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
2849  if (!result.IsEmpty()) return value;
2850  }
2851  Handle<Object> result =
2852  SetPropertyPostInterceptor(object, name, value, attributes, strict_mode);
2853  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
2854  return result;
2855 }
2856 
2857 
2858 Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
2859  Handle<Name> name,
2860  Handle<Object> value,
2861  PropertyAttributes attributes,
2862  StrictMode strict_mode,
2863  StoreFromKeyed store_mode) {
2864  LookupResult result(object->GetIsolate());
2865  object->LocalLookup(*name, &result, true);
2866  if (!result.IsFound()) {
2867  object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
2868  }
2869  return SetProperty(object, &result, name, value, attributes, strict_mode,
2870  store_mode);
2871 }
2872 
2873 
2874 Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object,
2875  Handle<Object> structure,
2876  Handle<Name> name,
2877  Handle<Object> value,
2878  Handle<JSObject> holder,
2879  StrictMode strict_mode) {
2880  Isolate* isolate = object->GetIsolate();
2881 
2882  // We should never get here to initialize a const with the hole
2883  // value since a const declaration would conflict with the setter.
2884  ASSERT(!value->IsTheHole());
2885 
2886  // To accommodate both the old and the new api we switch on the
2887  // data structure used to store the callbacks. Eventually foreign
2888  // callbacks should be phased out.
2889  if (structure->IsForeign()) {
2890  AccessorDescriptor* callback =
2891  reinterpret_cast<AccessorDescriptor*>(
2892  Handle<Foreign>::cast(structure)->foreign_address());
2893  CALL_AND_RETRY_OR_DIE(isolate,
2894  (callback->setter)(
2895  isolate, *object, *value, callback->data),
2896  break,
2897  return Handle<Object>());
2898  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
2899  return value;
2900  }
2901 
2902  if (structure->IsExecutableAccessorInfo()) {
2903  // api style callbacks
2904  ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
2905  if (!data->IsCompatibleReceiver(*object)) {
2906  Handle<Object> args[2] = { name, object };
2907  Handle<Object> error =
2908  isolate->factory()->NewTypeError("incompatible_method_receiver",
2909  HandleVector(args,
2910  ARRAY_SIZE(args)));
2911  isolate->Throw(*error);
2912  return Handle<Object>();
2913  }
2914  // TODO(rossberg): Support symbols in the API.
2915  if (name->IsSymbol()) return value;
2916  Object* call_obj = data->setter();
2917  v8::AccessorSetterCallback call_fun =
2918  v8::ToCData<v8::AccessorSetterCallback>(call_obj);
2919  if (call_fun == NULL) return value;
2920  Handle<String> key = Handle<String>::cast(name);
2921  LOG(isolate, ApiNamedPropertyAccess("store", *object, *name));
2922  PropertyCallbackArguments args(
2923  isolate, data->data(), *object, JSObject::cast(*holder));
2924  args.Call(call_fun,
2925  v8::Utils::ToLocal(key),
2926  v8::Utils::ToLocal(value));
2927  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
2928  return value;
2929  }
2930 
2931  if (structure->IsAccessorPair()) {
2932  Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
2933  if (setter->IsSpecFunction()) {
2934  // TODO(rossberg): nicer would be to cast to some JSCallable here...
2935  return SetPropertyWithDefinedSetter(
2936  object, Handle<JSReceiver>::cast(setter), value);
2937  } else {
2938  if (strict_mode == SLOPPY) return value;
2939  Handle<Object> args[2] = { name, holder };
2940  Handle<Object> error =
2941  isolate->factory()->NewTypeError("no_setter_in_callback",
2942  HandleVector(args, 2));
2943  isolate->Throw(*error);
2944  return Handle<Object>();
2945  }
2946  }
2947 
2948  // TODO(dcarney): Handle correctly.
2949  if (structure->IsDeclaredAccessorInfo()) {
2950  return value;
2951  }
2952 
2953  UNREACHABLE();
2954  return Handle<Object>();
2955 }
2956 
2957 
2958 Handle<Object> JSObject::SetPropertyWithDefinedSetter(
2959  Handle<JSReceiver> object,
2960  Handle<JSReceiver> setter,
2961  Handle<Object> value) {
2962  Isolate* isolate = object->GetIsolate();
2963 
2964 #ifdef ENABLE_DEBUGGER_SUPPORT
2965  Debug* debug = isolate->debug();
2966  // Handle stepping into a setter if step into is active.
2967  // TODO(rossberg): should this apply to getters that are function proxies?
2968  if (debug->StepInActive() && setter->IsJSFunction()) {
2969  debug->HandleStepIn(
2970  Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
2971  }
2972 #endif
2973 
2974  bool has_pending_exception;
2975  Handle<Object> argv[] = { value };
2976  Execution::Call(
2977  isolate, setter, object, ARRAY_SIZE(argv), argv, &has_pending_exception);
2978  // Check for pending exception and return the result.
2979  if (has_pending_exception) return Handle<Object>();
2980  return value;
2981 }
2982 
2983 
2984 Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
2985  Handle<JSObject> object,
2986  uint32_t index,
2987  Handle<Object> value,
2988  bool* found,
2989  StrictMode strict_mode) {
2990  Isolate *isolate = object->GetIsolate();
2991  for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
2992  !proto->IsNull();
2993  proto = handle(proto->GetPrototype(isolate), isolate)) {
2994  if (proto->IsJSProxy()) {
2995  return JSProxy::SetPropertyViaPrototypesWithHandler(
2996  Handle<JSProxy>::cast(proto),
2997  object,
2998  isolate->factory()->Uint32ToString(index), // name
2999  value,
3000  NONE,
3001  strict_mode,
3002  found);
3003  }
3004  Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
3005  if (!js_proto->HasDictionaryElements()) {
3006  continue;
3007  }
3008  Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
3009  int entry = dictionary->FindEntry(index);
3010  if (entry != SeededNumberDictionary::kNotFound) {
3011  PropertyDetails details = dictionary->DetailsAt(entry);
3012  if (details.type() == CALLBACKS) {
3013  *found = true;
3014  Handle<Object> structure(dictionary->ValueAt(entry), isolate);
3015  return SetElementWithCallback(object, structure, index, value, js_proto,
3016  strict_mode);
3017  }
3018  }
3019  }
3020  *found = false;
3021  return isolate->factory()->the_hole_value();
3022 }
3023 
3024 
3025 Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object,
3026  Handle<Name> name,
3027  Handle<Object> value,
3028  PropertyAttributes attributes,
3029  StrictMode strict_mode,
3030  bool* done) {
3031  Isolate* isolate = object->GetIsolate();
3032 
3033  *done = false;
3034  // We could not find a local property so let's check whether there is an
3035  // accessor that wants to handle the property, or whether the property is
3036  // read-only on the prototype chain.
3037  LookupResult result(isolate);
3038  object->LookupRealNamedPropertyInPrototypes(*name, &result);
3039  if (result.IsFound()) {
3040  switch (result.type()) {
3041  case NORMAL:
3042  case FIELD:
3043  case CONSTANT:
3044  *done = result.IsReadOnly();
3045  break;
3046  case INTERCEPTOR: {
3047  PropertyAttributes attr = GetPropertyAttributeWithInterceptor(
3048  handle(result.holder()), object, name, true);
3049  *done = !!(attr & READ_ONLY);
3050  break;
3051  }
3052  case CALLBACKS: {
3053  *done = true;
3054  Handle<Object> callback_object(result.GetCallbackObject(), isolate);
3055  return SetPropertyWithCallback(object, callback_object, name, value,
3056  handle(result.holder()), strict_mode);
3057  }
3058  case HANDLER: {
3059  Handle<JSProxy> proxy(result.proxy());
3060  return JSProxy::SetPropertyViaPrototypesWithHandler(
3061  proxy, object, name, value, attributes, strict_mode, done);
3062  }
3063  case TRANSITION:
3064  case NONEXISTENT:
3065  UNREACHABLE();
3066  break;
3067  }
3068  }
3069 
3070  // If we get here with *done true, we have encountered a read-only property.
3071  if (*done) {
3072  if (strict_mode == SLOPPY) return value;
3073  Handle<Object> args[] = { name, object };
3074  Handle<Object> error = isolate->factory()->NewTypeError(
3075  "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3076  isolate->Throw(*error);
3077  return Handle<Object>();
3078  }
3079  return isolate->factory()->the_hole_value();
3080 }
3081 
3082 
3083 void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
3084  Handle<DescriptorArray> descriptors(map->instance_descriptors());
3085  if (slack <= descriptors->NumberOfSlackDescriptors()) return;
3086  int number_of_descriptors = descriptors->number_of_descriptors();
3087  Isolate* isolate = map->GetIsolate();
3088  Handle<DescriptorArray> new_descriptors =
3089  isolate->factory()->NewDescriptorArray(number_of_descriptors, slack);
3090  DescriptorArray::WhitenessWitness witness(*new_descriptors);
3091 
3092  for (int i = 0; i < number_of_descriptors; ++i) {
3093  new_descriptors->CopyFrom(i, *descriptors, i, witness);
3094  }
3095 
3096  map->set_instance_descriptors(*new_descriptors);
3097 }
3098 
3099 
3100 template<class T>
3101 static int AppendUniqueCallbacks(NeanderArray* callbacks,
3102  Handle<typename T::Array> array,
3103  int valid_descriptors) {
3104  int nof_callbacks = callbacks->length();
3105 
3106  Isolate* isolate = array->GetIsolate();
3107  // Ensure the keys are unique names before writing them into the
3108  // instance descriptor. Since it may cause a GC, it has to be done before we
3109  // temporarily put the heap in an invalid state while appending descriptors.
3110  for (int i = 0; i < nof_callbacks; ++i) {
3111  Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
3112  if (entry->name()->IsUniqueName()) continue;
3113  Handle<String> key =
3114  isolate->factory()->InternalizeString(
3115  Handle<String>(String::cast(entry->name())));
3116  entry->set_name(*key);
3117  }
3118 
3119  // Fill in new callback descriptors. Process the callbacks from
3120  // back to front so that the last callback with a given name takes
3121  // precedence over previously added callbacks with that name.
3122  for (int i = nof_callbacks - 1; i >= 0; i--) {
3123  AccessorInfo* entry = AccessorInfo::cast(callbacks->get(i));
3124  Name* key = Name::cast(entry->name());
3125  // Check if a descriptor with this name already exists before writing.
3126  if (!T::Contains(key, entry, valid_descriptors, array)) {
3127  T::Insert(key, entry, valid_descriptors, array);
3128  valid_descriptors++;
3129  }
3130  }
3131 
3132  return valid_descriptors;
3133 }
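// Editorial note (not part of objects.cc): the back-to-front, last-wins
// de-duplication used above, sketched with standard containers instead of
// NeanderArray / DescriptorArray.
#include <string>
#include <utility>
#include <vector>

static std::vector<std::pair<std::string, int> > AppendUniqueSketch(
    const std::vector<std::pair<std::string, int> >& callbacks) {
  std::vector<std::pair<std::string, int> > result;
  // Walk from the back so the last callback registered under a name shadows
  // earlier ones with the same name.
  for (size_t i = callbacks.size(); i-- > 0; ) {
    bool seen = false;
    for (size_t j = 0; j < result.size(); ++j) {
      if (result[j].first == callbacks[i].first) { seen = true; break; }
    }
    if (!seen) result.push_back(callbacks[i]);
  }
  return result;
}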
3134 
3135 struct DescriptorArrayAppender {
3136  typedef DescriptorArray Array;
3137  static bool Contains(Name* key,
3138  AccessorInfo* entry,
3139  int valid_descriptors,
3140  Handle<DescriptorArray> array) {
3141  return array->Search(key, valid_descriptors) != DescriptorArray::kNotFound;
3142  }
3143  static void Insert(Name* key,
3144  AccessorInfo* entry,
3145  int valid_descriptors,
3146  Handle<DescriptorArray> array) {
3147  CallbacksDescriptor desc(key, entry, entry->property_attributes());
3148  array->Append(&desc);
3149  }
3150 };
3151 
3152 
3153 struct FixedArrayAppender {
3154  typedef FixedArray Array;
3155  static bool Contains(Name* key,
3156  AccessorInfo* entry,
3157  int valid_descriptors,
3158  Handle<FixedArray> array) {
3159  for (int i = 0; i < valid_descriptors; i++) {
3160  if (key == AccessorInfo::cast(array->get(i))->name()) return true;
3161  }
3162  return false;
3163  }
3164  static void Insert(Name* key,
3165  AccessorInfo* entry,
3166  int valid_descriptors,
3167  Handle<FixedArray> array) {
3168  array->set(valid_descriptors, entry);
3169  }
3170 };
3171 
3172 
3173 void Map::AppendCallbackDescriptors(Handle<Map> map,
3174  Handle<Object> descriptors) {
3175  int nof = map->NumberOfOwnDescriptors();
3176  Handle<DescriptorArray> array(map->instance_descriptors());
3177  NeanderArray callbacks(descriptors);
3178  ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
3179  nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
3180  map->SetNumberOfOwnDescriptors(nof);
3181 }
3182 
3183 
3184 int AccessorInfo::AppendUnique(Handle<Object> descriptors,
3185  Handle<FixedArray> array,
3186  int valid_descriptors) {
3187  NeanderArray callbacks(descriptors);
3188  ASSERT(array->length() >= callbacks.length() + valid_descriptors);
3189  return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
3190  array,
3191  valid_descriptors);
3192 }
3193 
3194 
3195 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
3196  ASSERT(!map.is_null());
3197  for (int i = 0; i < maps->length(); ++i) {
3198  if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
3199  }
3200  return false;
3201 }
3202 
3203 
3204 template <class T>
3205 static Handle<T> MaybeNull(T* p) {
3206  if (p == NULL) return Handle<T>::null();
3207  return Handle<T>(p);
3208 }
3209 
3210 
3211 Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
3212  ElementsKind kind = elements_kind();
3213  Handle<Map> transitioned_map = Handle<Map>::null();
3214  Handle<Map> current_map(this);
3215  bool packed = IsFastPackedElementsKind(kind);
3216  if (IsTransitionableFastElementsKind(kind)) {
3217  while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
3218  kind = GetNextMoreGeneralFastElementsKind(kind, false);
3219  Handle<Map> maybe_transitioned_map =
3220  MaybeNull(current_map->LookupElementsTransitionMap(kind));
3221  if (maybe_transitioned_map.is_null()) break;
3222  if (ContainsMap(candidates, maybe_transitioned_map) &&
3223  (packed || !IsFastPackedElementsKind(kind))) {
3224  transitioned_map = maybe_transitioned_map;
3225  if (!IsFastPackedElementsKind(kind)) packed = false;
3226  }
3227  current_map = maybe_transitioned_map;
3228  }
3229  }
3230  return transitioned_map;
3231 }
3232 
3233 
3234 static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
3235  Map* current_map = map;
3236  int target_kind =
3237  IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
3238  ? to_kind
3239  : TERMINAL_FAST_ELEMENTS_KIND;
3240 
3241  // Support for legacy API.
3242  if (IsExternalArrayElementsKind(to_kind) &&
3244  return map;
3245  }
3246 
3247  ElementsKind kind = map->elements_kind();
3248  while (kind != target_kind) {
3249  kind = GetNextTransitionElementsKind(kind);
3250  if (!current_map->HasElementsTransition()) return current_map;
3251  current_map = current_map->elements_transition_map();
3252  }
3253 
3254  if (to_kind != kind && current_map->HasElementsTransition()) {
3255  ASSERT(to_kind == DICTIONARY_ELEMENTS);
3256  Map* next_map = current_map->elements_transition_map();
3257  if (next_map->elements_kind() == to_kind) return next_map;
3258  }
3259 
3260  ASSERT(current_map->elements_kind() == target_kind);
3261  return current_map;
3262 }
3263 
3264 
3265 Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
3266  Map* to_map = FindClosestElementsTransition(this, to_kind);
3267  if (to_map->elements_kind() == to_kind) return to_map;
3268  return NULL;
3269 }
3270 
3271 
3272 bool Map::IsMapInArrayPrototypeChain() {
3273  Isolate* isolate = GetIsolate();
3274  if (isolate->initial_array_prototype()->map() == this) {
3275  return true;
3276  }
3277 
3278  if (isolate->initial_object_prototype()->map() == this) {
3279  return true;
3280  }
3281 
3282  return false;
3283 }
3284 
3285 
3286 static MaybeObject* AddMissingElementsTransitions(Map* map,
3287  ElementsKind to_kind) {
3288  ASSERT(IsTransitionElementsKind(map->elements_kind()));
3289 
3290  Map* current_map = map;
3291 
3292  ElementsKind kind = map->elements_kind();
3293  while (kind != to_kind && !IsTerminalElementsKind(kind)) {
3294  kind = GetNextTransitionElementsKind(kind);
3295  MaybeObject* maybe_next_map =
3296  current_map->CopyAsElementsKind(kind, INSERT_TRANSITION);
3297  if (!maybe_next_map->To(&current_map)) return maybe_next_map;
3298  }
3299 
3300  // In case we are exiting the fast elements kind system, just add the map in
3301  // the end.
3302  if (kind != to_kind) {
3303  MaybeObject* maybe_next_map =
3304  current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION);
3305  if (!maybe_next_map->To(&current_map)) return maybe_next_map;
3306  }
3307 
3308  ASSERT(current_map->elements_kind() == to_kind);
3309  return current_map;
3310 }
3311 
3312 
3313 Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
3314  ElementsKind to_kind) {
3315  Isolate* isolate = object->GetIsolate();
3316  CALL_HEAP_FUNCTION(isolate,
3317  object->GetElementsTransitionMap(isolate, to_kind),
3318  Map);
3319 }
3320 
3321 
3322 MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
3323  Map* start_map = map();
3324  ElementsKind from_kind = start_map->elements_kind();
3325 
3326  if (from_kind == to_kind) {
3327  return start_map;
3328  }
3329 
3330  bool allow_store_transition =
3331  // Only remember the map transition if there is not an already existing
3332  // non-matching element transition.
3333  !start_map->IsUndefined() && !start_map->is_shared() &&
3334  IsTransitionElementsKind(from_kind);
3335 
3336  // Only store fast element maps in ascending generality.
3337  if (IsFastElementsKind(to_kind)) {
3338  allow_store_transition &=
3339  IsTransitionableFastElementsKind(from_kind) &&
3340  IsMoreGeneralElementsKindTransition(from_kind, to_kind);
3341  }
3342 
3343  if (!allow_store_transition) {
3344  return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION);
3345  }
3346 
3347  return start_map->AsElementsKind(to_kind);
3348 }
3349 
3350 
3351 // TODO(ishell): Temporary wrapper until handlified.
3352 // static
3353 Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
3354  CALL_HEAP_FUNCTION(map->GetIsolate(),
3355  map->AsElementsKind(kind),
3356  Map);
3357 }
3358 
3359 
3360 MaybeObject* Map::AsElementsKind(ElementsKind kind) {
3361  Map* closest_map = FindClosestElementsTransition(this, kind);
3362 
3363  if (closest_map->elements_kind() == kind) {
3364  return closest_map;
3365  }
3366 
3367  return AddMissingElementsTransitions(closest_map, kind);
3368 }
3369 
3370 
3371 void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) {
3372  DisallowHeapAllocation no_gc;
3373  if (IsJSGlobalProxy()) {
3374  Object* proto = GetPrototype();
3375  if (proto->IsNull()) return result->NotFound();
3376  ASSERT(proto->IsJSGlobalObject());
3377  return JSObject::cast(proto)->LocalLookupRealNamedProperty(name, result);
3378  }
3379 
3380  if (HasFastProperties()) {
3381  map()->LookupDescriptor(this, name, result);
3382  // A property or a map transition was found. We return all of these result
3383  // types because LocalLookupRealNamedProperty is used when setting
3384  // properties where map transitions are handled.
3385  ASSERT(!result->IsFound() ||
3386  (result->holder() == this && result->IsFastPropertyType()));
3387  // Disallow caching for uninitialized constants. These can only
3388  // occur as fields.
3389  if (result->IsField() &&
3390  result->IsReadOnly() &&
3391  RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
3392  result->DisallowCaching();
3393  }
3394  return;
3395  }
3396 
3397  int entry = property_dictionary()->FindEntry(name);
3398  if (entry != NameDictionary::kNotFound) {
3399  Object* value = property_dictionary()->ValueAt(entry);
3400  if (IsGlobalObject()) {
3401  PropertyDetails d = property_dictionary()->DetailsAt(entry);
3402  if (d.IsDeleted()) {
3403  result->NotFound();
3404  return;
3405  }
3406  value = PropertyCell::cast(value)->value();
3407  }
3408  // Make sure to disallow caching for uninitialized constants
3409  // found in the dictionary-mode objects.
3410  if (value->IsTheHole()) result->DisallowCaching();
3411  result->DictionaryResult(this, entry);
3412  return;
3413  }
3414 
3415  result->NotFound();
3416 }
3417 
3418 
3419 void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) {
3420  LocalLookupRealNamedProperty(name, result);
3421  if (result->IsFound()) return;
3422 
3422 
3423  LookupRealNamedPropertyInPrototypes(name, result);
3424 }
3425 
3426 
3427 void JSObject::LookupRealNamedPropertyInPrototypes(Name* name,
3428  LookupResult* result) {
3429  Isolate* isolate = GetIsolate();
3430  Heap* heap = isolate->heap();
3431  for (Object* pt = GetPrototype();
3432  pt != heap->null_value();
3433  pt = pt->GetPrototype(isolate)) {
3434  if (pt->IsJSProxy()) {
3435  return result->HandlerResult(JSProxy::cast(pt));
3436  }
3437  JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
3438  ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
3439  if (result->IsFound()) return;
3440  }
3441  result->NotFound();
3442 }
3443 
3444 
3445 // We only need to deal with CALLBACKS and INTERCEPTORS
3446 Handle<Object> JSObject::SetPropertyWithFailedAccessCheck(
3447  Handle<JSObject> object,
3448  LookupResult* result,
3449  Handle<Name> name,
3450  Handle<Object> value,
3451  bool check_prototype,
3452  StrictMode strict_mode) {
3453  if (check_prototype && !result->IsProperty()) {
3454  object->LookupRealNamedPropertyInPrototypes(*name, result);
3455  }
3456 
3457  if (result->IsProperty()) {
3458  if (!result->IsReadOnly()) {
3459  switch (result->type()) {
3460  case CALLBACKS: {
3461  Object* obj = result->GetCallbackObject();
3462  if (obj->IsAccessorInfo()) {
3463  Handle<AccessorInfo> info(AccessorInfo::cast(obj));
3464  if (info->all_can_write()) {
3465  return SetPropertyWithCallback(object,
3466  info,
3467  name,
3468  value,
3469  handle(result->holder()),
3470  strict_mode);
3471  }
3472  } else if (obj->IsAccessorPair()) {
3473  Handle<AccessorPair> pair(AccessorPair::cast(obj));
3474  if (pair->all_can_read()) {
3475  return SetPropertyWithCallback(object,
3476  pair,
3477  name,
3478  value,
3479  handle(result->holder()),
3480  strict_mode);
3481  }
3482  }
3483  break;
3484  }
3485  case INTERCEPTOR: {
3486  // Try to look up real named properties. Note that the only properties that
3487  // can be set this way are callbacks marked ALL_CAN_WRITE on the prototype chain.
3488  LookupResult r(object->GetIsolate());
3489  object->LookupRealNamedProperty(*name, &r);
3490  if (r.IsProperty()) {
3491  return SetPropertyWithFailedAccessCheck(object,
3492  &r,
3493  name,
3494  value,
3495  check_prototype,
3496  strict_mode);
3497  }
3498  break;
3499  }
3500  default: {
3501  break;
3502  }
3503  }
3504  }
3505  }
3506 
3507  Isolate* isolate = object->GetIsolate();
3508  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
3509  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
3510  return value;
3511 }
3512 
3513 
3514 Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
3515  LookupResult* result,
3516  Handle<Name> key,
3517  Handle<Object> value,
3518  PropertyAttributes attributes,
3519  StrictMode strict_mode,
3520  StoreFromKeyed store_mode) {
3521  if (result->IsHandler()) {
3522  return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
3523  object, key, value, attributes, strict_mode);
3524  } else {
3525  return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
3526  result, key, value, attributes, strict_mode, store_mode);
3527  }
3528 }
3529 
3530 
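// The JSProxy functions below implement property operations in terms of the
// handler's traps ("has", "set", "delete", "getPropertyDescriptor"), invoked
// through JSProxy::CallTrap further down; symbols are not yet routed through
// proxies (see the TODOs above each function). As an illustrative sketch only
// (assuming the Proxy.create-style handler API of this era), a handler
// consumed by this code might look like:
//
//   var p = Proxy.create({
//     has: function(name) { return name === 'x'; },
//     set: function(receiver, name, value) { return true; },
//     delete: function(name) { return true; },
//     getPropertyDescriptor: function(name) { return undefined; }
//   });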
3531 bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
3532  Isolate* isolate = proxy->GetIsolate();
3533 
3534  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3535  if (name->IsSymbol()) return false;
3536 
3537  Handle<Object> args[] = { name };
3538  Handle<Object> result = proxy->CallTrap(
3539  "has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
3540  if (isolate->has_pending_exception()) return false;
3541 
3542  return result->BooleanValue();
3543 }
3544 
3545 
3546 Handle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
3547  Handle<JSReceiver> receiver,
3548  Handle<Name> name,
3549  Handle<Object> value,
3550  PropertyAttributes attributes,
3551  StrictMode strict_mode) {
3552  Isolate* isolate = proxy->GetIsolate();
3553 
3554  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3555  if (name->IsSymbol()) return value;
3556 
3557  Handle<Object> args[] = { receiver, name, value };
3558  proxy->CallTrap("set", isolate->derived_set_trap(), ARRAY_SIZE(args), args);
3559  if (isolate->has_pending_exception()) return Handle<Object>();
3560 
3561  return value;
3562 }
3563 
3564 
3565 Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
3566  Handle<JSProxy> proxy,
3567  Handle<JSReceiver> receiver,
3568  Handle<Name> name,
3569  Handle<Object> value,
3570  PropertyAttributes attributes,
3571  StrictMode strict_mode,
3572  bool* done) {
3573  Isolate* isolate = proxy->GetIsolate();
3574  Handle<Object> handler(proxy->handler(), isolate); // Trap might morph proxy.
3575 
3576  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3577  if (name->IsSymbol()) {
3578  *done = false;
3579  return isolate->factory()->the_hole_value();
3580  }
3581 
3582  *done = true; // except where redefined...
3583  Handle<Object> args[] = { name };
3584  Handle<Object> result = proxy->CallTrap(
3585  "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
3586  if (isolate->has_pending_exception()) return Handle<Object>();
3587 
3588  if (result->IsUndefined()) {
3589  *done = false;
3590  return isolate->factory()->the_hole_value();
3591  }
3592 
3593  // Emulate [[GetProperty]] semantics for proxies.
3594  bool has_pending_exception;
3595  Handle<Object> argv[] = { result };
3596  Handle<Object> desc = Execution::Call(
3597  isolate, isolate->to_complete_property_descriptor(), result,
3598  ARRAY_SIZE(argv), argv, &has_pending_exception);
3599  if (has_pending_exception) return Handle<Object>();
3600 
3601  // [[GetProperty]] requires us to check that all properties are configurable.
3602  Handle<String> configurable_name =
3603  isolate->factory()->InternalizeOneByteString(
3604  STATIC_ASCII_VECTOR("configurable_"));
3605  Handle<Object> configurable(
3606  v8::internal::GetProperty(isolate, desc, configurable_name));
3607  ASSERT(!isolate->has_pending_exception());
3608  ASSERT(configurable->IsTrue() || configurable->IsFalse());
3609  if (configurable->IsFalse()) {
3610  Handle<String> trap =
3611  isolate->factory()->InternalizeOneByteString(
3612  STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3613  Handle<Object> args[] = { handler, trap, name };
3614  Handle<Object> error = isolate->factory()->NewTypeError(
3615  "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3616  isolate->Throw(*error);
3617  return Handle<Object>();
3618  }
3619  ASSERT(configurable->IsTrue());
3620 
3621  // Check for DataDescriptor.
3622  Handle<String> hasWritable_name =
3623  isolate->factory()->InternalizeOneByteString(
3624  STATIC_ASCII_VECTOR("hasWritable_"));
3625  Handle<Object> hasWritable(
3626  v8::internal::GetProperty(isolate, desc, hasWritable_name));
3627  ASSERT(!isolate->has_pending_exception());
3628  ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
3629  if (hasWritable->IsTrue()) {
3630  Handle<String> writable_name =
3631  isolate->factory()->InternalizeOneByteString(
3632  STATIC_ASCII_VECTOR("writable_"));
3633  Handle<Object> writable(
3634  v8::internal::GetProperty(isolate, desc, writable_name));
3635  ASSERT(!isolate->has_pending_exception());
3636  ASSERT(writable->IsTrue() || writable->IsFalse());
3637  *done = writable->IsFalse();
3638  if (!*done) return isolate->factory()->the_hole_value();
3639  if (strict_mode == SLOPPY) return value;
3640  Handle<Object> args[] = { name, receiver };
3641  Handle<Object> error = isolate->factory()->NewTypeError(
3642  "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
3643  isolate->Throw(*error);
3644  return Handle<Object>();
3645  }
3646 
3647  // We have an AccessorDescriptor.
3648  Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
3649  STATIC_ASCII_VECTOR("set_"));
3650  Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_name));
3651  ASSERT(!isolate->has_pending_exception());
3652  if (!setter->IsUndefined()) {
3653  // TODO(rossberg): nicer would be to cast to some JSCallable here...
3654  return SetPropertyWithDefinedSetter(
3655  receiver, Handle<JSReceiver>::cast(setter), value);
3656  }
3657 
3658  if (strict_mode == SLOPPY) return value;
3659  Handle<Object> args2[] = { name, proxy };
3660  Handle<Object> error = isolate->factory()->NewTypeError(
3661  "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
3662  isolate->Throw(*error);
3663  return Handle<Object>();
3664 }
3665 
3666 
3667 Handle<Object> JSProxy::DeletePropertyWithHandler(
3668  Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
3669  Isolate* isolate = proxy->GetIsolate();
3670 
3671  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3672  if (name->IsSymbol()) return isolate->factory()->false_value();
3673 
3674  Handle<Object> args[] = { name };
3675  Handle<Object> result = proxy->CallTrap(
3676  "delete", Handle<Object>(), ARRAY_SIZE(args), args);
3677  if (isolate->has_pending_exception()) return Handle<Object>();
3678 
3679  bool result_bool = result->BooleanValue();
3680  if (mode == STRICT_DELETION && !result_bool) {
3681  Handle<Object> handler(proxy->handler(), isolate);
3682  Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
3683  STATIC_ASCII_VECTOR("delete"));
3684  Handle<Object> args[] = { handler, trap_name };
3685  Handle<Object> error = isolate->factory()->NewTypeError(
3686  "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
3687  isolate->Throw(*error);
3688  return Handle<Object>();
3689  }
3690  return isolate->factory()->ToBoolean(result_bool);
3691 }
3692 
3693 
3694 Handle<Object> JSProxy::DeleteElementWithHandler(
3695  Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
3696  Isolate* isolate = proxy->GetIsolate();
3697  Handle<String> name = isolate->factory()->Uint32ToString(index);
3698  return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
3699 }
3700 
3701 
3702 PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
3703  Handle<JSProxy> proxy,
3704  Handle<JSReceiver> receiver,
3705  Handle<Name> name) {
3706  Isolate* isolate = proxy->GetIsolate();
3707  HandleScope scope(isolate);
3708 
3709  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3710  if (name->IsSymbol()) return ABSENT;
3711 
3712  Handle<Object> args[] = { name };
3713  Handle<Object> result = proxy->CallTrap(
3714  "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
3715  if (isolate->has_pending_exception()) return NONE;
3716 
3717  if (result->IsUndefined()) return ABSENT;
3718 
3719  bool has_pending_exception;
3720  Handle<Object> argv[] = { result };
3721  Handle<Object> desc = Execution::Call(
3722  isolate, isolate->to_complete_property_descriptor(), result,
3723  ARRAY_SIZE(argv), argv, &has_pending_exception);
3724  if (has_pending_exception) return NONE;
3725 
3726  // Convert result to PropertyAttributes.
3727  Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
3728  STATIC_ASCII_VECTOR("enumerable_"));
3729  Handle<Object> enumerable(v8::internal::GetProperty(isolate, desc, enum_n));
3730  if (isolate->has_pending_exception()) return NONE;
3731  Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
3732  STATIC_ASCII_VECTOR("configurable_"));
3733  Handle<Object> configurable(v8::internal::GetProperty(isolate, desc, conf_n));
3734  if (isolate->has_pending_exception()) return NONE;
3735  Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
3736  STATIC_ASCII_VECTOR("writable_"));
3737  Handle<Object> writable(v8::internal::GetProperty(isolate, desc, writ_n));
3738  if (isolate->has_pending_exception()) return NONE;
3739  if (!writable->BooleanValue()) {
3740  Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
3741  STATIC_ASCII_VECTOR("set_"));
3742  Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_n));
3743  if (isolate->has_pending_exception()) return NONE;
3744  writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
3745  }
3746 
3747  if (configurable->IsFalse()) {
3748  Handle<Object> handler(proxy->handler(), isolate);
3749  Handle<String> trap = isolate->factory()->InternalizeOneByteString(
3750  STATIC_ASCII_VECTOR("getPropertyDescriptor"));
3751  Handle<Object> args[] = { handler, trap, name };
3752  Handle<Object> error = isolate->factory()->NewTypeError(
3753  "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
3754  isolate->Throw(*error);
3755  return NONE;
3756  }
3757 
3758  int attributes = NONE;
3759  if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
3760  if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
3761  if (!writable->BooleanValue()) attributes |= READ_ONLY;
3762  return static_cast<PropertyAttributes>(attributes);
3763 }
3764 
3765 
3766 PropertyAttributes JSProxy::GetElementAttributeWithHandler(
3767  Handle<JSProxy> proxy,
3768  Handle<JSReceiver> receiver,
3769  uint32_t index) {
3770  Isolate* isolate = proxy->GetIsolate();
3771  Handle<String> name = isolate->factory()->Uint32ToString(index);
3772  return GetPropertyAttributeWithHandler(proxy, receiver, name);
3773 }
3774 
3775 
3776 void JSProxy::Fix(Handle<JSProxy> proxy) {
3777  Isolate* isolate = proxy->GetIsolate();
3778 
3779  // Save identity hash.
3780  Handle<Object> hash(proxy->GetIdentityHash(), isolate);
3781 
3782  if (proxy->IsJSFunctionProxy()) {
3783  isolate->factory()->BecomeJSFunction(proxy);
3784  // Code will be set on the JavaScript side.
3785  } else {
3786  isolate->factory()->BecomeJSObject(proxy);
3787  }
3788  ASSERT(proxy->IsJSObject());
3789 
3790  // Inherit identity, if it was present.
3791  if (hash->IsSmi()) {
3792  JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
3793  Handle<Smi>::cast(hash));
3794  }
3795 }
3796 
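// CallTrap (below) fetches the named trap from the proxy's handler; if the
// trap is absent it falls back to the |derived| default trap, or throws a
// "handler_trap_missing" TypeError when no fallback is provided, and then
// invokes the trap with the handler as receiver.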
3797 
3798 MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
3799  Handle<Object> derived,
3800  int argc,
3801  Handle<Object> argv[]) {
3802  Isolate* isolate = GetIsolate();
3803  Handle<Object> handler(this->handler(), isolate);
3804 
3805  Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
3806  Handle<Object> trap(v8::internal::GetProperty(isolate, handler, trap_name));
3807  if (isolate->has_pending_exception()) return trap;
3808 
3809  if (trap->IsUndefined()) {
3810  if (derived.is_null()) {
3811  Handle<Object> args[] = { handler, trap_name };
3812  Handle<Object> error = isolate->factory()->NewTypeError(
3813  "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
3814  isolate->Throw(*error);
3815  return Handle<Object>();
3816  }
3817  trap = Handle<Object>(derived);
3818  }
3819 
3820  bool threw;
3821  return Execution::Call(isolate, trap, handler, argc, argv, &threw);
3822 }
3823 
3824 
3825 // TODO(mstarzinger): Temporary wrapper until handlified.
3826 static Handle<Map> MapAsElementsKind(Handle<Map> map, ElementsKind kind) {
3827  CALL_HEAP_FUNCTION(map->GetIsolate(), map->AsElementsKind(kind), Map);
3828 }
3829 
3830 
3831 void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
3832  ASSERT(object->map()->inobject_properties() == map->inobject_properties());
3833  ElementsKind obj_kind = object->map()->elements_kind();
3834  ElementsKind map_kind = map->elements_kind();
3835  if (map_kind != obj_kind) {
3836  ElementsKind to_kind = map_kind;
3837  if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
3838  IsDictionaryElementsKind(obj_kind)) {
3839  to_kind = obj_kind;
3840  }
3841  if (IsDictionaryElementsKind(to_kind)) {
3842  NormalizeElements(object);
3843  } else {
3844  TransitionElementsKind(object, to_kind);
3845  }
3846  map = MapAsElementsKind(map, to_kind);
3847  }
3848  JSObject::MigrateToMap(object, map);
3849 }
3850 
3851 
3852 void JSObject::MigrateInstance(Handle<JSObject> object) {
3853  // Converting any field to the most specific type will cause the
3854  // GeneralizeFieldRepresentation algorithm to create the most general existing
3855  // transition that matches the object. This achieves what is needed.
3856  Handle<Map> original_map(object->map());
3857  GeneralizeFieldRepresentation(
3858  object, 0, Representation::None(), ALLOW_AS_CONSTANT);
3859  object->map()->set_migration_target(true);
3860  if (FLAG_trace_migration) {
3861  object->PrintInstanceMigration(stdout, *original_map, object->map());
3862  }
3863 }
3864 
3865 
3866 Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) {
3867  Handle<Map> original_map(object->map());
3868  Handle<Map> new_map = Map::CurrentMapForDeprecatedInternal(original_map);
3869  if (new_map.is_null()) return Handle<Object>();
3870  JSObject::MigrateToMap(object, new_map);
3871  if (FLAG_trace_migration) {
3872  object->PrintInstanceMigration(stdout, *original_map, object->map());
3873  }
3874  return object;
3875 }
3876 
3877 
3878 Handle<Object> JSObject::SetPropertyUsingTransition(
3879  Handle<JSObject> object,
3880  LookupResult* lookup,
3881  Handle<Name> name,
3882  Handle<Object> value,
3883  PropertyAttributes attributes) {
3884  Handle<Map> transition_map(lookup->GetTransitionTarget());
3885  int descriptor = transition_map->LastAdded();
3886 
3887  DescriptorArray* descriptors = transition_map->instance_descriptors();
3888  PropertyDetails details = descriptors->GetDetails(descriptor);
3889 
3890  if (details.type() == CALLBACKS || attributes != details.attributes()) {
3891  // AddProperty will either normalize the object, or create a new fast copy
3892  // of the map. If we get a fast copy of the map, all field representations
3893  // will be tagged since the transition is omitted.
3894  return JSObject::AddProperty(
3895  object, name, value, attributes, SLOPPY,
3896  JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
3897  JSReceiver::OMIT_EXTENSIBILITY_CHECK,
3898  JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
3899  }
3900 
3901  // Keep the target CONSTANT if the same value is stored.
3902  // TODO(verwaest): Also support keeping the placeholder
3903  // (value->IsUninitialized) as constant.
3904  if (!value->FitsRepresentation(details.representation()) ||
3905  (details.type() == CONSTANT &&
3906  descriptors->GetValue(descriptor) != *value)) {
3907  transition_map = Map::GeneralizeRepresentation(transition_map,
3908  descriptor, value->OptimalRepresentation(), FORCE_FIELD);
3909  }
3910 
3911  JSObject::MigrateToMap(object, transition_map);
3912 
3913  // Reload.
3914  descriptors = transition_map->instance_descriptors();
3915  details = descriptors->GetDetails(descriptor);
3916 
3917  if (details.type() != FIELD) return value;
3918 
3919  int field_index = descriptors->GetFieldIndex(descriptor);
3920  if (details.representation().IsDouble()) {
3921  // Nothing more to be done.
3922  if (value->IsUninitialized()) return value;
3923  HeapNumber* box = HeapNumber::cast(object->RawFastPropertyAt(field_index));
3924  box->set_value(value->Number());
3925  } else {
3926  object->FastPropertyAtPut(field_index, *value);
3927  }
3928 
3929  return value;
3930 }
3931 
3932 
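// Writes |value| into an existing field. If the value does not fit the
// field's representation, or the property was a CONSTANT, the map's
// representation is generalized first; double fields are updated in place
// through their HeapNumber box.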
3933 static void SetPropertyToField(LookupResult* lookup,
3934  Handle<Name> name,
3935  Handle<Object> value) {
3936  Representation representation = lookup->representation();
3937  if (!value->FitsRepresentation(representation) ||
3938  lookup->type() == CONSTANT) {
3939  JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
3940  lookup->GetDescriptorIndex(),
3941  value->OptimalRepresentation(),
3942  FORCE_FIELD);
3943  DescriptorArray* desc = lookup->holder()->map()->instance_descriptors();
3944  int descriptor = lookup->GetDescriptorIndex();
3945  representation = desc->GetDetails(descriptor).representation();
3946  }
3947 
3948  if (representation.IsDouble()) {
3949  HeapNumber* storage = HeapNumber::cast(lookup->holder()->RawFastPropertyAt(
3950  lookup->GetFieldIndex().field_index()));
3951  storage->set_value(value->Number());
3952  return;
3953  }
3954 
3955  lookup->holder()->FastPropertyAtPut(
3956  lookup->GetFieldIndex().field_index(), *value);
3957 }
3958 
3959 
3960 static void ConvertAndSetLocalProperty(LookupResult* lookup,
3961  Handle<Name> name,
3962  Handle<Object> value,
3963  PropertyAttributes attributes) {
3964  Handle<JSObject> object(lookup->holder());
3965  if (object->TooManyFastProperties()) {
3966  JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
3967  }
3968 
3969  if (!object->HasFastProperties()) {
3970  ReplaceSlowProperty(object, name, value, attributes);
3971  return;
3972  }
3973 
3974  int descriptor_index = lookup->GetDescriptorIndex();
3975  if (lookup->GetAttributes() == attributes) {
3976  JSObject::GeneralizeFieldRepresentation(
3977  object, descriptor_index, Representation::Tagged(), FORCE_FIELD);
3978  } else {
3979  Handle<Map> old_map(object->map());
3980  Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
3981  descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
3982  JSObject::MigrateToMap(object, new_map);
3983  }
3984 
3985  DescriptorArray* descriptors = object->map()->instance_descriptors();
3986  int index = descriptors->GetDetails(descriptor_index).field_index();
3987  object->FastPropertyAtPut(index, *value);
3988 }
3989 
3990 
3991 static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
3992  Handle<Name> name,
3993  Handle<Object> value,
3994  PropertyAttributes attributes) {
3995  if (lookup->GetAttributes() == attributes) {
3996  if (value->IsUninitialized()) return;
3997  SetPropertyToField(lookup, name, value);
3998  } else {
3999  ConvertAndSetLocalProperty(lookup, name, value, attributes);
4000  }
4001 }
4002 
4003 
4004 Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object,
4005  LookupResult* lookup,
4006  Handle<Name> name,
4007  Handle<Object> value,
4008  PropertyAttributes attributes,
4009  StrictMode strict_mode,
4010  StoreFromKeyed store_mode) {
4011  Isolate* isolate = object->GetIsolate();
4012 
4013  // Make sure that the top context does not change when doing callbacks or
4014  // interceptor calls.
4015  AssertNoContextChange ncc(isolate);
4016 
4017  // Optimization for 2-byte strings often used as keys in a decompression
4018  // dictionary. We internalize these short keys to avoid constantly
4019  // reallocating them.
4020  if (name->IsString() && !name->IsInternalizedString() &&
4021  Handle<String>::cast(name)->length() <= 2) {
4022  name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
4023  }
4024 
4025  // Check access rights if needed.
4026  if (object->IsAccessCheckNeeded()) {
4027  if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
4028  return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
4029  true, strict_mode);
4030  }
4031  }
4032 
4033  if (object->IsJSGlobalProxy()) {
4034  Handle<Object> proto(object->GetPrototype(), isolate);
4035  if (proto->IsNull()) return value;
4036  ASSERT(proto->IsJSGlobalObject());
4037  return SetPropertyForResult(Handle<JSObject>::cast(proto),
4038  lookup, name, value, attributes, strict_mode, store_mode);
4039  }
4040 
4041  ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
4042  lookup->holder()->map()->is_hidden_prototype());
4043 
4044  if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
4045  bool done = false;
4046  Handle<Object> result_object = SetPropertyViaPrototypes(
4047  object, name, value, attributes, strict_mode, &done);
4048  if (done) return result_object;
4049  }
4050 
4051  if (!lookup->IsFound()) {
4052  // Neither properties nor transitions found.
4053  return AddProperty(
4054  object, name, value, attributes, strict_mode, store_mode);
4055  }
4056 
4057  if (lookup->IsProperty() && lookup->IsReadOnly()) {
4058  if (strict_mode == STRICT) {
4059  Handle<Object> args[] = { name, object };
4060  Handle<Object> error = isolate->factory()->NewTypeError(
4061  "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
4062  isolate->Throw(*error);
4063  return Handle<Object>();
4064  } else {
4065  return value;
4066  }
4067  }
4068 
4069  Handle<Object> old_value = isolate->factory()->the_hole_value();
4070  bool is_observed = object->map()->is_observed() &&
4071  *name != isolate->heap()->hidden_string();
4072  if (is_observed && lookup->IsDataProperty()) {
4073  old_value = Object::GetProperty(object, name);
4074  CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
4075  }
4076 
4077  // This is a real property that is not read-only, or it is a
4078  // transition or null descriptor and there are no setters in the prototypes.
4079  Handle<Object> result = value;
4080  switch (lookup->type()) {
4081  case NORMAL:
4082  SetNormalizedProperty(handle(lookup->holder()), lookup, value);
4083  break;
4084  case FIELD:
4085  SetPropertyToField(lookup, name, value);
4086  break;
4087  case CONSTANT:
4088  // Only replace the constant if necessary.
4089  if (*value == lookup->GetConstant()) return value;
4090  SetPropertyToField(lookup, name, value);
4091  break;
4092  case CALLBACKS: {
4093  Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
4094  return SetPropertyWithCallback(object, callback_object, name, value,
4095  handle(lookup->holder()), strict_mode);
4096  }
4097  case INTERCEPTOR:
4098  result = SetPropertyWithInterceptor(handle(lookup->holder()), name, value,
4099  attributes, strict_mode);
4100  break;
4101  case TRANSITION:
4102  result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
4103  name, value, attributes);
4104  break;
4105  case HANDLER:
4106  case NONEXISTENT:
4107  UNREACHABLE();
4108  }
4109 
4110  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
4111 
4112  if (is_observed) {
4113  if (lookup->IsTransition()) {
4114  EnqueueChangeRecord(object, "add", name, old_value);
4115  } else {
4116  LookupResult new_lookup(isolate);
4117  object->LocalLookup(*name, &new_lookup, true);
4118  if (new_lookup.IsDataProperty()) {
4119  Handle<Object> new_value = Object::GetProperty(object, name);
4120  CHECK_NOT_EMPTY_HANDLE(isolate, new_value);
4121  if (!new_value->SameValue(*old_value)) {
4122  EnqueueChangeRecord(object, "update", name, old_value);
4123  }
4124  }
4125  }
4126  }
4127 
4128  return result;
4129 }
4130 
4131 
4132 // Set a real local property, even if it is READ_ONLY. If the property is not
4133 // present, add it with attributes NONE. This code is an exact clone of
4134 // SetProperty, with the check for IsReadOnly and the check for a
4135 // callback setter removed. The two lines looking up the LookupResult
4136 // result are also added. If one of the functions is changed, the other
4137 // should be.
4138 // Note that this method cannot be used to set the prototype of a function
4139 // because ConvertDescriptorToField() which is called in "case CALLBACKS:"
4140 // doesn't handle function prototypes correctly.
4141 Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
4142  Handle<JSObject> object,
4143  Handle<Name> name,
4144  Handle<Object> value,
4145  PropertyAttributes attributes,
4146  ValueType value_type,
4147  StoreMode mode,
4148  ExtensibilityCheck extensibility_check) {
4149  Isolate* isolate = object->GetIsolate();
4150 
4151  // Make sure that the top context does not change when doing callbacks or
4152  // interceptor calls.
4153  AssertNoContextChange ncc(isolate);
4154 
4155  LookupResult lookup(isolate);
4156  object->LocalLookup(*name, &lookup, true);
4157  if (!lookup.IsFound()) {
4158  object->map()->LookupTransition(*object, *name, &lookup);
4159  }
4160 
4161  // Check access rights if needed.
4162  if (object->IsAccessCheckNeeded()) {
4163  if (!isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
4164  return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
4165  false, SLOPPY);
4166  }
4167  }
4168 
4169  if (object->IsJSGlobalProxy()) {
4170  Handle<Object> proto(object->GetPrototype(), isolate);
4171  if (proto->IsNull()) return value;
4172  ASSERT(proto->IsJSGlobalObject());
4173  return SetLocalPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
4174  name, value, attributes, value_type, mode, extensibility_check);
4175  }
4176 
4177  if (lookup.IsFound() &&
4178  (lookup.type() == INTERCEPTOR || lookup.type() == CALLBACKS)) {
4179  object->LocalLookupRealNamedProperty(*name, &lookup);
4180  }
4181 
4182  // Check for accessor in prototype chain removed here in clone.
4183  if (!lookup.IsFound()) {
4184  object->map()->LookupTransition(*object, *name, &lookup);
4185  TransitionFlag flag = lookup.IsFound()
4186  ? OMIT_TRANSITION : INSERT_TRANSITION;
4187  // Neither properties nor transitions found.
4188  return AddProperty(object, name, value, attributes, SLOPPY,
4189  MAY_BE_STORE_FROM_KEYED, extensibility_check, value_type, mode, flag);
4190  }
4191 
4192  Handle<Object> old_value = isolate->factory()->the_hole_value();
4193  PropertyAttributes old_attributes = ABSENT;
4194  bool is_observed = object->map()->is_observed() &&
4195  *name != isolate->heap()->hidden_string();
4196  if (is_observed && lookup.IsProperty()) {
4197  if (lookup.IsDataProperty()) {
4198  old_value = Object::GetProperty(object, name);
4199  CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
4200  }
4201  old_attributes = lookup.GetAttributes();
4202  }
4203 
4204  // Check of IsReadOnly removed from here in clone.
4205  switch (lookup.type()) {
4206  case NORMAL:
4207  ReplaceSlowProperty(object, name, value, attributes);
4208  break;
4209  case FIELD:
4210  SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4211  break;
4212  case CONSTANT:
4213  // Only replace the constant if necessary.
4214  if (lookup.GetAttributes() != attributes ||
4215  *value != lookup.GetConstant()) {
4216  SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
4217  }
4218  break;
4219  case CALLBACKS:
4220  ConvertAndSetLocalProperty(&lookup, name, value, attributes);
4221  break;
4222  case TRANSITION: {
4223  Handle<Object> result = SetPropertyUsingTransition(
4224  handle(lookup.holder()), &lookup, name, value, attributes);
4225  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
4226  break;
4227  }
4228  case NONEXISTENT:
4229  case HANDLER:
4230  case INTERCEPTOR:
4231  UNREACHABLE();
4232  }
4233 
4234  if (is_observed) {
4235  if (lookup.IsTransition()) {
4236  EnqueueChangeRecord(object, "add", name, old_value);
4237  } else if (old_value->IsTheHole()) {
4238  EnqueueChangeRecord(object, "reconfigure", name, old_value);
4239  } else {
4240  LookupResult new_lookup(isolate);
4241  object->LocalLookup(*name, &new_lookup, true);
4242  bool value_changed = false;
4243  if (new_lookup.IsDataProperty()) {
4244  Handle<Object> new_value = Object::GetProperty(object, name);
4245  CHECK_NOT_EMPTY_HANDLE(isolate, new_value);
4246  value_changed = !old_value->SameValue(*new_value);
4247  }
4248  if (new_lookup.GetAttributes() != old_attributes) {
4249  if (!value_changed) old_value = isolate->factory()->the_hole_value();
4250  EnqueueChangeRecord(object, "reconfigure", name, old_value);
4251  } else if (value_changed) {
4252  EnqueueChangeRecord(object, "update", name, old_value);
4253  }
4254  }
4255  }
4256 
4257  return value;
4258 }
4259 
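// Returns the attributes of |name| on |object| while ignoring any named
// interceptor; when |continue_search| is set, the lookup continues up the
// prototype chain.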
4260 
4261 PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
4262  Handle<JSObject> object,
4263  Handle<JSObject> receiver,
4264  Handle<Name> name,
4265  bool continue_search) {
4266  // Check local property, ignore interceptor.
4267  Isolate* isolate = object->GetIsolate();
4268  LookupResult result(isolate);
4269  object->LocalLookupRealNamedProperty(*name, &result);
4270  if (result.IsFound()) return result.GetAttributes();
4271 
4272  if (continue_search) {
4273  // Continue searching via the prototype chain.
4274  Handle<Object> proto(object->GetPrototype(), isolate);
4275  if (!proto->IsNull()) {
4276  return JSReceiver::GetPropertyAttributeWithReceiver(
4277  Handle<JSObject>::cast(proto), receiver, name);
4278  }
4279  }
4280  return ABSENT;
4281 }
4282 
4283 
4284 PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
4285  Handle<JSObject> object,
4286  Handle<JSObject> receiver,
4287  Handle<Name> name,
4288  bool continue_search) {
4289  // TODO(rossberg): Support symbols in the API.
4290  if (name->IsSymbol()) return ABSENT;
4291 
4292  Isolate* isolate = object->GetIsolate();
4293  HandleScope scope(isolate);
4294 
4295  // Make sure that the top context does not change when doing
4296  // callbacks or interceptor calls.
4297  AssertNoContextChange ncc(isolate);
4298 
4299  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
4300  PropertyCallbackArguments args(
4301  isolate, interceptor->data(), *receiver, *object);
4302  if (!interceptor->query()->IsUndefined()) {
4303  v8::NamedPropertyQueryCallback query =
4304  v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
4305  LOG(isolate,
4306  ApiNamedPropertyAccess("interceptor-named-has", *object, *name));
4307  v8::Handle<v8::Integer> result =
4308  args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
4309  if (!result.IsEmpty()) {
4310  ASSERT(result->IsInt32());
4311  return static_cast<PropertyAttributes>(result->Int32Value());
4312  }
4313  } else if (!interceptor->getter()->IsUndefined()) {
4314  v8::NamedPropertyGetterCallback getter =
4315  v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
4316  LOG(isolate,
4317  ApiNamedPropertyAccess("interceptor-named-get-has", *object, *name));
4318  v8::Handle<v8::Value> result =
4319  args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
4320  if (!result.IsEmpty()) return DONT_ENUM;
4321  }
4322  return GetPropertyAttributePostInterceptor(
4323  object, receiver, name, continue_search);
4324 }
4325 
4326 
4327 PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
4328  Handle<JSReceiver> object,
4329  Handle<JSReceiver> receiver,
4330  Handle<Name> key) {
4331  uint32_t index = 0;
4332  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
4333  return JSObject::GetElementAttributeWithReceiver(
4334  Handle<JSObject>::cast(object), receiver, index, true);
4335  }
4336  // Named property.
4337  LookupResult lookup(object->GetIsolate());
4338  object->Lookup(*key, &lookup);
4339  return GetPropertyAttributeForResult(object, receiver, &lookup, key, true);
4340 }
4341 
4342 
4343 PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
4344  Handle<JSReceiver> object,
4345  Handle<JSReceiver> receiver,
4346  LookupResult* lookup,
4347  Handle<Name> name,
4348  bool continue_search) {
4349  // Check access rights if needed.
4350  if (object->IsAccessCheckNeeded()) {
4351  Heap* heap = object->GetHeap();
4352  Handle<JSObject> obj = Handle<JSObject>::cast(object);
4353  if (!heap->isolate()->MayNamedAccessWrapper(obj, name, v8::ACCESS_HAS)) {
4354  return JSObject::GetPropertyAttributeWithFailedAccessCheck(
4355  obj, lookup, name, continue_search);
4356  }
4357  }
4358  if (lookup->IsFound()) {
4359  switch (lookup->type()) {
4360  case NORMAL: // fall through
4361  case FIELD:
4362  case CONSTANT:
4363  case CALLBACKS:
4364  return lookup->GetAttributes();
4365  case HANDLER: {
4366  return JSProxy::GetPropertyAttributeWithHandler(
4367  handle(lookup->proxy()), receiver, name);
4368  }
4369  case INTERCEPTOR:
4370  return JSObject::GetPropertyAttributeWithInterceptor(
4371  handle(lookup->holder()),
4372  Handle<JSObject>::cast(receiver),
4373  name,
4374  continue_search);
4375  case TRANSITION:
4376  case NONEXISTENT:
4377  UNREACHABLE();
4378  }
4379  }
4380  return ABSENT;
4381 }
4382 
4383 
4384 PropertyAttributes JSReceiver::GetLocalPropertyAttribute(
4385  Handle<JSReceiver> object, Handle<Name> name) {
4386  // Check whether the name is an array index.
4387  uint32_t index = 0;
4388  if (object->IsJSObject() && name->AsArrayIndex(&index)) {
4389  return GetLocalElementAttribute(object, index);
4390  }
4391  // Named property.
4392  LookupResult lookup(object->GetIsolate());
4393  object->LocalLookup(*name, &lookup, true);
4394  return GetPropertyAttributeForResult(object, object, &lookup, name, false);
4395 }
4396 
4397 
4398 PropertyAttributes JSObject::GetElementAttributeWithReceiver(
4399  Handle<JSObject> object,
4400  Handle<JSReceiver> receiver,
4401  uint32_t index,
4402  bool continue_search) {
4403  Isolate* isolate = object->GetIsolate();
4404 
4405  // Check access rights if needed.
4406  if (object->IsAccessCheckNeeded()) {
4407  if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) {
4408  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
4409  return ABSENT;
4410  }
4411  }
4412 
4413  if (object->IsJSGlobalProxy()) {
4414  Handle<Object> proto(object->GetPrototype(), isolate);
4415  if (proto->IsNull()) return ABSENT;
4416  ASSERT(proto->IsJSGlobalObject());
4417  return GetElementAttributeWithReceiver(
4418  Handle<JSObject>::cast(proto), receiver, index, continue_search);
4419  }
4420 
4421  // Check for lookup interceptor except when bootstrapping.
4422  if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
4423  return JSObject::GetElementAttributeWithInterceptor(
4424  object, receiver, index, continue_search);
4425  }
4426 
4427  return GetElementAttributeWithoutInterceptor(
4428  object, receiver, index, continue_search);
4429 }
4430 
4431 
4432 PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
4433  Handle<JSObject> object,
4434  Handle<JSReceiver> receiver,
4435  uint32_t index,
4436  bool continue_search) {
4437  Isolate* isolate = object->GetIsolate();
4438  HandleScope scope(isolate);
4439 
4440  // Make sure that the top context does not change when doing
4441  // callbacks or interceptor calls.
4442  AssertNoContextChange ncc(isolate);
4443 
4444  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
4445  PropertyCallbackArguments args(
4446  isolate, interceptor->data(), *receiver, *object);
4447  if (!interceptor->query()->IsUndefined()) {
4448  v8::IndexedPropertyQueryCallback query =
4449  v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
4450  LOG(isolate,
4451  ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
4452  v8::Handle<v8::Integer> result = args.Call(query, index);
4453  if (!result.IsEmpty())
4454  return static_cast<PropertyAttributes>(result->Int32Value());
4455  } else if (!interceptor->getter()->IsUndefined()) {
4456  v8::IndexedPropertyGetterCallback getter =
4457  v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
4458  LOG(isolate,
4459  ApiIndexedPropertyAccess(
4460  "interceptor-indexed-get-has", *object, index));
4461  v8::Handle<v8::Value> result = args.Call(getter, index);
4462  if (!result.IsEmpty()) return NONE;
4463  }
4464 
4465  return GetElementAttributeWithoutInterceptor(
4466  object, receiver, index, continue_search);
4467 }
4468 
4469 
4470 PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
4471  Handle<JSObject> object,
4472  Handle<JSReceiver> receiver,
4473  uint32_t index,
4474  bool continue_search) {
4475  PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
4476  *receiver, *object, index);
4477  if (attr != ABSENT) return attr;
4478 
4479  // Handle [] on String objects.
4480  if (object->IsStringObjectWithCharacterAt(index)) {
4481  return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4482  }
4483 
4484  if (!continue_search) return ABSENT;
4485 
4486  Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
4487  if (proto->IsJSProxy()) {
4488  // We need to follow the spec and simulate a call to [[GetOwnProperty]].
4489  return JSProxy::GetElementAttributeWithHandler(
4490  Handle<JSProxy>::cast(proto), receiver, index);
4491  }
4492  if (proto->IsNull()) return ABSENT;
4493  return GetElementAttributeWithReceiver(
4494  Handle<JSObject>::cast(proto), receiver, index, true);
4495 }
4496 
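// Returns a shared normalized (dictionary-mode) map for |obj|. The cache slot
// is chosen by the map's hash; an equivalent cached map is reused, otherwise
// a freshly normalized copy is stored into the slot.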
4497 
4498 Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache,
4499  Handle<JSObject> obj,
4500  PropertyNormalizationMode mode) {
4501  int index = obj->map()->Hash() % kEntries;
4502  Handle<Object> result = handle(cache->get(index), cache->GetIsolate());
4503  if (result->IsMap() &&
4504  Handle<Map>::cast(result)->EquivalentToForNormalization(obj->map(),
4505  mode)) {
4506 #ifdef VERIFY_HEAP
4507  if (FLAG_verify_heap) {
4508  Handle<Map>::cast(result)->SharedMapVerify();
4509  }
4510 #endif
4511 #ifdef ENABLE_SLOW_ASSERTS
4512  if (FLAG_enable_slow_asserts) {
4513  // The cached map should match newly created normalized map bit-by-bit,
4514  // except for the code cache, which can contain some ics which can be
4515  // applied to the shared map.
4516  Handle<Map> fresh = Map::CopyNormalized(handle(obj->map()), mode,
4517  SHARED_NORMALIZED_MAP);
4518 
4519  ASSERT(memcmp(fresh->address(),
4520  Handle<Map>::cast(result)->address(),
4521  Map::kCodeCacheOffset) == 0);
4522  STATIC_ASSERT(Map::kDependentCodeOffset ==
4523  Map::kCodeCacheOffset + kPointerSize);
4524  int offset = Map::kDependentCodeOffset + kPointerSize;
4525  ASSERT(memcmp(fresh->address() + offset,
4526  Handle<Map>::cast(result)->address() + offset,
4527  Map::kSize - offset) == 0);
4528  }
4529 #endif
4530  return Handle<Map>::cast(result);
4531  }
4532 
4533  Isolate* isolate = cache->GetIsolate();
4534  Handle<Map> map = Map::CopyNormalized(handle(obj->map()), mode,
4535  SHARED_NORMALIZED_MAP);
4536  ASSERT(map->is_dictionary_map());
4537  cache->set(index, *map);
4538  isolate->counters()->normalized_maps()->Increment();
4539 
4540  return map;
4541 }
4542 
4543 
4544 void NormalizedMapCache::Clear() {
4545  int entries = length();
4546  for (int i = 0; i != entries; i++) {
4547  set_undefined(i);
4548  }
4549 }
4550 
4551 
4552 void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
4553  Handle<Name> name,
4554  Handle<Code> code) {
4555  Handle<Map> map(object->map());
4556  Map::UpdateCodeCache(map, name, code);
4557 }
4558 
4559 
4560 void JSObject::NormalizeProperties(Handle<JSObject> object,
4561  PropertyNormalizationMode mode,
4562  int expected_additional_properties) {
4563  if (!object->HasFastProperties()) return;
4564 
4565  // The global object is always normalized.
4566  ASSERT(!object->IsGlobalObject());
4567  // JSGlobalProxy must never be normalized
4568  ASSERT(!object->IsJSGlobalProxy());
4569 
4570  Isolate* isolate = object->GetIsolate();
4571  HandleScope scope(isolate);
4572  Handle<Map> map(object->map());
4573 
4574  // Allocate new content.
4575  int real_size = map->NumberOfOwnDescriptors();
4576  int property_count = real_size;
4577  if (expected_additional_properties > 0) {
4578  property_count += expected_additional_properties;
4579  } else {
4580  property_count += 2; // Make space for two more properties.
4581  }
4582  Handle<NameDictionary> dictionary =
4583  isolate->factory()->NewNameDictionary(property_count);
4584 
4585  Handle<DescriptorArray> descs(map->instance_descriptors());
4586  for (int i = 0; i < real_size; i++) {
4587  PropertyDetails details = descs->GetDetails(i);
4588  switch (details.type()) {
4589  case CONSTANT: {
4590  Handle<Name> key(descs->GetKey(i));
4591  Handle<Object> value(descs->GetConstant(i), isolate);
4592  PropertyDetails d = PropertyDetails(
4593  details.attributes(), NORMAL, i + 1);
4594  dictionary = NameDictionaryAdd(dictionary, key, value, d);
4595  break;
4596  }
4597  case FIELD: {
4598  Handle<Name> key(descs->GetKey(i));
4599  Handle<Object> value(
4600  object->RawFastPropertyAt(descs->GetFieldIndex(i)), isolate);
4601  PropertyDetails d =
4602  PropertyDetails(details.attributes(), NORMAL, i + 1);
4603  dictionary = NameDictionaryAdd(dictionary, key, value, d);
4604  break;
4605  }
4606  case CALLBACKS: {
4607  Handle<Name> key(descs->GetKey(i));
4608  Handle<Object> value(descs->GetCallbacksObject(i), isolate);
4609  PropertyDetails d = PropertyDetails(
4610  details.attributes(), CALLBACKS, i + 1);
4611  dictionary = NameDictionaryAdd(dictionary, key, value, d);
4612  break;
4613  }
4614  case INTERCEPTOR:
4615  break;
4616  case HANDLER:
4617  case NORMAL:
4618  case TRANSITION:
4619  case NONEXISTENT:
4620  UNREACHABLE();
4621  break;
4622  }
4623  }
4624 
4625  // Copy the next enumeration index from instance descriptor.
4626  dictionary->SetNextEnumerationIndex(real_size + 1);
4627 
4628  Handle<NormalizedMapCache> cache(
4629  isolate->context()->native_context()->normalized_map_cache());
4630  Handle<Map> new_map = NormalizedMapCache::Get(cache, object, mode);
4631  ASSERT(new_map->is_dictionary_map());
4632 
4633  // From here on we cannot fail and we shouldn't GC anymore.
4634  DisallowHeapAllocation no_allocation;
4635 
4636  // Resize the object in the heap if necessary.
4637  int new_instance_size = new_map->instance_size();
4638  int instance_size_delta = map->instance_size() - new_instance_size;
4639  ASSERT(instance_size_delta >= 0);
4640  Heap* heap = isolate->heap();
4641  heap->CreateFillerObjectAt(object->address() + new_instance_size,
4642  instance_size_delta);
4643  heap->AdjustLiveBytes(object->address(),
4644  -instance_size_delta,
4645  Heap::FROM_MUTATOR);
4646 
4647  object->set_map(*new_map);
4648  map->NotifyLeafMapLayoutChange();
4649 
4650  object->set_properties(*dictionary);
4651 
4652  isolate->counters()->props_to_dictionary()->Increment();
4653 
4654 #ifdef DEBUG
4655  if (FLAG_trace_normalization) {
4656  PrintF("Object properties have been normalized:\n");
4657  object->Print();
4658  }
4659 #endif
4660 }
4661 
4662 
4663 void JSObject::TransformToFastProperties(Handle<JSObject> object,
4664  int unused_property_fields) {
4665  if (object->HasFastProperties()) return;
4666  ASSERT(!object->IsGlobalObject());
4667  CALL_HEAP_FUNCTION_VOID(
4668  object->GetIsolate(),
4669  object->property_dictionary()->TransformPropertiesToFastFor(
4670  *object, unused_property_fields));
4671 }
4672 
4673 
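// Copies the elements of a fast or fast-double backing store into a
// SeededNumberDictionary, boxing doubles as HeapNumbers and skipping holes.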
4674 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
4675  Handle<FixedArrayBase> array,
4676  int length,
4677  Handle<SeededNumberDictionary> dictionary) {
4678  Isolate* isolate = array->GetIsolate();
4679  Factory* factory = isolate->factory();
4680  bool has_double_elements = array->IsFixedDoubleArray();
4681  for (int i = 0; i < length; i++) {
4682  Handle<Object> value;
4683  if (has_double_elements) {
4684  Handle<FixedDoubleArray> double_array =
4685  Handle<FixedDoubleArray>::cast(array);
4686  if (double_array->is_the_hole(i)) {
4687  value = factory->the_hole_value();
4688  } else {
4689  value = factory->NewHeapNumber(double_array->get_scalar(i));
4690  }
4691  } else {
4692  value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
4693  }
4694  if (!value->IsTheHole()) {
4695  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
4696  dictionary =
4697  SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
4698  }
4699  }
4700  return dictionary;
4701 }
4702 
4703 
4704 Handle<SeededNumberDictionary> JSObject::NormalizeElements(
4705  Handle<JSObject> object) {
4706  ASSERT(!object->HasExternalArrayElements() &&
4707  !object->HasFixedTypedArrayElements());
4708  Isolate* isolate = object->GetIsolate();
4709  Factory* factory = isolate->factory();
4710 
4711  // Find the backing store.
4712  Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
4713  bool is_arguments =
4714  (array->map() == isolate->heap()->sloppy_arguments_elements_map());
4715  if (is_arguments) {
4716  array = handle(FixedArrayBase::cast(
4717  Handle<FixedArray>::cast(array)->get(1)));
4718  }
4719  if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);
4720 
4721  ASSERT(object->HasFastSmiOrObjectElements() ||
4722  object->HasFastDoubleElements() ||
4723  object->HasFastArgumentsElements());
4724  // Compute the effective length and allocate a new backing store.
4725  int length = object->IsJSArray()
4726  ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
4727  : array->length();
4728  int old_capacity = 0;
4729  int used_elements = 0;
4730  object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
4731  Handle<SeededNumberDictionary> dictionary =
4732  factory->NewSeededNumberDictionary(used_elements);
4733 
4734  dictionary = CopyFastElementsToDictionary(array, length, dictionary);
4735 
4736  // Switch to using the dictionary as the backing storage for elements.
4737  if (is_arguments) {
4738  FixedArray::cast(object->elements())->set(1, *dictionary);
4739  } else {
4740  // Set the new map first to satisfy the elements type assert in
4741  // set_elements().
4742  Handle<Map> new_map =
4743  JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
4744 
4745  JSObject::MigrateToMap(object, new_map);
4746  object->set_elements(*dictionary);
4747  }
4748 
4749  isolate->counters()->elements_to_dictionary()->Increment();
4750 
4751 #ifdef DEBUG
4752  if (FLAG_trace_normalization) {
4753  PrintF("Object elements have been normalized:\n");
4754  object->Print();
4755  }
4756 #endif
4757 
4758  ASSERT(object->HasDictionaryElements() ||
4759  object->HasDictionaryArgumentsElements());
4760  return dictionary;
4761 }
4762 
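// Generates a non-zero identity hash that fits in a Smi, retrying a bounded
// number of times if the random number generator yields zero.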
4763 
4764 Smi* JSReceiver::GenerateIdentityHash() {
4765  Isolate* isolate = GetIsolate();
4766 
4767  int hash_value;
4768  int attempts = 0;
4769  do {
4770  // Generate a random 32-bit hash value but limit range to fit
4771  // within a smi.
4772  hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
4773  attempts++;
4774  } while (hash_value == 0 && attempts < 30);
4775  hash_value = hash_value != 0 ? hash_value : 1; // never return 0
4776 
4777  return Smi::FromInt(hash_value);
4778 }
4779 
4780 
4781 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
4782  Isolate* isolate = object->GetIsolate();
4783  SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
4784 }
4785 
4786 
4787 Object* JSObject::GetIdentityHash() {
4788  Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string());
4789  return stored_value->IsSmi() ? stored_value : GetHeap()->undefined_value();
4790 }
4791 
4792 
4793 Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
4794  Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate());
4795  if (hash->IsSmi())
4796  return hash;
4797 
4798  Isolate* isolate = object->GetIsolate();
4799 
4800  hash = handle(object->GenerateIdentityHash(), isolate);
4801  Handle<Object> result = SetHiddenProperty(object,
4802  isolate->factory()->identity_hash_string(), hash);
4803 
4804  if (result->IsUndefined()) {
4805  // Trying to get hash of detached proxy.
4806  return handle(Smi::FromInt(0), isolate);
4807  }
4808 
4809  return hash;
4810 }
4811 
4812 
4813 Object* JSProxy::GetIdentityHash() {
4814  return this->hash();
4815 }
4816 
4817 
4818 Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
4819  Isolate* isolate = proxy->GetIsolate();
4820 
4821  Handle<Object> hash(proxy->GetIdentityHash(), isolate);
4822  if (hash->IsSmi())
4823  return hash;
4824 
4825  hash = handle(proxy->GenerateIdentityHash(), isolate);
4826  proxy->set_hash(*hash);
4827  return hash;
4828 }
4829 
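// Returns the value of the hidden property |key|, or the hole if it is not
// present; a Smi stored inline in the hidden-properties slot is interpreted
// as the identity hash.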
4830 
4831 Object* JSObject::GetHiddenProperty(Name* key) {
4832  ASSERT(key->IsUniqueName());
4833  if (IsJSGlobalProxy()) {
4834  // For a proxy, use the prototype as target object.
4835  Object* proxy_parent = GetPrototype();
4836  // If the proxy is detached, return undefined.
4837  if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
4838  ASSERT(proxy_parent->IsJSGlobalObject());
4839  return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
4840  }
4841  ASSERT(!IsJSGlobalProxy());
4842  Object* inline_value = GetHiddenPropertiesHashTable();
4843 
4844  if (inline_value->IsSmi()) {
4845  // Handle inline-stored identity hash.
4846  if (key == GetHeap()->identity_hash_string()) {
4847  return inline_value;
4848  } else {
4849  return GetHeap()->the_hole_value();
4850  }
4851  }
4852 
4853  if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();
4854 
4855  ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
4856  Object* entry = hashtable->Lookup(key);
4857  return entry;
4858 }
4859 
4860 
4861 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
4862  Handle<Name> key,
4863  Handle<Object> value) {
4864  Isolate* isolate = object->GetIsolate();
4865 
4866  ASSERT(key->IsUniqueName());
4867  if (object->IsJSGlobalProxy()) {
4868  // For a proxy, use the prototype as target object.
4869  Handle<Object> proxy_parent(object->GetPrototype(), isolate);
4870  // If the proxy is detached, return undefined.
4871  if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
4872  ASSERT(proxy_parent->IsJSGlobalObject());
4873  return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
4874  }
4875  ASSERT(!object->IsJSGlobalProxy());
4876 
4877  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4878 
4879  // If there is no backing store yet, store the identity hash inline.
4880  if (value->IsSmi() &&
4881  *key == *isolate->factory()->identity_hash_string() &&
4882  (inline_value->IsUndefined() || inline_value->IsSmi())) {
4883  return JSObject::SetHiddenPropertiesHashTable(object, value);
4884  }
4885 
4886  Handle<ObjectHashTable> hashtable =
4887  GetOrCreateHiddenPropertiesHashtable(object);
4888 
4889  // If it was found, check if the key is already in the dictionary.
4890  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
4891  value);
4892  if (*new_table != *hashtable) {
4893  // If adding the key expanded the dictionary (i.e., Add returned a new
4894  // dictionary), store it back to the object.
4895  SetHiddenPropertiesHashTable(object, new_table);
4896  }
4897 
4898  // Return this to mark success.
4899  return object;
4900 }
4901 
4902 
4903 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
4904  Isolate* isolate = object->GetIsolate();
4905  ASSERT(key->IsUniqueName());
4906 
4907  if (object->IsJSGlobalProxy()) {
4908  Handle<Object> proto(object->GetPrototype(), isolate);
4909  if (proto->IsNull()) return;
4910  ASSERT(proto->IsJSGlobalObject());
4911  return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
4912  }
4913 
4914  Object* inline_value = object->GetHiddenPropertiesHashTable();
4915 
4916  // We never delete (inline-stored) identity hashes.
4917  ASSERT(*key != *isolate->factory()->identity_hash_string());
4918  if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
4919 
4920  Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
4921  ObjectHashTable::Put(hashtable, key, isolate->factory()->the_hole_value());
4922 }
4923 
4924 
4925 bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
4926  Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
4927  return GetPropertyAttributePostInterceptor(
4928  object, object, hidden, false) != ABSENT;
4929 }
4930 
4931 
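// Returns the backing store for hidden properties: a Smi when only the
// identity hash is stored inline, an ObjectHashTable once one has been
// allocated, or undefined when the object has no hidden properties.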
4932 Object* JSObject::GetHiddenPropertiesHashTable() {
4933  ASSERT(!IsJSGlobalProxy());
4934  if (HasFastProperties()) {
4935  // If the object has fast properties, check whether the first slot
4936  // in the descriptor array matches the hidden string. Since the
4937  // hidden string's hash code is zero (and no other name has hash
4938  // code zero) it will always occupy the first entry if present.
4939  DescriptorArray* descriptors = this->map()->instance_descriptors();
4940  if (descriptors->number_of_descriptors() > 0) {
4941  int sorted_index = descriptors->GetSortedKeyIndex(0);
4942  if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
4943  sorted_index < map()->NumberOfOwnDescriptors()) {
4944  ASSERT(descriptors->GetType(sorted_index) == FIELD);
4945  ASSERT(descriptors->GetDetails(sorted_index).representation().
4946  IsCompatibleForLoad(Representation::Tagged()));
4947  return this->RawFastPropertyAt(
4948  descriptors->GetFieldIndex(sorted_index));
4949  } else {
4950  return GetHeap()->undefined_value();
4951  }
4952  } else {
4953  return GetHeap()->undefined_value();
4954  }
4955  } else {
4956  PropertyAttributes attributes;
4957  // You can't install a getter on a property indexed by the hidden string,
4958  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
4959  // object.
4960  return GetLocalPropertyPostInterceptor(this,
4961  GetHeap()->hidden_string(),
4962  &attributes)->ToObjectUnchecked();
4963  }
4964 }
4965 
4966 Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
4967  Handle<JSObject> object) {
4968  Isolate* isolate = object->GetIsolate();
4969 
4970  static const int kInitialCapacity = 4;
4971  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
4972  if (inline_value->IsHashTable()) {
4973  return Handle<ObjectHashTable>::cast(inline_value);
4974  }
4975 
4976  Handle<ObjectHashTable> hashtable = isolate->factory()->NewObjectHashTable(
4977  kInitialCapacity,
4978  USE_CUSTOM_MINIMUM_CAPACITY);
4979 
4980  if (inline_value->IsSmi()) {
4981  // We were storing the identity hash inline and now allocated an actual
4982  // dictionary. Put the identity hash into the new dictionary.
4983  hashtable = ObjectHashTable::Put(hashtable,
4984  isolate->factory()->identity_hash_string(),
4985  inline_value);
4986  }
4987 
4988  JSObject::SetLocalPropertyIgnoreAttributes(
4989  object,
4990  isolate->factory()->hidden_string(),
4991  hashtable,
4992  DONT_ENUM,
4993  OPTIMAL_REPRESENTATION,
4994  ALLOW_AS_CONSTANT,
4995  OMIT_EXTENSIBILITY_CHECK);
4996 
4997  return hashtable;
4998 }
4999 
5000 
5001 Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
5002  Handle<Object> value) {
5003  ASSERT(!object->IsJSGlobalProxy());
5004 
5005  Isolate* isolate = object->GetIsolate();
5006 
5007  // We can store the identity hash inline iff there is no backing store
5008  // for hidden properties yet.
5009  ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi());
5010  if (object->HasFastProperties()) {
5011  // If the object has fast properties, check whether the first slot
5012  // in the descriptor array matches the hidden string. Since the
5013  // hidden string's hash code is zero (and no other name has hash
5014  // code zero) it will always occupy the first entry if present.
5015  DescriptorArray* descriptors = object->map()->instance_descriptors();
5016  if (descriptors->number_of_descriptors() > 0) {
5017  int sorted_index = descriptors->GetSortedKeyIndex(0);
5018  if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
5019  && sorted_index < object->map()->NumberOfOwnDescriptors()) {
5020  ASSERT(descriptors->GetType(sorted_index) == FIELD);
5021  object->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
5022  *value);
5023  return object;
5024  }
5025  }
5026  }
5027 
5028  SetLocalPropertyIgnoreAttributes(object,
5029  isolate->factory()->hidden_string(),
5030  value,
5031  DONT_ENUM,
5032  OPTIMAL_REPRESENTATION,
5033  ALLOW_AS_CONSTANT,
5034  OMIT_EXTENSIBILITY_CHECK);
5035  return object;
5036 }
5037 
5038 
5039 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
5040  Handle<Name> name,
5041  DeleteMode mode) {
5042  // Check local property, ignore interceptor.
5043  Isolate* isolate = object->GetIsolate();
5044  LookupResult result(isolate);
5045  object->LocalLookupRealNamedProperty(*name, &result);
5046  if (!result.IsFound()) return isolate->factory()->true_value();
5047 
5048  // Normalize object if needed.
5049  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
5050 
5051  return DeleteNormalizedProperty(object, name, mode);
5052 }
5053 
5054 
5055 Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object,
5056  Handle<Name> name) {
5057  Isolate* isolate = object->GetIsolate();
5058 
5059  // TODO(rossberg): Support symbols in the API.
5060  if (name->IsSymbol()) return isolate->factory()->false_value();
5061 
5062  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
5063  if (!interceptor->deleter()->IsUndefined()) {
5064  v8::NamedPropertyDeleterCallback deleter =
5065  v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
5066  LOG(isolate,
5067  ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
5068  PropertyCallbackArguments args(
5069  isolate, interceptor->data(), *object, *object);
5070  v8::Handle<v8::Boolean> result =
5071  args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
5072  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5073  if (!result.IsEmpty()) {
5074  ASSERT(result->IsBoolean());
5075  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5076  result_internal->VerifyApiCallResultType();
5077  // Rebox CustomArguments::kReturnValueOffset before returning.
5078  return handle(*result_internal, isolate);
5079  }
5080  }
5081  Handle<Object> result =
5082  DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
5083  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
5084  return result;
5085 }
5086 
5087 
5088 Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object,
5089  uint32_t index) {
5090  Isolate* isolate = object->GetIsolate();
5091  Factory* factory = isolate->factory();
5092 
5093  // Make sure that the top context does not change when doing
5094  // callbacks or interceptor calls.
5095  AssertNoContextChange ncc(isolate);
5096 
5097  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
5098  if (interceptor->deleter()->IsUndefined()) return factory->false_value();
5099  v8::IndexedPropertyDeleterCallback deleter =
5100  v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
5101  LOG(isolate,
5102  ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
5103  PropertyCallbackArguments args(
5104  isolate, interceptor->data(), *object, *object);
5105  v8::Handle<v8::Boolean> result = args.Call(deleter, index);
5107  if (!result.IsEmpty()) {
5108  ASSERT(result->IsBoolean());
5109  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
5110  result_internal->VerifyApiCallResultType();
5111  // Rebox CustomArguments::kReturnValueOffset before returning.
5112  return handle(*result_internal, isolate);
5113  }
5114  Handle<Object> delete_result = object->GetElementsAccessor()->Delete(
5115  object, index, NORMAL_DELETION);
5117  return delete_result;
5118 }
5119 
5120 
5121 Handle<Object> JSObject::DeleteElement(Handle<JSObject> object,
5122  uint32_t index,
5123  DeleteMode mode) {
5124  Isolate* isolate = object->GetIsolate();
5125  Factory* factory = isolate->factory();
5126 
5127  // Check access rights if needed.
5128  if (object->IsAccessCheckNeeded() &&
5129  !isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_DELETE)) {
5130  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE);
5132  return factory->false_value();
5133  }
5134 
5135  if (object->IsStringObjectWithCharacterAt(index)) {
5136  if (mode == STRICT_DELETION) {
5137  // Deleting a non-configurable property in strict mode.
5138  Handle<Object> name = factory->NewNumberFromUint(index);
5139  Handle<Object> args[2] = { name, object };
5140  Handle<Object> error =
5141  factory->NewTypeError("strict_delete_property",
5142  HandleVector(args, 2));
5143  isolate->Throw(*error);
5144  return Handle<Object>();
5145  }
5146  return factory->false_value();
5147  }
5148 
5149  if (object->IsJSGlobalProxy()) {
5150  Handle<Object> proto(object->GetPrototype(), isolate);
5151  if (proto->IsNull()) return factory->false_value();
5152  ASSERT(proto->IsJSGlobalObject());
5153  return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
5154  }
5155 
5156  Handle<Object> old_value;
5157  bool should_enqueue_change_record = false;
5158  if (object->map()->is_observed()) {
5159  should_enqueue_change_record = HasLocalElement(object, index);
5160  if (should_enqueue_change_record) {
5161  if (object->GetLocalElementAccessorPair(index) != NULL) {
5162  old_value = Handle<Object>::cast(factory->the_hole_value());
5163  } else {
5164  old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
5165  }
5166  }
5167  }
5168 
5169  // Skip interceptor if forcing deletion.
5170  Handle<Object> result;
5171  if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
5172  result = DeleteElementWithInterceptor(object, index);
5173  } else {
5174  result = object->GetElementsAccessor()->Delete(object, index, mode);
5175  }
5176 
5177  if (should_enqueue_change_record && !HasLocalElement(object, index)) {
5178  Handle<String> name = factory->Uint32ToString(index);
5179  EnqueueChangeRecord(object, "delete", name, old_value);
5180  }
5181 
5182  return result;
5183 }
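For orientation, DeleteElement is where the JavaScript delete operator ends up for indexed properties; the string-object branch above rejects deleting character indices, and the STRICT_DELETION branch turns that rejection into a TypeError. A minimal sketch of the observable behaviour:

  var a = [1, 2, 3];
  delete a[1];          // true: the element is removed, leaving a hole
  a.length;             // still 3; a[1] is now undefined

  var s = new String("hi");
  delete s[0];          // false: character indices are non-configurable
  (function() { "use strict"; delete s[0]; })();   // throws TypeError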
5184 
5185 
5186 Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
5187  Handle<Name> name,
5188  DeleteMode mode) {
5189  Isolate* isolate = object->GetIsolate();
5190  // ECMA-262, 3rd, 8.6.2.5
5191  ASSERT(name->IsName());
5192 
5193  // Check access rights if needed.
5194  if (object->IsAccessCheckNeeded() &&
5195  !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_DELETE)) {
5196  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_DELETE);
5198  return isolate->factory()->false_value();
5199  }
5200 
5201  if (object->IsJSGlobalProxy()) {
5202  Object* proto = object->GetPrototype();
5203  if (proto->IsNull()) return isolate->factory()->false_value();
5204  ASSERT(proto->IsJSGlobalObject());
5205  return JSGlobalObject::DeleteProperty(
5206  handle(JSGlobalObject::cast(proto)), name, mode);
5207  }
5208 
5209  uint32_t index = 0;
5210  if (name->AsArrayIndex(&index)) {
5211  return DeleteElement(object, index, mode);
5212  }
5213 
5214  LookupResult lookup(isolate);
5215  object->LocalLookup(*name, &lookup, true);
5216  if (!lookup.IsFound()) return isolate->factory()->true_value();
5217  // Ignore attributes if forcing a deletion.
5218  if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
5219  if (mode == STRICT_DELETION) {
5220  // Deleting a non-configurable property in strict mode.
5221  Handle<Object> args[2] = { name, object };
5222  Handle<Object> error = isolate->factory()->NewTypeError(
5223  "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
5224  isolate->Throw(*error);
5225  return Handle<Object>();
5226  }
5227  return isolate->factory()->false_value();
5228  }
5229 
5230  Handle<Object> old_value = isolate->factory()->the_hole_value();
5231  bool is_observed = object->map()->is_observed() &&
5232  *name != isolate->heap()->hidden_string();
5233  if (is_observed && lookup.IsDataProperty()) {
5234  old_value = Object::GetProperty(object, name);
5235  CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
5236  }
5237  Handle<Object> result;
5238 
5239  // Check for interceptor.
5240  if (lookup.IsInterceptor()) {
5241  // Skip interceptor if forcing a deletion.
5242  if (mode == FORCE_DELETION) {
5243  result = DeletePropertyPostInterceptor(object, name, mode);
5244  } else {
5245  result = DeletePropertyWithInterceptor(object, name);
5246  }
5247  } else {
5248  // Normalize object if needed.
5250  // Make sure the properties are normalized before removing the entry.
5251  result = DeleteNormalizedProperty(object, name, mode);
5252  }
5253 
5254  if (is_observed && !HasLocalProperty(object, name)) {
5255  EnqueueChangeRecord(object, "delete", name, old_value);
5256  }
5257 
5258  return result;
5259 }
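The strict_delete_property error constructed above is what a strict-mode delete of a non-configurable property reports; sloppy-mode code just sees false. A minimal sketch:

  var o = {};
  Object.defineProperty(o, "x", { value: 1, configurable: false });

  delete o.x;           // false in sloppy mode; the property survives
  (function() {
    "use strict";
    delete o.x;         // throws TypeError (strict_delete_property)
  })();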
5260 
5261 
5262 Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
5263  uint32_t index,
5264  DeleteMode mode) {
5265  if (object->IsJSProxy()) {
5266  return JSProxy::DeleteElementWithHandler(
5267  Handle<JSProxy>::cast(object), index, mode);
5268  }
5269  return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
5270 }
5271 
5272 
5273 Handle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
5274  Handle<Name> name,
5275  DeleteMode mode) {
5276  if (object->IsJSProxy()) {
5277  return JSProxy::DeletePropertyWithHandler(
5278  Handle<JSProxy>::cast(object), name, mode);
5279  }
5280  return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
5281 }
5282 
5283 
5284 bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
5285  ElementsKind kind,
5286  Object* object) {
5287  ASSERT(IsFastObjectElementsKind(kind) ||
5288  kind == DICTIONARY_ELEMENTS);
5289  if (IsFastObjectElementsKind(kind)) {
5290  int length = IsJSArray()
5291  ? Smi::cast(JSArray::cast(this)->length())->value()
5292  : elements->length();
5293  for (int i = 0; i < length; ++i) {
5294  Object* element = elements->get(i);
5295  if (!element->IsTheHole() && element == object) return true;
5296  }
5297  } else {
5298  Object* key =
5300  if (!key->IsUndefined()) return true;
5301  }
5302  return false;
5303 }
5304 
5305 
5306 // Check whether this object references another object.
5307 bool JSObject::ReferencesObject(Object* obj) {
5308  Map* map_of_this = map();
5309  Heap* heap = GetHeap();
5310  DisallowHeapAllocation no_allocation;
5311 
5312  // Is the object the constructor for this object?
5313  if (map_of_this->constructor() == obj) {
5314  return true;
5315  }
5316 
5317  // Is the object the prototype for this object?
5318  if (map_of_this->prototype() == obj) {
5319  return true;
5320  }
5321 
5322  // Check if the object is among the named properties.
5323  Object* key = SlowReverseLookup(obj);
5324  if (!key->IsUndefined()) {
5325  return true;
5326  }
5327 
5328  // Check if the object is among the indexed properties.
5329  ElementsKind kind = GetElementsKind();
5330  switch (kind) {
5331  // Raw pixels and external arrays do not reference other
5332  // objects.
5333 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5334  case EXTERNAL_##TYPE##_ELEMENTS: \
5335  case TYPE##_ELEMENTS: \
5336  break;
5337 
5339 #undef TYPED_ARRAY_CASE
5340 
5341  case FAST_DOUBLE_ELEMENTS:
5343  break;
5344  case FAST_SMI_ELEMENTS:
5346  break;
5347  case FAST_ELEMENTS:
5348  case FAST_HOLEY_ELEMENTS:
5349  case DICTIONARY_ELEMENTS: {
5350  FixedArray* elements = FixedArray::cast(this->elements());
5351  if (ReferencesObjectFromElements(elements, kind, obj)) return true;
5352  break;
5353  }
5355  FixedArray* parameter_map = FixedArray::cast(elements());
5356  // Check the mapped parameters.
5357  int length = parameter_map->length();
5358  for (int i = 2; i < length; ++i) {
5359  Object* value = parameter_map->get(i);
5360  if (!value->IsTheHole() && value == obj) return true;
5361  }
5362  // Check the arguments.
5363  FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
5364  kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
5366  if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
5367  break;
5368  }
5369  }
5370 
5371  // For functions check the context.
5372  if (IsJSFunction()) {
5373  // Get the constructor function for arguments array.
5374  JSObject* arguments_boilerplate =
5375  heap->isolate()->context()->native_context()->
5376  sloppy_arguments_boilerplate();
5377  JSFunction* arguments_function =
5378  JSFunction::cast(arguments_boilerplate->map()->constructor());
5379 
5380  // Get the context and don't check if it is the native context.
5381  JSFunction* f = JSFunction::cast(this);
5382  Context* context = f->context();
5383  if (context->IsNativeContext()) {
5384  return false;
5385  }
5386 
5387  // Check the non-special context slots.
5388  for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
5389  // Only check JS objects.
5390  if (context->get(i)->IsJSObject()) {
5391  JSObject* ctxobj = JSObject::cast(context->get(i));
5392  // If it is an arguments array check the content.
5393  if (ctxobj->map()->constructor() == arguments_function) {
5394  if (ctxobj->ReferencesObject(obj)) {
5395  return true;
5396  }
5397  } else if (ctxobj == obj) {
5398  return true;
5399  }
5400  }
5401  }
5402 
5403  // Check the context extension (if any) if it can have references.
5404  if (context->has_extension() && !context->IsCatchContext()) {
5405  // With harmony scoping, a JSFunction may have a global context.
5406  // TODO(mvstanton): walk into the ScopeInfo.
5407  if (FLAG_harmony_scoping && context->IsGlobalContext()) {
5408  return false;
5409  }
5410 
5411  return JSObject::cast(context->extension())->ReferencesObject(obj);
5412  }
5413  }
5414 
5415  // No references to object.
5416  return false;
5417 }
5418 
5419 
5420 Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
5421  Isolate* isolate = object->GetIsolate();
5422 
5423  if (!object->map()->is_extensible()) return object;
5424 
5425  if (object->IsAccessCheckNeeded() &&
5426  !isolate->MayNamedAccessWrapper(object,
5427  isolate->factory()->undefined_value(),
5428  v8::ACCESS_KEYS)) {
5431  return isolate->factory()->false_value();
5432  }
5433 
5434  if (object->IsJSGlobalProxy()) {
5435  Handle<Object> proto(object->GetPrototype(), isolate);
5436  if (proto->IsNull()) return object;
5437  ASSERT(proto->IsJSGlobalObject());
5439  }
5440 
5441  // It's not possible to seal objects with external array elements
5442  if (object->HasExternalArrayElements() ||
5443  object->HasFixedTypedArrayElements()) {
5444  Handle<Object> error =
5445  isolate->factory()->NewTypeError(
5446  "cant_prevent_ext_external_array_elements",
5447  HandleVector(&object, 1));
5448  isolate->Throw(*error);
5449  return Handle<Object>();
5450  }
5451 
5452  // If there are fast elements we normalize.
5453  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
5454  ASSERT(object->HasDictionaryElements() ||
5455  object->HasDictionaryArgumentsElements());
5456 
5457  // Make sure that we never go back to fast case.
5458  dictionary->set_requires_slow_elements();
5459 
5460  // Do a map transition, other objects with this map may still
5461  // be extensible.
5462  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5463  Handle<Map> new_map = Map::Copy(handle(object->map()));
5464 
5465  new_map->set_is_extensible(false);
5466  JSObject::MigrateToMap(object, new_map);
5467  ASSERT(!object->map()->is_extensible());
5468 
5469  if (object->map()->is_observed()) {
5470  EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
5471  isolate->factory()->the_hole_value());
5472  }
5473  return object;
5474 }
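PreventExtensions is reached through Object.preventExtensions: after the map transition above, new properties can no longer be added, existing ones are untouched, and observed objects get a "preventExtensions" change record. A minimal sketch:

  var o = { a: 1 };
  Object.preventExtensions(o);

  Object.isExtensible(o);                      // false
  o.b = 2;                                     // silently ignored in sloppy mode
  (function() { "use strict"; o.c = 3; })();   // throws TypeError
  o.a = 42;                                    // existing properties stay writable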
5475 
5476 
5477 template<typename Dictionary>
5478 static void FreezeDictionary(Dictionary* dictionary) {
5479  int capacity = dictionary->Capacity();
5480  for (int i = 0; i < capacity; i++) {
5481  Object* k = dictionary->KeyAt(i);
5482  if (dictionary->IsKey(k)) {
5483  PropertyDetails details = dictionary->DetailsAt(i);
5484  int attrs = DONT_DELETE;
5485  // READ_ONLY is an invalid attribute for JS setters/getters.
5486  if (details.type() != CALLBACKS ||
5487  !dictionary->ValueAt(i)->IsAccessorPair()) {
5488  attrs |= READ_ONLY;
5489  }
5490  details = details.CopyAddAttributes(
5491  static_cast<PropertyAttributes>(attrs));
5492  dictionary->DetailsAtPut(i, details);
5493  }
5494  }
5495 }
5496 
5497 
5498 Handle<Object> JSObject::Freeze(Handle<JSObject> object) {
5499  // Freezing sloppy arguments should be handled elsewhere.
5500  ASSERT(!object->HasSloppyArgumentsElements());
5501  ASSERT(!object->map()->is_observed());
5502 
5503  if (object->map()->is_frozen()) return object;
5504 
5505  Isolate* isolate = object->GetIsolate();
5506  if (object->IsAccessCheckNeeded() &&
5507  !isolate->MayNamedAccessWrapper(object,
5508  isolate->factory()->undefined_value(),
5509  v8::ACCESS_KEYS)) {
5512  return isolate->factory()->false_value();
5513  }
5514 
5515  if (object->IsJSGlobalProxy()) {
5516  Handle<Object> proto(object->GetPrototype(), isolate);
5517  if (proto->IsNull()) return object;
5518  ASSERT(proto->IsJSGlobalObject());
5519  return Freeze(Handle<JSObject>::cast(proto));
5520  }
5521 
5522  // It's not possible to freeze objects with external array elements
5523  if (object->HasExternalArrayElements() ||
5524  object->HasFixedTypedArrayElements()) {
5525  Handle<Object> error =
5526  isolate->factory()->NewTypeError(
5527  "cant_prevent_ext_external_array_elements",
5528  HandleVector(&object, 1));
5529  isolate->Throw(*error);
5530  return Handle<Object>();
5531  }
5532 
5533  Handle<SeededNumberDictionary> new_element_dictionary;
5534  if (!object->elements()->IsDictionary()) {
5535  int length = object->IsJSArray()
5536  ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
5537  : object->elements()->length();
5538  if (length > 0) {
5539  int capacity = 0;
5540  int used = 0;
5541  object->GetElementsCapacityAndUsage(&capacity, &used);
5542  new_element_dictionary =
5543  isolate->factory()->NewSeededNumberDictionary(used);
5544 
5545  // Move elements to a dictionary; avoid calling NormalizeElements so we
5546  // do not trigger unnecessary transitions.
5547  new_element_dictionary = CopyFastElementsToDictionary(
5548  handle(object->elements()), length, new_element_dictionary);
5549  } else {
5550  // No existing elements, use a pre-allocated empty backing store
5551  new_element_dictionary =
5552  isolate->factory()->empty_slow_element_dictionary();
5553  }
5554  }
5555 
5556  LookupResult result(isolate);
5557  Handle<Map> old_map(object->map());
5558  old_map->LookupTransition(*object, isolate->heap()->frozen_symbol(), &result);
5559  if (result.IsTransition()) {
5560  Handle<Map> transition_map(result.GetTransitionTarget());
5561  ASSERT(transition_map->has_dictionary_elements());
5562  ASSERT(transition_map->is_frozen());
5563  ASSERT(!transition_map->is_extensible());
5564  JSObject::MigrateToMap(object, transition_map);
5565  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
5566  // Create a new descriptor array with fully-frozen properties
5567  int num_descriptors = old_map->NumberOfOwnDescriptors();
5568  Handle<DescriptorArray> new_descriptors =
5570  handle(old_map->instance_descriptors()), num_descriptors, FROZEN);
5572  old_map, new_descriptors, INSERT_TRANSITION,
5573  isolate->factory()->frozen_symbol());
5574  new_map->freeze();
5575  new_map->set_is_extensible(false);
5576  new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5577  JSObject::MigrateToMap(object, new_map);
5578  } else {
5579  // Slow path: need to normalize properties for safety
5581 
5582  // Create a new map, since other objects with this map may be extensible.
5583  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5584  Handle<Map> new_map = Map::Copy(handle(object->map()));
5585  new_map->freeze();
5586  new_map->set_is_extensible(false);
5587  new_map->set_elements_kind(DICTIONARY_ELEMENTS);
5588  JSObject::MigrateToMap(object, new_map);
5589 
5590  // Freeze dictionary-mode properties
5591  FreezeDictionary(object->property_dictionary());
5592  }
5593 
5594  ASSERT(object->map()->has_dictionary_elements());
5595  if (!new_element_dictionary.is_null()) {
5596  object->set_elements(*new_element_dictionary);
5597  }
5598 
5599  if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
5600  SeededNumberDictionary* dictionary = object->element_dictionary();
5601  // Make sure we never go back to the fast case
5602  dictionary->set_requires_slow_elements();
5603  // Freeze all elements in the dictionary
5604  FreezeDictionary(dictionary);
5605  }
5606 
5607  return object;
5608 }
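Freeze backs Object.freeze: data properties become non-writable and non-configurable, elements are moved into a slow (dictionary) backing store, and accessor pairs only lose configurability, since READ_ONLY is never applied to getters/setters (see FreezeDictionary above). A minimal sketch:

  var o = { a: 1, get b() { return 2; } };
  Object.freeze(o);

  Object.isFrozen(o);   // true
  o.a = 10;             // ignored (throws in strict mode)
  delete o.a;           // false
  o.b;                  // 2: the getter is still invoked normally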
5609 
5610 
5611 void JSObject::SetObserved(Handle<JSObject> object) {
5612  Isolate* isolate = object->GetIsolate();
5613 
5614  if (object->map()->is_observed())
5615  return;
5616 
5617  LookupResult result(isolate);
5618  object->map()->LookupTransition(*object,
5619  isolate->heap()->observed_symbol(),
5620  &result);
5621 
5622  Handle<Map> new_map;
5623  if (result.IsTransition()) {
5624  new_map = handle(result.GetTransitionTarget());
5625  ASSERT(new_map->is_observed());
5626  } else if (object->map()->CanHaveMoreTransitions()) {
5627  new_map = Map::CopyForObserved(handle(object->map()));
5628  } else {
5629  new_map = Map::Copy(handle(object->map()));
5630  new_map->set_is_observed();
5631  }
5632  JSObject::MigrateToMap(object, new_map);
5633 }
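SetObserved switches the object to an observed map so that subsequent mutations enqueue change records. In this V8 line it is driven by the since-removed Object.observe API, typically available only behind a harmony flag; a hedged sketch assuming it is enabled:

  var o = {};
  Object.observe(o, function(changes) {
    // e.g. one record: { object: o, type: "add", name: "x" }
    console.log(changes.length, changes[0].type, changes[0].name);
  });
  o.x = 1;   // enqueues an "add" change record, delivered asynchronously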
5634 
5635 
5636 Handle<JSObject> JSObject::Copy(Handle<JSObject> object) {
5637  Isolate* isolate = object->GetIsolate();
5638  CALL_HEAP_FUNCTION(isolate,
5639  isolate->heap()->CopyJSObject(*object), JSObject);
5640 }
5641 
5642 
5643 template<class ContextObject>
5644 class JSObjectWalkVisitor {
5645  public:
5646  JSObjectWalkVisitor(ContextObject* site_context, bool copying,
5647  JSObject::DeepCopyHints hints)
5648  : site_context_(site_context),
5649  copying_(copying),
5650  hints_(hints) {}
5651 
5652  Handle<JSObject> StructureWalk(Handle<JSObject> object);
5653 
5654  protected:
5655  inline Handle<JSObject> VisitElementOrProperty(Handle<JSObject> object,
5656  Handle<JSObject> value) {
5657  Handle<AllocationSite> current_site = site_context()->EnterNewScope();
5658  Handle<JSObject> copy_of_value = StructureWalk(value);
5659  site_context()->ExitScope(current_site, value);
5660  return copy_of_value;
5661  }
5662 
5663  inline ContextObject* site_context() { return site_context_; }
5664  inline Isolate* isolate() { return site_context()->isolate(); }
5665 
5666  inline bool copying() const { return copying_; }
5667 
5668  private:
5669  ContextObject* site_context_;
5670  const bool copying_;
5671  const JSObject::DeepCopyHints hints_;
5672 };
5673 
5674 
5675 template <class ContextObject>
5676 Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
5677  Handle<JSObject> object) {
5678  Isolate* isolate = this->isolate();
5679  bool copying = this->copying();
5680  bool shallow = hints_ == JSObject::kObjectIsShallowArray;
5681 
5682  if (!shallow) {
5683  StackLimitCheck check(isolate);
5684 
5685  if (check.HasOverflowed()) {
5686  isolate->StackOverflow();
5687  return Handle<JSObject>::null();
5688  }
5689  }
5690 
5691  if (object->map()->is_deprecated()) {
5692  JSObject::MigrateInstance(object);
5693  }
5694 
5695  Handle<JSObject> copy;
5696  if (copying) {
5697  Handle<AllocationSite> site_to_pass;
5698  if (site_context()->ShouldCreateMemento(object)) {
5699  site_to_pass = site_context()->current();
5700  }
5701  CALL_AND_RETRY_OR_DIE(isolate,
5702  isolate->heap()->CopyJSObject(*object,
5703  site_to_pass.is_null() ? NULL : *site_to_pass),
5704  { copy = Handle<JSObject>(JSObject::cast(__object__),
5705  isolate);
5706  break;
5707  },
5708  return Handle<JSObject>());
5709  } else {
5710  copy = object;
5711  }
5712 
5713  ASSERT(copying || copy.is_identical_to(object));
5714 
5715  ElementsKind kind = copy->GetElementsKind();
5716  if (copying && IsFastSmiOrObjectElementsKind(kind) &&
5717  FixedArray::cast(copy->elements())->map() ==
5718  isolate->heap()->fixed_cow_array_map()) {
5719  isolate->counters()->cow_arrays_created_runtime()->Increment();
5720  }
5721 
5722  if (!shallow) {
5723  HandleScope scope(isolate);
5724 
5725  // Deep copy local properties.
5726  if (copy->HasFastProperties()) {
5727  Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
5728  int limit = copy->map()->NumberOfOwnDescriptors();
5729  for (int i = 0; i < limit; i++) {
5730  PropertyDetails details = descriptors->GetDetails(i);
5731  if (details.type() != FIELD) continue;
5732  int index = descriptors->GetFieldIndex(i);
5733  Handle<Object> value(object->RawFastPropertyAt(index), isolate);
5734  if (value->IsJSObject()) {
5735  value = VisitElementOrProperty(copy, Handle<JSObject>::cast(value));
5737  } else {
5738  Representation representation = details.representation();
5739  value = NewStorageFor(isolate, value, representation);
5740  }
5741  if (copying) {
5742  copy->FastPropertyAtPut(index, *value);
5743  }
5744  }
5745  } else {
5746  Handle<FixedArray> names =
5747  isolate->factory()->NewFixedArray(copy->NumberOfLocalProperties());
5748  copy->GetLocalPropertyNames(*names, 0);
5749  for (int i = 0; i < names->length(); i++) {
5750  ASSERT(names->get(i)->IsString());
5751  Handle<String> key_string(String::cast(names->get(i)));
5752  PropertyAttributes attributes =
5753  JSReceiver::GetLocalPropertyAttribute(copy, key_string);
5754  // Only deep copy fields from the object literal expression.
5755  // In particular, don't try to copy the length attribute of
5756  // an array.
5757  if (attributes != NONE) continue;
5758  Handle<Object> value(
5759  copy->GetProperty(*key_string, &attributes)->ToObjectUnchecked(),
5760  isolate);
5761  if (value->IsJSObject()) {
5762  Handle<JSObject> result = VisitElementOrProperty(
5763  copy, Handle<JSObject>::cast(value));
5764  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5765  if (copying) {
5766  // Creating object copy for literals. No strict mode needed.
5768  copy, key_string, result, NONE, SLOPPY));
5769  }
5770  }
5771  }
5772  }
5773 
5774  // Deep copy local elements.
5775  // Pixel elements cannot be created using an object literal.
5776  ASSERT(!copy->HasExternalArrayElements());
5777  switch (kind) {
5778  case FAST_SMI_ELEMENTS:
5779  case FAST_ELEMENTS:
5781  case FAST_HOLEY_ELEMENTS: {
5782  Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
5783  if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
5784 #ifdef DEBUG
5785  for (int i = 0; i < elements->length(); i++) {
5786  ASSERT(!elements->get(i)->IsJSObject());
5787  }
5788 #endif
5789  } else {
5790  for (int i = 0; i < elements->length(); i++) {
5791  Handle<Object> value(elements->get(i), isolate);
5792  ASSERT(value->IsSmi() ||
5793  value->IsTheHole() ||
5794  (IsFastObjectElementsKind(copy->GetElementsKind())));
5795  if (value->IsJSObject()) {
5796  Handle<JSObject> result = VisitElementOrProperty(
5797  copy, Handle<JSObject>::cast(value));
5798  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5799  if (copying) {
5800  elements->set(i, *result);
5801  }
5802  }
5803  }
5804  }
5805  break;
5806  }
5807  case DICTIONARY_ELEMENTS: {
5808  Handle<SeededNumberDictionary> element_dictionary(
5809  copy->element_dictionary());
5810  int capacity = element_dictionary->Capacity();
5811  for (int i = 0; i < capacity; i++) {
5812  Object* k = element_dictionary->KeyAt(i);
5813  if (element_dictionary->IsKey(k)) {
5814  Handle<Object> value(element_dictionary->ValueAt(i), isolate);
5815  if (value->IsJSObject()) {
5816  Handle<JSObject> result = VisitElementOrProperty(
5817  copy, Handle<JSObject>::cast(value));
5818  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
5819  if (copying) {
5820  element_dictionary->ValueAtPut(i, *result);
5821  }
5822  }
5823  }
5824  }
5825  break;
5826  }
5828  UNIMPLEMENTED();
5829  break;
5830 
5831 
5832 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
5833  case EXTERNAL_##TYPE##_ELEMENTS: \
5834  case TYPE##_ELEMENTS: \
5835 
5837 #undef TYPED_ARRAY_CASE
5838 
5839  case FAST_DOUBLE_ELEMENTS:
5841  // No contained objects, nothing to do.
5842  break;
5843  }
5844  }
5845 
5846  return copy;
5847 }
5848 
5849 
5850 Handle<JSObject> JSObject::DeepWalk(
5851  Handle<JSObject> object,
5852  AllocationSiteCreationContext* site_context) {
5853  JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
5854  kNoHints);
5855  Handle<JSObject> result = v.StructureWalk(object);
5856  ASSERT(result.is_null() || result.is_identical_to(object));
5857  return result;
5858 }
5859 
5860 
5861 Handle<JSObject> JSObject::DeepCopy(Handle<JSObject> object,
5862  AllocationSiteUsageContext* site_context,
5863  DeepCopyHints hints) {
5864  JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
5865  Handle<JSObject> copy = v.StructureWalk(object);
5866  ASSERT(!copy.is_identical_to(object));
5867  return copy;
5868 }
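DeepWalk and DeepCopy support object/array literal boilerplates: DeepWalk records allocation sites over the template object, and DeepCopy produces the per-evaluation copy. The observable consequence is that every evaluation of a literal yields fresh nested objects:

  function make() {
    return { point: { x: 0, y: 0 }, list: [1, 2, 3] };
  }
  var a = make();
  var b = make();
  a.point === b.point;   // false: nested literal objects are copied, not shared
  a.list === b.list;     // false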
5869 
5870 
5871 // Tests for the fast common case for property enumeration:
5872 // - This object and all its prototypes have an enum cache (which means
5873 // that it is not a proxy, has no interceptors and needs no access checks).
5874 // - This object has no elements.
5875 // - No prototype has enumerable properties/elements.
5876 bool JSReceiver::IsSimpleEnum() {
5877  Heap* heap = GetHeap();
5878  for (Object* o = this;
5879  o != heap->null_value();
5880  o = JSObject::cast(o)->GetPrototype()) {
5881  if (!o->IsJSObject()) return false;
5882  JSObject* curr = JSObject::cast(o);
5883  int enum_length = curr->map()->EnumLength();
5884  if (enum_length == kInvalidEnumCacheSentinel) return false;
5885  if (curr->IsAccessCheckNeeded()) return false;
5886  ASSERT(!curr->HasNamedInterceptor());
5887  ASSERT(!curr->HasIndexedInterceptor());
5888  if (curr->NumberOfEnumElements() > 0) return false;
5889  if (curr != this && enum_length != 0) return false;
5890  }
5891  return true;
5892 }
5893 
5894 
5895 static bool FilterKey(Object* key, PropertyAttributes filter) {
5896  if ((filter & SYMBOLIC) && key->IsSymbol()) {
5897  return true;
5898  }
5899 
5900  if ((filter & PRIVATE_SYMBOL) &&
5901  key->IsSymbol() && Symbol::cast(key)->is_private()) {
5902  return true;
5903  }
5904 
5905  if ((filter & STRING) && !key->IsSymbol()) {
5906  return true;
5907  }
5908 
5909  return false;
5910 }
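FilterKey implements the string/symbol/private-symbol filters used during key enumeration, which is why string-keyed reflection APIs never report symbol keys and vice versa. A sketch, assuming symbols are enabled in this build (they were still a harmony feature in this V8 line):

  var sym = Symbol("s");
  var o = { plain: 1 };
  o[sym] = 2;

  Object.keys(o);                    // ["plain"]: symbol keys are filtered out
  Object.getOwnPropertyNames(o);     // ["plain"]
  Object.getOwnPropertySymbols(o);   // [sym]: string keys are filtered out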
5911 
5912 
5913 int Map::NumberOfDescribedProperties(DescriptorFlag which,
5914  PropertyAttributes filter) {
5915  int result = 0;
5916  DescriptorArray* descs = instance_descriptors();
5917  int limit = which == ALL_DESCRIPTORS
5918  ? descs->number_of_descriptors()
5919  : NumberOfOwnDescriptors();
5920  for (int i = 0; i < limit; i++) {
5921  if ((descs->GetDetails(i).attributes() & filter) == 0 &&
5922  !FilterKey(descs->GetKey(i), filter)) {
5923  result++;
5924  }
5925  }
5926  return result;
5927 }
5928 
5929 
5930 int Map::NextFreePropertyIndex() {
5931  int max_index = -1;
5932  int number_of_own_descriptors = NumberOfOwnDescriptors();
5933  DescriptorArray* descs = instance_descriptors();
5934  for (int i = 0; i < number_of_own_descriptors; i++) {
5935  if (descs->GetType(i) == FIELD) {
5936  int current_index = descs->GetFieldIndex(i);
5937  if (current_index > max_index) max_index = current_index;
5938  }
5939  }
5940  return max_index + 1;
5941 }
5942 
5943 
5945  DescriptorArray* descs = instance_descriptors();
5946  int number_of_own_descriptors = NumberOfOwnDescriptors();
5947  for (int i = 0; i < number_of_own_descriptors; i++) {
5948  if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) {
5949  return descs->GetCallbacks(i);
5950  }
5951  }
5952  return NULL;
5953 }
5954 
5955 
5956 void JSReceiver::LocalLookup(
5957  Name* name, LookupResult* result, bool search_hidden_prototypes) {
5958  ASSERT(name->IsName());
5959 
5960  Heap* heap = GetHeap();
5961 
5962  if (IsJSGlobalProxy()) {
5963  Object* proto = GetPrototype();
5964  if (proto->IsNull()) return result->NotFound();
5965  ASSERT(proto->IsJSGlobalObject());
5966  return JSReceiver::cast(proto)->LocalLookup(
5967  name, result, search_hidden_prototypes);
5968  }
5969 
5970  if (IsJSProxy()) {
5971  result->HandlerResult(JSProxy::cast(this));
5972  return;
5973  }
5974 
5975  // Do not use inline caching if the object is a non-global object
5976  // that requires access checks.
5977  if (IsAccessCheckNeeded()) {
5978  result->DisallowCaching();
5979  }
5980 
5981  JSObject* js_object = JSObject::cast(this);
5982 
5983  // Check for lookup interceptor except when bootstrapping.
5984  if (js_object->HasNamedInterceptor() &&
5985  !heap->isolate()->bootstrapper()->IsActive()) {
5986  result->InterceptorResult(js_object);
5987  return;
5988  }
5989 
5990  js_object->LocalLookupRealNamedProperty(name, result);
5991  if (result->IsFound() || !search_hidden_prototypes) return;
5992 
5993  Object* proto = js_object->GetPrototype();
5994  if (!proto->IsJSReceiver()) return;
5995  JSReceiver* receiver = JSReceiver::cast(proto);
5996  if (receiver->map()->is_hidden_prototype()) {
5997  receiver->LocalLookup(name, result, search_hidden_prototypes);
5998  }
5999 }
6000 
6001 
6002 void JSReceiver::Lookup(Name* name, LookupResult* result) {
6003  // Ecma-262 3rd 8.6.2.4
6004  Heap* heap = GetHeap();
6005  for (Object* current = this;
6006  current != heap->null_value();
6007  current = JSObject::cast(current)->GetPrototype()) {
6008  JSReceiver::cast(current)->LocalLookup(name, result, false);
6009  if (result->IsFound()) return;
6010  }
6011  result->NotFound();
6012 }
6013 
6014 
6015 // Search object and its prototype chain for callback properties.
6016 void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) {
6017  Heap* heap = GetHeap();
6018  for (Object* current = this;
6019  current != heap->null_value() && current->IsJSObject();
6020  current = JSObject::cast(current)->GetPrototype()) {
6021  JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
6022  if (result->IsPropertyCallbacks()) return;
6023  }
6024  result->NotFound();
6025 }
6026 
6027 
6028 // Try to update an accessor in an elements dictionary. Return true if the
6029 // update succeeded, and false otherwise.
6030 static bool UpdateGetterSetterInDictionary(
6031  SeededNumberDictionary* dictionary,
6032  uint32_t index,
6033  Object* getter,
6034  Object* setter,
6035  PropertyAttributes attributes) {
6036  int entry = dictionary->FindEntry(index);
6037  if (entry != SeededNumberDictionary::kNotFound) {
6038  Object* result = dictionary->ValueAt(entry);
6039  PropertyDetails details = dictionary->DetailsAt(entry);
6040  if (details.type() == CALLBACKS && result->IsAccessorPair()) {
6041  ASSERT(!details.IsDontDelete());
6042  if (details.attributes() != attributes) {
6043  dictionary->DetailsAtPut(
6044  entry,
6045  PropertyDetails(attributes, CALLBACKS, index));
6046  }
6047  AccessorPair::cast(result)->SetComponents(getter, setter);
6048  return true;
6049  }
6050  }
6051  return false;
6052 }
6053 
6054 
6055 void JSObject::DefineElementAccessor(Handle<JSObject> object,
6056  uint32_t index,
6057  Handle<Object> getter,
6058  Handle<Object> setter,
6059  PropertyAttributes attributes,
6060  v8::AccessControl access_control) {
6061  switch (object->GetElementsKind()) {
6062  case FAST_SMI_ELEMENTS:
6063  case FAST_ELEMENTS:
6064  case FAST_DOUBLE_ELEMENTS:
6066  case FAST_HOLEY_ELEMENTS:
6068  break;
6069 
6070 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6071  case EXTERNAL_##TYPE##_ELEMENTS: \
6072  case TYPE##_ELEMENTS: \
6073 
6075 #undef TYPED_ARRAY_CASE
6076  // Ignore getters and setters on pixel and external array elements.
6077  return;
6078 
6079  case DICTIONARY_ELEMENTS:
6080  if (UpdateGetterSetterInDictionary(object->element_dictionary(),
6081  index,
6082  *getter,
6083  *setter,
6084  attributes)) {
6085  return;
6086  }
6087  break;
6089  // Ascertain whether we have read-only properties or an existing
6090  // getter/setter pair in an arguments elements dictionary backing
6091  // store.
6092  FixedArray* parameter_map = FixedArray::cast(object->elements());
6093  uint32_t length = parameter_map->length();
6094  Object* probe =
6095  index < (length - 2) ? parameter_map->get(index + 2) : NULL;
6096  if (probe == NULL || probe->IsTheHole()) {
6097  FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
6098  if (arguments->IsDictionary()) {
6099  SeededNumberDictionary* dictionary =
6100  SeededNumberDictionary::cast(arguments);
6101  if (UpdateGetterSetterInDictionary(dictionary,
6102  index,
6103  *getter,
6104  *setter,
6105  attributes)) {
6106  return;
6107  }
6108  }
6109  }
6110  break;
6111  }
6112  }
6113 
6114  Isolate* isolate = object->GetIsolate();
6115  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
6116  accessors->SetComponents(*getter, *setter);
6117  accessors->set_access_flags(access_control);
6118 
6119  SetElementCallback(object, index, accessors, attributes);
6120 }
6121 
6122 
6123 Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object,
6124  Handle<Name> name) {
6125  Isolate* isolate = object->GetIsolate();
6126  LookupResult result(isolate);
6127  object->LocalLookupRealNamedProperty(*name, &result);
6128  if (result.IsPropertyCallbacks()) {
6129  // Note that the result can actually have IsDontDelete() == true when we
6130  // e.g. have to fall back to the slow case while adding a setter after
6131  // successfully reusing a map transition for a getter. Nevertheless, this is
6132  // OK, because the assertion only holds for the whole addition of both
6133  // accessors, not for the addition of each part. See first comment in
6134  // DefinePropertyAccessor below.
6135  Object* obj = result.GetCallbackObject();
6136  if (obj->IsAccessorPair()) {
6137  return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
6138  }
6139  }
6140  return isolate->factory()->NewAccessorPair();
6141 }
6142 
6143 
6144 void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
6145  Handle<Name> name,
6146  Handle<Object> getter,
6147  Handle<Object> setter,
6148  PropertyAttributes attributes,
6149  v8::AccessControl access_control) {
6150  // We could assert that the property is configurable here, but we would need
6151  // to do a lookup, which seems to be a bit of overkill.
6152  bool only_attribute_changes = getter->IsNull() && setter->IsNull();
6153  if (object->HasFastProperties() && !only_attribute_changes &&
6154  access_control == v8::DEFAULT &&
6155  (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) {
6156  bool getterOk = getter->IsNull() ||
6157  DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
6158  bool setterOk = !getterOk || setter->IsNull() ||
6159  DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
6160  if (getterOk && setterOk) return;
6161  }
6162 
6163  Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
6164  accessors->SetComponents(*getter, *setter);
6165  accessors->set_access_flags(access_control);
6166 
6167  SetPropertyCallback(object, name, accessors, attributes);
6168 }
6169 
6170 
6171 bool JSObject::CanSetCallback(Handle<JSObject> object, Handle<Name> name) {
6172  Isolate* isolate = object->GetIsolate();
6173  ASSERT(!object->IsAccessCheckNeeded() ||
6174  isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET));
6175 
6176  // Check if there is an API defined callback object which prohibits
6177  // callback overwriting in this object or its prototype chain.
6178  // This mechanism is needed for instance in a browser setting, where
6179  // certain accessors such as window.location should not be allowed
6180  // to be overwritten because allowing overwriting could potentially
6181  // cause security problems.
6182  LookupResult callback_result(isolate);
6183  object->LookupCallbackProperty(*name, &callback_result);
6184  if (callback_result.IsFound()) {
6185  Object* callback_obj = callback_result.GetCallbackObject();
6186  if (callback_obj->IsAccessorInfo()) {
6187  return !AccessorInfo::cast(callback_obj)->prohibits_overwriting();
6188  }
6189  if (callback_obj->IsAccessorPair()) {
6190  return !AccessorPair::cast(callback_obj)->prohibits_overwriting();
6191  }
6192  }
6193  return true;
6194 }
6195 
6196 
6198  Heap* heap = GetHeap();
6199 
6201  return false;
6202  }
6203 
6204  for (Object* prototype = this->prototype();
6205  prototype != heap->null_value();
6206  prototype = prototype->GetPrototype(GetIsolate())) {
6207  if (prototype->IsJSProxy()) {
6208  // Be conservative, don't walk into proxies.
6209  return true;
6210  }
6211 
6213  JSObject::cast(prototype)->map()->elements_kind())) {
6214  return true;
6215  }
6216  }
6217 
6218  return false;
6219 }
6220 
6221 
6222 void JSObject::SetElementCallback(Handle<JSObject> object,
6223  uint32_t index,
6224  Handle<Object> structure,
6225  PropertyAttributes attributes) {
6226  Heap* heap = object->GetHeap();
6227  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6228 
6229  // Normalize elements to make this operation simple.
6230  bool had_dictionary_elements = object->HasDictionaryElements();
6231  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
6232  ASSERT(object->HasDictionaryElements() ||
6233  object->HasDictionaryArgumentsElements());
6234  // Update the dictionary with the new CALLBACKS property.
6235  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
6236  details);
6237  dictionary->set_requires_slow_elements();
6238 
6239  // Update the dictionary backing store on the object.
6240  if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
6241  // Also delete any parameter alias.
6242  //
6243  // TODO(kmillikin): when deleting the last parameter alias we could
6244  // switch to a direct backing store without the parameter map. This
6245  // would allow GC of the context.
6246  FixedArray* parameter_map = FixedArray::cast(object->elements());
6247  if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
6248  parameter_map->set(index + 2, heap->the_hole_value());
6249  }
6250  parameter_map->set(1, *dictionary);
6251  } else {
6252  object->set_elements(*dictionary);
6253 
6254  if (!had_dictionary_elements) {
6255  // KeyedStoreICs (at least the non-generic ones) need a reset.
6256  heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
6257  }
6258  }
6259 }
6260 
6261 
6262 void JSObject::SetPropertyCallback(Handle<JSObject> object,
6263  Handle<Name> name,
6264  Handle<Object> structure,
6265  PropertyAttributes attributes) {
6266  // Normalize object to make this operation simple.
6268 
6269  // For the global object allocate a new map to invalidate the global inline
6270  // caches which have a global property cell reference directly in the code.
6271  if (object->IsGlobalObject()) {
6272  Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
6273  ASSERT(new_map->is_dictionary_map());
6274  object->set_map(*new_map);
6275 
6276  // When running crankshaft, changing the map is not enough. We
6277  // need to deoptimize all functions that rely on this global
6278  // object.
6280  }
6281 
6282  // Update the dictionary with the new CALLBACKS property.
6283  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
6284  SetNormalizedProperty(object, name, structure, details);
6285 }
6286 
6287 
6288 void JSObject::DefineAccessor(Handle<JSObject> object,
6289  Handle<Name> name,
6290  Handle<Object> getter,
6291  Handle<Object> setter,
6292  PropertyAttributes attributes,
6293  v8::AccessControl access_control) {
6294  Isolate* isolate = object->GetIsolate();
6295  // Check access rights if needed.
6296  if (object->IsAccessCheckNeeded() &&
6297  !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
6299  return;
6300  }
6301 
6302  if (object->IsJSGlobalProxy()) {
6303  Handle<Object> proto(object->GetPrototype(), isolate);
6304  if (proto->IsNull()) return;
6305  ASSERT(proto->IsJSGlobalObject());
6307  name,
6308  getter,
6309  setter,
6310  attributes,
6311  access_control);
6312  return;
6313  }
6314 
6315  // Make sure that the top context does not change when doing callbacks or
6316  // interceptor calls.
6317  AssertNoContextChange ncc(isolate);
6318 
6319  // Try to flatten before operating on the string.
6320  if (name->IsString()) String::cast(*name)->TryFlatten();
6321 
6322  if (!JSObject::CanSetCallback(object, name)) return;
6323 
6324  uint32_t index = 0;
6325  bool is_element = name->AsArrayIndex(&index);
6326 
6327  Handle<Object> old_value = isolate->factory()->the_hole_value();
6328  bool is_observed = object->map()->is_observed() &&
6329  *name != isolate->heap()->hidden_string();
6330  bool preexists = false;
6331  if (is_observed) {
6332  if (is_element) {
6333  preexists = HasLocalElement(object, index);
6334  if (preexists && object->GetLocalElementAccessorPair(index) == NULL) {
6335  old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
6336  }
6337  } else {
6338  LookupResult lookup(isolate);
6339  object->LocalLookup(*name, &lookup, true);
6340  preexists = lookup.IsProperty();
6341  if (preexists && lookup.IsDataProperty()) {
6342  old_value = Object::GetProperty(object, name);
6343  CHECK_NOT_EMPTY_HANDLE(isolate, old_value);
6344  }
6345  }
6346  }
6347 
6348  if (is_element) {
6349  DefineElementAccessor(
6350  object, index, getter, setter, attributes, access_control);
6351  } else {
6352  DefinePropertyAccessor(
6353  object, name, getter, setter, attributes, access_control);
6354  }
6355 
6356  if (is_observed) {
6357  const char* type = preexists ? "reconfigure" : "add";
6358  EnqueueChangeRecord(object, type, name, old_value);
6359  }
6360 }
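DefineAccessor is the engine side of installing getter/setter properties, for example via Object.defineProperty or __defineGetter__; on observed objects it enqueues an "add" or "reconfigure" record as computed above. A minimal sketch:

  var o = {};
  Object.defineProperty(o, "now", {
    get: function() { return Date.now(); },
    enumerable: true,
    configurable: true
  });
  typeof o.now;   // "number": the getter runs on every read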
6361 
6362 
6363 static bool TryAccessorTransition(Handle<JSObject> self,
6364  Handle<Map> transitioned_map,
6365  int target_descriptor,
6366  AccessorComponent component,
6367  Handle<Object> accessor,
6368  PropertyAttributes attributes) {
6369  DescriptorArray* descs = transitioned_map->instance_descriptors();
6370  PropertyDetails details = descs->GetDetails(target_descriptor);
6371 
6372  // If the transition target was not callbacks, fall back to the slow case.
6373  if (details.type() != CALLBACKS) return false;
6374  Object* descriptor = descs->GetCallbacksObject(target_descriptor);
6375  if (!descriptor->IsAccessorPair()) return false;
6376 
6377  Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
6378  PropertyAttributes target_attributes = details.attributes();
6379 
6380  // Reuse transition if adding same accessor with same attributes.
6381  if (target_accessor == *accessor && target_attributes == attributes) {
6382  JSObject::MigrateToMap(self, transitioned_map);
6383  return true;
6384  }
6385 
6386  // If either not the same accessor, or not the same attributes, fall back to
6387  // the slow case.
6388  return false;
6389 }
6390 
6391 
6392 static MaybeObject* CopyInsertDescriptor(Map* map,
6393  Name* name,
6394  AccessorPair* accessors,
6395  PropertyAttributes attributes) {
6396  CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
6397  return map->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION);
6398 }
6399 
6400 
6401 static Handle<Map> CopyInsertDescriptor(Handle<Map> map,
6402  Handle<Name> name,
6403  Handle<AccessorPair> accessors,
6404  PropertyAttributes attributes) {
6405  CALL_HEAP_FUNCTION(map->GetIsolate(),
6406  CopyInsertDescriptor(*map, *name, *accessors, attributes),
6407  Map);
6408 }
6409 
6410 
6411 bool JSObject::DefineFastAccessor(Handle<JSObject> object,
6412  Handle<Name> name,
6413  AccessorComponent component,
6414  Handle<Object> accessor,
6415  PropertyAttributes attributes) {
6416  ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
6417  Isolate* isolate = object->GetIsolate();
6418  LookupResult result(isolate);
6419  object->LocalLookup(*name, &result);
6420 
6421  if (result.IsFound() && !result.IsPropertyCallbacks()) {
6422  return false;
6423  }
6424 
6425  // Return success if the same accessor with the same attributes already exists.
6426  AccessorPair* source_accessors = NULL;
6427  if (result.IsPropertyCallbacks()) {
6428  Object* callback_value = result.GetCallbackObject();
6429  if (callback_value->IsAccessorPair()) {
6430  source_accessors = AccessorPair::cast(callback_value);
6431  Object* entry = source_accessors->get(component);
6432  if (entry == *accessor && result.GetAttributes() == attributes) {
6433  return true;
6434  }
6435  } else {
6436  return false;
6437  }
6438 
6439  int descriptor_number = result.GetDescriptorIndex();
6440 
6441  object->map()->LookupTransition(*object, *name, &result);
6442 
6443  if (result.IsFound()) {
6444  Handle<Map> target(result.GetTransitionTarget());
6445  ASSERT(target->NumberOfOwnDescriptors() ==
6446  object->map()->NumberOfOwnDescriptors());
6447  // This works since descriptors are sorted in order of addition.
6448  ASSERT(object->map()->instance_descriptors()->
6449  GetKey(descriptor_number) == *name);
6450  return TryAccessorTransition(object, target, descriptor_number,
6451  component, accessor, attributes);
6452  }
6453  } else {
6454  // If not, lookup a transition.
6455  object->map()->LookupTransition(*object, *name, &result);
6456 
6457  // If there is a transition, try to follow it.
6458  if (result.IsFound()) {
6459  Handle<Map> target(result.GetTransitionTarget());
6460  int descriptor_number = target->LastAdded();
6461  ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
6462  ->Equals(*name));
6463  return TryAccessorTransition(object, target, descriptor_number,
6464  component, accessor, attributes);
6465  }
6466  }
6467 
6468  // If there is no transition yet, add a transition to a new accessor pair
6469  // containing the accessor. Allocate a new pair if there were no source
6470  // accessors. Otherwise, copy the pair and modify the accessor.
6471  Handle<AccessorPair> accessors = source_accessors != NULL
6472  ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
6473  : isolate->factory()->NewAccessorPair();
6474  accessors->set(component, *accessor);
6475  Handle<Map> new_map = CopyInsertDescriptor(Handle<Map>(object->map()),
6476  name, accessors, attributes);
6477  JSObject::MigrateToMap(object, new_map);
6478  return true;
6479 }
6480 
6481 
6482 Handle<Object> JSObject::SetAccessor(Handle<JSObject> object,
6483  Handle<AccessorInfo> info) {
6484  Isolate* isolate = object->GetIsolate();
6485  Factory* factory = isolate->factory();
6486  Handle<Name> name(Name::cast(info->name()));
6487 
6488  // Check access rights if needed.
6489  if (object->IsAccessCheckNeeded() &&
6490  !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_SET)) {
6493  return factory->undefined_value();
6494  }
6495 
6496  if (object->IsJSGlobalProxy()) {
6497  Handle<Object> proto(object->GetPrototype(), isolate);
6498  if (proto->IsNull()) return object;
6499  ASSERT(proto->IsJSGlobalObject());
6500  return SetAccessor(Handle<JSObject>::cast(proto), info);
6501  }
6502 
6503  // Make sure that the top context does not change when doing callbacks or
6504  // interceptor calls.
6505  AssertNoContextChange ncc(isolate);
6506 
6507  // Try to flatten before operating on the string.
6508  if (name->IsString()) FlattenString(Handle<String>::cast(name));
6509 
6510  if (!JSObject::CanSetCallback(object, name)) {
6511  return factory->undefined_value();
6512  }
6513 
6514  uint32_t index = 0;
6515  bool is_element = name->AsArrayIndex(&index);
6516 
6517  if (is_element) {
6518  if (object->IsJSArray()) return factory->undefined_value();
6519 
6520  // Accessors overwrite previous callbacks (cf. with getters/setters).
6521  switch (object->GetElementsKind()) {
6522  case FAST_SMI_ELEMENTS:
6523  case FAST_ELEMENTS:
6524  case FAST_DOUBLE_ELEMENTS:
6526  case FAST_HOLEY_ELEMENTS:
6528  break;
6529 
6530 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
6531  case EXTERNAL_##TYPE##_ELEMENTS: \
6532  case TYPE##_ELEMENTS: \
6533 
6535 #undef TYPED_ARRAY_CASE
6536  // Ignore getters and setters on pixel and external array
6537  // elements.
6538  return factory->undefined_value();
6539 
6540  case DICTIONARY_ELEMENTS:
6541  break;
6543  UNIMPLEMENTED();
6544  break;
6545  }
6546 
6547  SetElementCallback(object, index, info, info->property_attributes());
6548  } else {
6549  // Lookup the name.
6550  LookupResult result(isolate);
6551  object->LocalLookup(*name, &result, true);
6552  // ES5 forbids turning a property into an accessor if it's not
6553  // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
6554  if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
6555  return factory->undefined_value();
6556  }
6557 
6558  SetPropertyCallback(object, name, info, info->property_attributes());
6559  }
6560 
6561  return object;
6562 }
6563 
6564 
6565 Handle<Object> JSObject::GetAccessor(Handle<JSObject> object,
6566  Handle<Name> name,
6567  AccessorComponent component) {
6568  Isolate* isolate = object->GetIsolate();
6569 
6570  // Make sure that the top context does not change when doing callbacks or
6571  // interceptor calls.
6572  AssertNoContextChange ncc(isolate);
6573 
6574  // Check access rights if needed.
6575  if (object->IsAccessCheckNeeded() &&
6576  !isolate->MayNamedAccessWrapper(object, name, v8::ACCESS_HAS)) {
6579  return isolate->factory()->undefined_value();
6580  }
6581 
6582  // Make the lookup and include prototypes.
6583  uint32_t index = 0;
6584  if (name->AsArrayIndex(&index)) {
6585  for (Handle<Object> obj = object;
6586  !obj->IsNull();
6587  obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
6588  if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) {
6589  JSObject* js_object = JSObject::cast(*obj);
6590  SeededNumberDictionary* dictionary = js_object->element_dictionary();
6591  int entry = dictionary->FindEntry(index);
6592  if (entry != SeededNumberDictionary::kNotFound) {
6593  Object* element = dictionary->ValueAt(entry);
6594  if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
6595  element->IsAccessorPair()) {
6596  return handle(AccessorPair::cast(element)->GetComponent(component),
6597  isolate);
6598  }
6599  }
6600  }
6601  }
6602  } else {
6603  for (Handle<Object> obj = object;
6604  !obj->IsNull();
6605  obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
6606  LookupResult result(isolate);
6607  JSReceiver::cast(*obj)->LocalLookup(*name, &result);
6608  if (result.IsFound()) {
6609  if (result.IsReadOnly()) return isolate->factory()->undefined_value();
6610  if (result.IsPropertyCallbacks()) {
6611  Object* obj = result.GetCallbackObject();
6612  if (obj->IsAccessorPair()) {
6613  return handle(AccessorPair::cast(obj)->GetComponent(component),
6614  isolate);
6615  }
6616  }
6617  }
6618  }
6619  }
6620  return isolate->factory()->undefined_value();
6621 }
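GetAccessor walks the prototype chain for an accessor pair and returns the requested component; this is the machinery behind lookups such as __lookupGetter__/__lookupSetter__ (stated here as an assumption about how the runtime uses it). A minimal sketch:

  var proto = {};
  Object.defineProperty(proto, "x", {
    get: function() { return 1; },
    configurable: true
  });
  var o = Object.create(proto);

  o.__lookupGetter__("x");   // the getter defined on the prototype
  o.__lookupSetter__("x");   // undefined: no setter component was installed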
6622 
6623 
6624 Object* JSObject::SlowReverseLookup(Object* value) {
6625  if (HasFastProperties()) {
6626  int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
6627  DescriptorArray* descs = map()->instance_descriptors();
6628  for (int i = 0; i < number_of_own_descriptors; i++) {
6629  if (descs->GetType(i) == FIELD) {
6630  Object* property = RawFastPropertyAt(descs->GetFieldIndex(i));
6631  if (descs->GetDetails(i).representation().IsDouble()) {
6632  ASSERT(property->IsHeapNumber());
6633  if (value->IsNumber() && property->Number() == value->Number()) {
6634  return descs->GetKey(i);
6635  }
6636  } else if (property == value) {
6637  return descs->GetKey(i);
6638  }
6639  } else if (descs->GetType(i) == CONSTANT) {
6640  if (descs->GetConstant(i) == value) {
6641  return descs->GetKey(i);
6642  }
6643  }
6644  }
6645  return GetHeap()->undefined_value();
6646  } else {
6647  return property_dictionary()->SlowReverseLookup(value);
6648  }
6649 }
6650 
6651 
6652 Handle<Map> Map::RawCopy(Handle<Map> map,
6653  int instance_size) {
6654  CALL_HEAP_FUNCTION(map->GetIsolate(),
6655  map->RawCopy(instance_size),
6656  Map);
6657 }
6658 
6659 
6660 MaybeObject* Map::RawCopy(int instance_size) {
6661  Map* result;
6662  MaybeObject* maybe_result =
6663  GetHeap()->AllocateMap(instance_type(), instance_size);
6664  if (!maybe_result->To(&result)) return maybe_result;
6665 
6666  result->set_prototype(prototype());
6667  result->set_constructor(constructor());
6668  result->set_bit_field(bit_field());
6669  result->set_bit_field2(bit_field2());
6670  int new_bit_field3 = bit_field3();
6671  new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
6672  new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
6673  new_bit_field3 = EnumLengthBits::update(new_bit_field3,
6674  kInvalidEnumCacheSentinel);
6675  new_bit_field3 = Deprecated::update(new_bit_field3, false);
6676  if (!is_dictionary_map()) {
6677  new_bit_field3 = IsUnstable::update(new_bit_field3, false);
6678  }
6679  result->set_bit_field3(new_bit_field3);
6680  return result;
6681 }
6682 
6683 
6684 Handle<Map> Map::CopyNormalized(Handle<Map> map,
6685  PropertyNormalizationMode mode,
6686  NormalizedMapSharingMode sharing) {
6687  int new_instance_size = map->instance_size();
6688  if (mode == CLEAR_INOBJECT_PROPERTIES) {
6689  new_instance_size -= map->inobject_properties() * kPointerSize;
6690  }
6691 
6692  Handle<Map> result = Map::RawCopy(map, new_instance_size);
6693 
6694  if (mode != CLEAR_INOBJECT_PROPERTIES) {
6695  result->set_inobject_properties(map->inobject_properties());
6696  }
6697 
6698  result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
6699  result->set_dictionary_map(true);
6700  result->set_migration_target(false);
6701 
6702 #ifdef VERIFY_HEAP
6703  if (FLAG_verify_heap && result->is_shared()) {
6704  result->SharedMapVerify();
6705  }
6706 #endif
6707 
6708  return result;
6709 }
6710 
6711 
6712 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
6713  CALL_HEAP_FUNCTION(map->GetIsolate(), map->CopyDropDescriptors(), Map);
6714 }
6715 
6716 
6717 MaybeObject* Map::CopyDropDescriptors() {
6718  Map* result;
6719  MaybeObject* maybe_result = RawCopy(instance_size());
6720  if (!maybe_result->To(&result)) return maybe_result;
6721 
6722  // Please note instance_type and instance_size are set when allocated.
6725 
6727  result->set_is_shared(false);
6728  result->ClearCodeCache(GetHeap());
6730  return result;
6731 }
6732 
6733 
6734 MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors,
6735  Descriptor* descriptor) {
6736  // Sanity check. This path is only to be taken if the map owns its descriptor
6737  // array, implying that its NumberOfOwnDescriptors equals the number of
6738  // descriptors in the descriptor array.
6740  instance_descriptors()->number_of_descriptors());
6741  Map* result;
6742  MaybeObject* maybe_result = CopyDropDescriptors();
6743  if (!maybe_result->To(&result)) return maybe_result;
6744 
6745  Name* name = descriptor->GetKey();
6746 
6747  TransitionArray* transitions;
6748  MaybeObject* maybe_transitions =
6749  AddTransition(name, result, SIMPLE_TRANSITION);
6750  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
6751 
6752  int old_size = descriptors->number_of_descriptors();
6753 
6754  DescriptorArray* new_descriptors;
6755 
6756  if (descriptors->NumberOfSlackDescriptors() > 0) {
6757  new_descriptors = descriptors;
6758  new_descriptors->Append(descriptor);
6759  } else {
6760  // Descriptor arrays grow by 50%.
6761  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
6762  GetIsolate(), old_size, old_size < 4 ? 1 : old_size / 2);
6763  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
6764 
6765  DescriptorArray::WhitenessWitness witness(new_descriptors);
6766 
6767  // Copy the descriptors, inserting a descriptor.
6768  for (int i = 0; i < old_size; ++i) {
6769  new_descriptors->CopyFrom(i, descriptors, i, witness);
6770  }
6771 
6772  new_descriptors->Append(descriptor, witness);
6773 
6774  if (old_size > 0) {
6775  // If the source descriptors had an enum cache we copy it. This ensures
6776  // that the maps to which we push the new descriptor array back can rely
6777  // on a cache always being available once it is set. If the map has more
6778  // enumerated descriptors than available in the original cache, the cache
6779  // will be lazily replaced by the extended cache when needed.
6780  if (descriptors->HasEnumCache()) {
6781  new_descriptors->CopyEnumCacheFrom(descriptors);
6782  }
6783 
6784  Map* map;
6785  // Replace descriptors by new_descriptors in all maps that share it.
6786 
6787  GetHeap()->incremental_marking()->RecordWrites(descriptors);
6788  for (Object* current = GetBackPointer();
6789  !current->IsUndefined();
6790  current = map->GetBackPointer()) {
6791  map = Map::cast(current);
6792  if (map->instance_descriptors() != descriptors) break;
6793  map->set_instance_descriptors(new_descriptors);
6794  }
6795 
6796  set_instance_descriptors(new_descriptors);
6797  }
6798  }
6799 
6800  result->SetBackPointer(this);
6801  result->InitializeDescriptors(new_descriptors);
6803 
6804  set_transitions(transitions);
6805  set_owns_descriptors(false);
6806 
6807  return result;
6808 }
6809 
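// [Editor's sketch] The allocation above, DescriptorArray::Allocate(GetIsolate(),
// old_size, old_size < 4 ? 1 : old_size / 2), is the "grow by 50%" policy the
// comment refers to: tiny arrays get one spare slot, larger ones get half their
// size as slack. Restated in isolation (illustrative only, not part of
// objects.cc; the helper name is the editor's):
static int DescriptorSlackFor(int old_size) {
  return old_size < 4 ? 1 : old_size / 2;
}
// e.g. DescriptorSlackFor(2) == 1 (room for 3 descriptors in total), while
// DescriptorSlackFor(8) == 4 (room for 12), so a run of single-descriptor
// additions only triggers a reallocation at each ~50% growth step.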
6810 
6812  Handle<DescriptorArray> descriptors,
6813  TransitionFlag flag,
6814  Handle<Name> name) {
6815  CALL_HEAP_FUNCTION(map->GetIsolate(),
6816  map->CopyReplaceDescriptors(*descriptors, flag, *name),
6817  Map);
6818 }
6819 
6820 
6822  TransitionFlag flag,
6823  Name* name,
6824  SimpleTransitionFlag simple_flag) {
6825  ASSERT(descriptors->IsSortedNoDuplicates());
6826 
6827  Map* result;
6828  MaybeObject* maybe_result = CopyDropDescriptors();
6829  if (!maybe_result->To(&result)) return maybe_result;
6830 
6831  result->InitializeDescriptors(descriptors);
6832 
6833  if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) {
6834  TransitionArray* transitions;
6835  MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag);
6836  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
6837  set_transitions(transitions);
6838  result->SetBackPointer(this);
6839  } else {
6841  }
6842 
6843  return result;
6844 }
6845 
6846 
6847 // Since this method is used to rewrite an existing transition tree, it can
6848 // always insert transitions without checking.
6850  int new_descriptor,
6851  Handle<DescriptorArray> descriptors) {
6852  ASSERT(descriptors->IsSortedNoDuplicates());
6853 
6854  Handle<Map> result = Map::CopyDropDescriptors(map);
6855 
6856  result->InitializeDescriptors(*descriptors);
6857  result->SetNumberOfOwnDescriptors(new_descriptor + 1);
6858 
6859  int unused_property_fields = map->unused_property_fields();
6860  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
6861  unused_property_fields = map->unused_property_fields() - 1;
6862  if (unused_property_fields < 0) {
6863  unused_property_fields += JSObject::kFieldsAdded;
6864  }
6865  }
6866 
6867  result->set_unused_property_fields(unused_property_fields);
6868  result->set_owns_descriptors(false);
6869 
6870  Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
6871  Handle<TransitionArray> transitions = Map::AddTransition(map, name, result,
6873 
6874  map->set_transitions(*transitions);
6875  result->SetBackPointer(*map);
6876 
6877  return result;
6878 }
6879 
6880 
6882  if (flag == INSERT_TRANSITION) {
6887  (kind == DICTIONARY_ELEMENTS ||
6888  IsExternalArrayElementsKind(kind))));
6889  ASSERT(!IsFastElementsKind(kind) ||
6891  ASSERT(kind != elements_kind());
6892  }
6893 
6894  bool insert_transition =
6896 
6897  if (insert_transition && owns_descriptors()) {
6898  // In case the map owned its own descriptors, share the descriptors and
6899  // transfer ownership to the new map.
6900  Map* new_map;
6901  MaybeObject* maybe_new_map = CopyDropDescriptors();
6902  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
6903 
6904  MaybeObject* added_elements = set_elements_transition_map(new_map);
6905  if (added_elements->IsFailure()) return added_elements;
6906 
6907  new_map->set_elements_kind(kind);
6908  new_map->InitializeDescriptors(instance_descriptors());
6909  new_map->SetBackPointer(this);
6910  set_owns_descriptors(false);
6911  return new_map;
6912  }
6913 
6914  // In case the map did not own its own descriptors, a split is forced by
6915  // copying the map and creating a new descriptor array cell.
6916  // Create a new free-floating map only if we are not allowed to store it.
6917  Map* new_map;
6918  MaybeObject* maybe_new_map = Copy();
6919  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
6920 
6921  new_map->set_elements_kind(kind);
6922 
6923  if (insert_transition) {
6924  MaybeObject* added_elements = set_elements_transition_map(new_map);
6925  if (added_elements->IsFailure()) return added_elements;
6926  new_map->SetBackPointer(this);
6927  }
6928 
6929  return new_map;
6930 }
6931 
6932 
6934  ASSERT(!map->is_observed());
6935 
6936  Isolate* isolate = map->GetIsolate();
6937 
6938  // In case the map owned its own descriptors, share the descriptors and
6939  // transfer ownership to the new map.
6940  Handle<Map> new_map;
6941  if (map->owns_descriptors()) {
6942  new_map = Map::CopyDropDescriptors(map);
6943  } else {
6944  new_map = Map::Copy(map);
6945  }
6946 
6947  Handle<TransitionArray> transitions =
6948  Map::AddTransition(map, isolate->factory()->observed_symbol(), new_map,
6949  FULL_TRANSITION);
6950 
6951  map->set_transitions(*transitions);
6952 
6953  new_map->set_is_observed();
6954 
6955  if (map->owns_descriptors()) {
6956  new_map->InitializeDescriptors(map->instance_descriptors());
6957  map->set_owns_descriptors(false);
6958  }
6959 
6960  new_map->SetBackPointer(*map);
6961  return new_map;
6962 }
6963 
6964 
6967 
6968  // If the map has pre-allocated properties, always start out with a descriptor
6969  // array describing these properties.
6970  ASSERT(constructor()->IsJSFunction());
6971  JSFunction* ctor = JSFunction::cast(constructor());
6972  Map* map = ctor->initial_map();
6973  DescriptorArray* descriptors = map->instance_descriptors();
6974 
6975  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
6976  DescriptorArray* new_descriptors;
6977  MaybeObject* maybe_descriptors =
6978  descriptors->CopyUpTo(number_of_own_descriptors);
6979  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
6980 
6981  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
6982 }
6983 
6984 
6985 Handle<Map> Map::Copy(Handle<Map> map) {
6986  CALL_HEAP_FUNCTION(map->GetIsolate(), map->Copy(), Map);
6987 }
6988 
6989 
6990 MaybeObject* Map::Copy() {
6991  DescriptorArray* descriptors = instance_descriptors();
6992  DescriptorArray* new_descriptors;
6993  int number_of_own_descriptors = NumberOfOwnDescriptors();
6994  MaybeObject* maybe_descriptors =
6995  descriptors->CopyUpTo(number_of_own_descriptors);
6996  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
6997 
6998  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
6999 }
7000 
7001 
7002 MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
7003  TransitionFlag flag) {
7004  DescriptorArray* descriptors = instance_descriptors();
7005 
7006  // Ensure the key is unique.
7007  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
7008  if (maybe_failure->IsFailure()) return maybe_failure;
7009 
7010  int old_size = NumberOfOwnDescriptors();
7011  int new_size = old_size + 1;
7012 
7013  if (flag == INSERT_TRANSITION &&
7014  owns_descriptors() &&
7016  return ShareDescriptor(descriptors, descriptor);
7017  }
7018 
7019  DescriptorArray* new_descriptors;
7020  MaybeObject* maybe_descriptors =
7021  DescriptorArray::Allocate(GetIsolate(), old_size, 1);
7022  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
7023 
7024  DescriptorArray::WhitenessWitness witness(new_descriptors);
7025 
7026  // Copy the descriptors, inserting a descriptor.
7027  for (int i = 0; i < old_size; ++i) {
7028  new_descriptors->CopyFrom(i, descriptors, i, witness);
7029  }
7030 
7031  if (old_size != descriptors->number_of_descriptors()) {
7032  new_descriptors->SetNumberOfDescriptors(new_size);
7033  new_descriptors->Set(old_size, descriptor, witness);
7034  new_descriptors->Sort();
7035  } else {
7036  new_descriptors->Append(descriptor, witness);
7037  }
7038 
7039  Name* key = descriptor->GetKey();
7040  return CopyReplaceDescriptors(new_descriptors, flag, key, SIMPLE_TRANSITION);
7041 }
7042 
7043 
7044 MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor,
7045  TransitionFlag flag) {
7046  DescriptorArray* old_descriptors = instance_descriptors();
7047 
7048  // Ensure the key is unique.
7049  MaybeObject* maybe_result = descriptor->KeyToUniqueName();
7050  if (maybe_result->IsFailure()) return maybe_result;
7051 
7052  // We replace the key if it is already present.
7053  int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this);
7054  if (index != DescriptorArray::kNotFound) {
7055  return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag);
7056  }
7057  return CopyAddDescriptor(descriptor, flag);
7058 }
7059 
7060 
7063  int enumeration_index,
7064  PropertyAttributes attributes) {
7065  CALL_HEAP_FUNCTION(desc->GetIsolate(),
7066  desc->CopyUpToAddAttributes(enumeration_index, attributes),
7067  DescriptorArray);
7068 }
7069 
7070 
7072  int enumeration_index, PropertyAttributes attributes) {
7073  if (enumeration_index == 0) return GetHeap()->empty_descriptor_array();
7074 
7075  int size = enumeration_index;
7076 
7077  DescriptorArray* descriptors;
7078  MaybeObject* maybe_descriptors = Allocate(GetIsolate(), size);
7079  if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
7080  DescriptorArray::WhitenessWitness witness(descriptors);
7081 
7082  if (attributes != NONE) {
7083  for (int i = 0; i < size; ++i) {
7084  Object* value = GetValue(i);
7085  PropertyDetails details = GetDetails(i);
7086  int mask = DONT_DELETE | DONT_ENUM;
7087  // READ_ONLY is an invalid attribute for JS setters/getters.
7088  if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
7089  mask |= READ_ONLY;
7090  }
7091  details = details.CopyAddAttributes(
7092  static_cast<PropertyAttributes>(attributes & mask));
7093  Descriptor desc(GetKey(i), value, details);
7094  descriptors->Set(i, &desc, witness);
7095  }
7096  } else {
7097  for (int i = 0; i < size; ++i) {
7098  descriptors->CopyFrom(i, this, i, witness);
7099  }
7100  }
7101 
7102  if (number_of_descriptors() != enumeration_index) descriptors->Sort();
7103 
7104  return descriptors;
7105 }
7106 
7107 
7109  Descriptor* descriptor,
7110  int insertion_index,
7111  TransitionFlag flag) {
7112  // Ensure the key is unique.
7113  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
7114  if (maybe_failure->IsFailure()) return maybe_failure;
7115 
7116  Name* key = descriptor->GetKey();
7117  ASSERT(key == descriptors->GetKey(insertion_index));
7118 
7119  int new_size = NumberOfOwnDescriptors();
7120  ASSERT(0 <= insertion_index && insertion_index < new_size);
7121 
7122  ASSERT_LT(insertion_index, new_size);
7123 
7124  DescriptorArray* new_descriptors;
7125  MaybeObject* maybe_descriptors =
7127  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
7128  DescriptorArray::WhitenessWitness witness(new_descriptors);
7129 
7130  for (int i = 0; i < new_size; ++i) {
7131  if (i == insertion_index) {
7132  new_descriptors->Set(i, descriptor, witness);
7133  } else {
7134  new_descriptors->CopyFrom(i, descriptors, i, witness);
7135  }
7136  }
7137 
7138  // Re-sort if descriptors were removed.
7139  if (new_size != descriptors->length()) new_descriptors->Sort();
7140 
7141  SimpleTransitionFlag simple_flag =
7142  (insertion_index == descriptors->number_of_descriptors() - 1)
7144  : FULL_TRANSITION;
7145  return CopyReplaceDescriptors(new_descriptors, flag, key, simple_flag);
7146 }
7147 
7148 
7150  Handle<Name> name,
7151  Handle<Code> code) {
7152  Isolate* isolate = map->GetIsolate();
7153  CALL_HEAP_FUNCTION_VOID(isolate,
7154  map->UpdateCodeCache(*name, *code));
7155 }
7156 
7157 
7158 MaybeObject* Map::UpdateCodeCache(Name* name, Code* code) {
7159  // Allocate the code cache if not present.
7160  if (code_cache()->IsFixedArray()) {
7161  Object* result;
7162  { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache();
7163  if (!maybe_result->ToObject(&result)) return maybe_result;
7164  }
7165  set_code_cache(result);
7166  }
7167 
7168  // Update the code cache.
7169  return CodeCache::cast(code_cache())->Update(name, code);
7170 }
7171 
7172 
7174  // Do a lookup if a code cache exists.
7175  if (!code_cache()->IsFixedArray()) {
7176  return CodeCache::cast(code_cache())->Lookup(name, flags);
7177  } else {
7178  return GetHeap()->undefined_value();
7179  }
7180 }
7181 
7182 
7184  // Get the internal index if a code cache exists.
7185  if (!code_cache()->IsFixedArray()) {
7186  return CodeCache::cast(code_cache())->GetIndex(name, code);
7187  }
7188  return -1;
7189 }
7190 
7191 
7192 void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
7193  // No GC is supposed to happen between a call to IndexInCodeCache and
7194  // RemoveFromCodeCache so the code cache must be there.
7195  ASSERT(!code_cache()->IsFixedArray());
7196  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
7197 }
7198 
7199 
7200 // An iterator over all map transitions in a descriptor array, reusing the map
7201 // field of the contents array while it is running.
7202 class IntrusiveMapTransitionIterator {
7203  public:
7204  explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array)
7205  : transition_array_(transition_array) { }
7206 
7207  void Start() {
7208  ASSERT(!IsIterating());
7209  *TransitionArrayHeader() = Smi::FromInt(0);
7210  }
7211 
7212  bool IsIterating() {
7213  return (*TransitionArrayHeader())->IsSmi();
7214  }
7215 
7216  Map* Next() {
7217  ASSERT(IsIterating());
7218  int index = Smi::cast(*TransitionArrayHeader())->value();
7219  int number_of_transitions = transition_array_->number_of_transitions();
7220  while (index < number_of_transitions) {
7221  *TransitionArrayHeader() = Smi::FromInt(index + 1);
7222  return transition_array_->GetTarget(index);
7223  }
7224 
7225  *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map();
7226  return NULL;
7227  }
7228 
7229  private:
7230  Object** TransitionArrayHeader() {
7231  return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset);
7232  }
7233 
7234  TransitionArray* transition_array_;
7235 };
7236 
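// [Editor's sketch] The iterator above parks a Smi index in the slot that
// normally holds the transition array's map, and writes the fixed-array map
// back once the last transition has been returned. A standalone analogue of
// that "borrow a header word" trick (illustrative only, not part of
// objects.cc; the even/odd tagging and all names are the editor's, loosely
// mirroring V8's Smi vs. heap-pointer tags):
#include <cstdint>

struct IntrusiveArray {
  intptr_t header;  // normally holds an odd, pointer-like "real" header value
  int items[4];
  int length;
};

static bool IsIterating(const IntrusiveArray* a) { return (a->header & 1) == 0; }

static void StartIteration(IntrusiveArray* a) {
  a->header = 0;  // an even value encodes "iteration index 0"
}

// Returns the next item, or -1 and restores the real header when exhausted.
static int NextItem(IntrusiveArray* a, intptr_t real_header) {
  intptr_t index = a->header >> 1;
  if (index < a->length) {
    a->header = (index + 1) << 1;
    return a->items[index];
  }
  a->header = real_header;  // put the borrowed slot back
  return -1;
}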
7237 
7238 // An iterator over all prototype transitions, reusing the map field of the
7239 // underlying array while it is running.
7240 class IntrusivePrototypeTransitionIterator {
7241  public:
7242  explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
7243  : proto_trans_(proto_trans) { }
7244 
7245  void Start() {
7246  ASSERT(!IsIterating());
7247  *Header() = Smi::FromInt(0);
7248  }
7249 
7250  bool IsIterating() {
7251  return (*Header())->IsSmi();
7252  }
7253 
7254  Map* Next() {
7255  ASSERT(IsIterating());
7256  int transitionNumber = Smi::cast(*Header())->value();
7257  if (transitionNumber < NumberOfTransitions()) {
7258  *Header() = Smi::FromInt(transitionNumber + 1);
7259  return GetTransition(transitionNumber);
7260  }
7261  *Header() = proto_trans_->GetHeap()->fixed_array_map();
7262  return NULL;
7263  }
7264 
7265  private:
7266  Object** Header() {
7267  return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
7268  }
7269 
7270  int NumberOfTransitions() {
7271  FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7272  Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
7273  return Smi::cast(num)->value();
7274  }
7275 
7276  Map* GetTransition(int transitionNumber) {
7277  FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
7278  return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
7279  }
7280 
7281  int IndexFor(int transitionNumber) {
7284  transitionNumber * Map::kProtoTransitionElementsPerEntry;
7285  }
7286 
7287  HeapObject* proto_trans_;
7288 };
7289 
7290 
7291 // To traverse the transition tree iteratively, we have to store two kinds of
7292 // information in a map: The parent map in the traversal and which children of a
7293 // node have already been visited. To do this without additional memory, we
7294 // temporarily reuse two maps with known values:
7295 //
7296 // (1) The map of the map temporarily holds the parent, and is restored to the
7297 // meta map afterwards.
7298 //
7299 // (2) The info which children have already been visited depends on which part
7300 // of the map we currently iterate:
7301 //
7302 // (a) If we currently follow normal map transitions, we temporarily store
7303 // the current index in the map of the FixedArray of the descriptor
7304 // array's contents, and restore it to the fixed array map afterwards.
7305 // Note that a single descriptor can have 0, 1, or 2 transitions.
7306 //
7307 // (b) If we currently follow prototype transitions, we temporarily store
7308 // the current index in the map of the FixedArray holding the prototype
7309 // transitions, and restore it to the fixed array map afterwards.
7310 //
7311 // Note that the child iterator is just a concatenation of two iterators: One
7312 // iterating over map transitions and one iterating over prototype transitions.
7313 class TraversableMap : public Map {
7314  public:
7315  // Record the parent in the traversal within this map. Note that this destroys
7316  // this map's map!
7317  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
7318 
7319  // Reset the current map's map, returning the parent previously stored in it.
7320  TraversableMap* GetAndResetParent() {
7321  TraversableMap* old_parent = static_cast<TraversableMap*>(map());
7322  set_map_no_write_barrier(GetHeap()->meta_map());
7323  return old_parent;
7324  }
7325 
7326  // Start iterating over this map's children, possibly destroying a FixedArray
7327  // map (see explanation above).
7328  void ChildIteratorStart() {
7329  if (HasTransitionArray()) {
7330  if (HasPrototypeTransitions()) {
7332  }
7333 
7334  IntrusiveMapTransitionIterator(transitions()).Start();
7335  }
7336  }
7337 
7338  // If we have an unvisited child map, return that one and advance. If we have
7339  // none, return NULL and reset any destroyed FixedArray maps.
7340  TraversableMap* ChildIteratorNext() {
7341  TransitionArray* transition_array = unchecked_transition_array();
7342  if (!transition_array->map()->IsSmi() &&
7343  !transition_array->IsTransitionArray()) {
7344  return NULL;
7345  }
7346 
7347  if (transition_array->HasPrototypeTransitions()) {
7348  HeapObject* proto_transitions =
7349  transition_array->UncheckedPrototypeTransitions();
7350  IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
7351  if (proto_iterator.IsIterating()) {
7352  Map* next = proto_iterator.Next();
7353  if (next != NULL) return static_cast<TraversableMap*>(next);
7354  }
7355  }
7356 
7357  IntrusiveMapTransitionIterator transition_iterator(transition_array);
7358  if (transition_iterator.IsIterating()) {
7359  Map* next = transition_iterator.Next();
7360  if (next != NULL) return static_cast<TraversableMap*>(next);
7361  }
7362 
7363  return NULL;
7364  }
7365 };
7366 
7367 
7368 // Traverse the transition tree in postorder without using the C++ stack by
7369 // doing pointer reversal.
7370 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
7371  TraversableMap* current = static_cast<TraversableMap*>(this);
7372  current->ChildIteratorStart();
7373  while (true) {
7374  TraversableMap* child = current->ChildIteratorNext();
7375  if (child != NULL) {
7376  child->ChildIteratorStart();
7377  child->SetParent(current);
7378  current = child;
7379  } else {
7380  TraversableMap* parent = current->GetAndResetParent();
7381  callback(current, data);
7382  if (current == this) break;
7383  current = parent;
7384  }
7385  }
7386 }
7387 
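// [Editor's sketch] TraverseTransitionTree above visits every map postorder
// without recursion or an explicit stack: the parent link is parked in the
// child's map slot on the way down and removed again on the way up. The same
// control structure on an ordinary tree (illustrative only, not part of
// objects.cc; TraversalNode, parent_scratch and next_child are the editor's
// stand-ins for the borrowed map and array-map slots):
#include <cstddef>
#include <vector>

struct TraversalNode {
  std::vector<TraversalNode*> children;
  TraversalNode* parent_scratch;  // parent link, only valid mid-traversal
  size_t next_child;              // index of the next unvisited child
};

static void TraversePostorder(TraversalNode* root,
                              void (*callback)(TraversalNode*)) {
  TraversalNode* current = root;
  current->next_child = 0;
  current->parent_scratch = NULL;
  while (true) {
    TraversalNode* child = NULL;
    if (current->next_child < current->children.size()) {
      child = current->children[current->next_child++];
    }
    if (child != NULL) {
      child->next_child = 0;
      child->parent_scratch = current;     // like SetParent()
      current = child;
    } else {
      TraversalNode* parent = current->parent_scratch;
      current->parent_scratch = NULL;      // like GetAndResetParent()
      callback(current);                   // visit after all children
      if (current == root) break;
      current = parent;
    }
  }
}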
7388 
7389 MaybeObject* CodeCache::Update(Name* name, Code* code) {
7390  // The number of monomorphic stubs for normal load/store/call ICs can grow to
7391  // a large number and therefore they need to go into a hash table. They are
7392  // used to load global properties from cells.
7393  if (code->type() == Code::NORMAL) {
7394  // Make sure that a hash table is allocated for the normal load code cache.
7395  if (normal_type_cache()->IsUndefined()) {
7396  Object* result;
7397  { MaybeObject* maybe_result =
7400  if (!maybe_result->ToObject(&result)) return maybe_result;
7401  }
7402  set_normal_type_cache(result);
7403  }
7404  return UpdateNormalTypeCache(name, code);
7405  } else {
7406  ASSERT(default_cache()->IsFixedArray());
7407  return UpdateDefaultCache(name, code);
7408  }
7409 }
7410 
7411 
7412 MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) {
7413  // When updating the default code cache we disregard the type encoded in the
7414  // flags. This allows call constant stubs to overwrite call field
7415  // stubs, etc.
7417 
7418  // First check whether we can update existing code cache without
7419  // extending it.
7420  FixedArray* cache = default_cache();
7421  int length = cache->length();
7422  int deleted_index = -1;
7423  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7424  Object* key = cache->get(i);
7425  if (key->IsNull()) {
7426  if (deleted_index < 0) deleted_index = i;
7427  continue;
7428  }
7429  if (key->IsUndefined()) {
7430  if (deleted_index >= 0) i = deleted_index;
7431  cache->set(i + kCodeCacheEntryNameOffset, name);
7432  cache->set(i + kCodeCacheEntryCodeOffset, code);
7433  return this;
7434  }
7435  if (name->Equals(Name::cast(key))) {
7436  Code::Flags found =
7437  Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
7438  if (Code::RemoveTypeFromFlags(found) == flags) {
7439  cache->set(i + kCodeCacheEntryCodeOffset, code);
7440  return this;
7441  }
7442  }
7443  }
7444 
7445  // Reached the end of the code cache. If there were deleted
7446  // elements, reuse the space for the first of them.
7447  if (deleted_index >= 0) {
7448  cache->set(deleted_index + kCodeCacheEntryNameOffset, name);
7449  cache->set(deleted_index + kCodeCacheEntryCodeOffset, code);
7450  return this;
7451  }
7452 
7453  // Extend the code cache with some new entries (at least one). Must be a
7454  // multiple of the entry size.
7455  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
7456  new_length = new_length - new_length % kCodeCacheEntrySize;
7457  ASSERT((new_length % kCodeCacheEntrySize) == 0);
7458  Object* result;
7459  { MaybeObject* maybe_result = cache->CopySize(new_length);
7460  if (!maybe_result->ToObject(&result)) return maybe_result;
7461  }
7462 
7463  // Add the (name, code) pair to the new cache.
7464  cache = FixedArray::cast(result);
7465  cache->set(length + kCodeCacheEntryNameOffset, name);
7466  cache->set(length + kCodeCacheEntryCodeOffset, code);
7467  set_default_cache(cache);
7468  return this;
7469 }
7470 
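// [Editor's sketch] The resize arithmetic just above grows the default cache
// by roughly 1.5x plus room for one more entry, then rounds down to a whole
// number of entries. Restated in isolation (illustrative only, not part of
// objects.cc; the helper name is the editor's, and the two-slot entry size
// follows from the name/code offsets used above):
static int GrownCodeCacheLength(int length, int entry_size) {
  int new_length = length + (length >> 1) + entry_size;
  return new_length - new_length % entry_size;
}
// e.g. GrownCodeCacheLength(0, 2) == 2 (room for one name/code pair) and
// GrownCodeCacheLength(10, 2) == 16 (room for eight pairs).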
7471 
7472 MaybeObject* CodeCache::UpdateNormalTypeCache(Name* name, Code* code) {
7473  // Adding a new entry can cause a new cache to be allocated.
7474  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7475  Object* new_cache;
7476  { MaybeObject* maybe_new_cache = cache->Put(name, code);
7477  if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
7478  }
7479  set_normal_type_cache(new_cache);
7480  return this;
7481 }
7482 
7483 
7484 Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
7485  Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
7486  if (result->IsCode()) {
7487  if (Code::cast(result)->flags() == flags) return result;
7488  return GetHeap()->undefined_value();
7489  }
7490  return LookupNormalTypeCache(name, flags);
7491 }
7492 
7493 
7494 Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
7495  FixedArray* cache = default_cache();
7496  int length = cache->length();
7497  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
7498  Object* key = cache->get(i + kCodeCacheEntryNameOffset);
7499  // Skip deleted elements.
7500  if (key->IsNull()) continue;
7501  if (key->IsUndefined()) return key;
7502  if (name->Equals(Name::cast(key))) {
7503  Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
7504  if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
7505  return code;
7506  }
7507  }
7508  }
7509  return GetHeap()->undefined_value();
7510 }
7511 
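// [Editor's sketch] The default cache scanned above is a flat array of
// (name, code) pairs: a null key is a deleted slot that is skipped here but
// reused by UpdateDefaultCache, while an undefined key marks the never-used
// tail, so the scan can stop early. A standalone tombstone-array analogue
// (illustrative only, not part of objects.cc; editor's names):
#include <cstring>

struct CacheSlot {
  enum State { kUnused, kDeleted, kLive };
  State state;       // kDeleted plays the role of null, kUnused of undefined
  const char* key;
  int value;
};

// Returns the cached value for |key|, or -1 if it is not present.
static int LinearCacheLookup(const CacheSlot* slots, int length,
                             const char* key) {
  for (int i = 0; i < length; i++) {
    if (slots[i].state == CacheSlot::kDeleted) continue;  // like IsNull()
    if (slots[i].state == CacheSlot::kUnused) return -1;  // like IsUndefined()
    if (strcmp(slots[i].key, key) == 0) return slots[i].value;
  }
  return -1;
}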
7512 
7513 Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
7514  if (!normal_type_cache()->IsUndefined()) {
7515  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7516  return cache->Lookup(name, flags);
7517  } else {
7518  return GetHeap()->undefined_value();
7519  }
7520 }
7521 
7522 
7523 int CodeCache::GetIndex(Object* name, Code* code) {
7524  if (code->type() == Code::NORMAL) {
7525  if (normal_type_cache()->IsUndefined()) return -1;
7526  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7527  return cache->GetIndex(Name::cast(name), code->flags());
7528  }
7529 
7530  FixedArray* array = default_cache();
7531  int len = array->length();
7532  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
7533  if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
7534  }
7535  return -1;
7536 }
7537 
7538 
7539 void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
7540  if (code->type() == Code::NORMAL) {
7541  ASSERT(!normal_type_cache()->IsUndefined());
7542  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
7543  ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
7544  cache->RemoveByIndex(index);
7545  } else {
7546  FixedArray* array = default_cache();
7547  ASSERT(array->length() >= index && array->get(index)->IsCode());
7548  // Use null instead of undefined for deleted elements to distinguish
7549  // deleted elements from unused elements. This distinction is used
7550  // when looking up in the cache and when updating the cache.
7551  ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
7552  array->set_null(index - 1); // Name.
7553  array->set_null(index); // Code.
7554  }
7555 }
7556 
7557 
7558 // The key in the code cache hash table consists of the property name and the
7559 // code object. The actual match is on the name and the code flags. If a key
7560 // is created using the flags and not a code object it can only be used for
7561 // lookup, not to create a new entry.
7563  public:
7565  : name_(name), flags_(flags), code_(NULL) { }
7566 
7568  : name_(name), flags_(code->flags()), code_(code) { }
7569 
7570 
7571  bool IsMatch(Object* other) {
7572  if (!other->IsFixedArray()) return false;
7573  FixedArray* pair = FixedArray::cast(other);
7574  Name* name = Name::cast(pair->get(0));
7575  Code::Flags flags = Code::cast(pair->get(1))->flags();
7576  if (flags != flags_) {
7577  return false;
7578  }
7579  return name_->Equals(name);
7580  }
7581 
7582  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
7583  return name->Hash() ^ flags;
7584  }
7585 
7586  uint32_t Hash() { return NameFlagsHashHelper(name_, flags_); }
7587 
7588  uint32_t HashForObject(Object* obj) {
7589  FixedArray* pair = FixedArray::cast(obj);
7590  Name* name = Name::cast(pair->get(0));
7591  Code* code = Code::cast(pair->get(1));
7592  return NameFlagsHashHelper(name, code->flags());
7593  }
7594 
7595  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
7596  ASSERT(code_ != NULL);
7597  Object* obj;
7598  { MaybeObject* maybe_obj = heap->AllocateFixedArray(2);
7599  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7600  }
7601  FixedArray* pair = FixedArray::cast(obj);
7602  pair->set(0, name_);
7603  pair->set(1, code_);
7604  return pair;
7605  }
7606 
7607  private:
7608  Name* name_;
7609  Code::Flags flags_;
7610  // TODO(jkummerow): We should be able to get by without this.
7611  Code* code_;
7612 };
7613 
7614 
7615 Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
7616  CodeCacheHashTableKey key(name, flags);
7617  int entry = FindEntry(&key);
7618  if (entry == kNotFound) return GetHeap()->undefined_value();
7619  return get(EntryToIndex(entry) + 1);
7620 }
7621 
7622 
7623 MaybeObject* CodeCacheHashTable::Put(Name* name, Code* code) {
7624  CodeCacheHashTableKey key(name, code);
7625  Object* obj;
7626  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
7627  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7628  }
7629 
7630  // Don't use |this|, as the table might have grown.
7631  CodeCacheHashTable* cache = reinterpret_cast<CodeCacheHashTable*>(obj);
7632 
7633  int entry = cache->FindInsertionEntry(key.Hash());
7634  Object* k;
7635  { MaybeObject* maybe_k = key.AsObject(GetHeap());
7636  if (!maybe_k->ToObject(&k)) return maybe_k;
7637  }
7638 
7639  cache->set(EntryToIndex(entry), k);
7640  cache->set(EntryToIndex(entry) + 1, code);
7641  cache->ElementAdded();
7642  return cache;
7643 }
7644 
7645 
7646 int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
7647  CodeCacheHashTableKey key(name, flags);
7648  int entry = FindEntry(&key);
7649  return (entry == kNotFound) ? -1 : entry;
7650 }
7651 
7652 
7653 void CodeCacheHashTable::RemoveByIndex(int index) {
7654  ASSERT(index >= 0);
7655  Heap* heap = GetHeap();
7656  set(EntryToIndex(index), heap->the_hole_value());
7657  set(EntryToIndex(index) + 1, heap->the_hole_value());
7658  ElementRemoved();
7659 }
7660 
7661 
7663  MapHandleList* maps,
7664  Code::Flags flags,
7665  Handle<Code> code) {
7666  Isolate* isolate = cache->GetIsolate();
7667  CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code));
7668 }
7669 
7670 
7672  Code::Flags flags,
7673  Code* code) {
7674  // Initialize cache if necessary.
7675  if (cache()->IsUndefined()) {
7676  Object* result;
7677  { MaybeObject* maybe_result =
7679  GetHeap(),
7681  if (!maybe_result->ToObject(&result)) return maybe_result;
7682  }
7683  set_cache(result);
7684  } else {
7685  // This entry shouldn't be contained in the cache yet.
7687  ->Lookup(maps, flags)->IsUndefined());
7688  }
7689  PolymorphicCodeCacheHashTable* hash_table =
7691  Object* new_cache;
7692  { MaybeObject* maybe_new_cache = hash_table->Put(maps, flags, code);
7693  if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
7694  }
7695  set_cache(new_cache);
7696  return this;
7697 }
7698 
7699 
7701  Code::Flags flags) {
7702  if (!cache()->IsUndefined()) {
7703  PolymorphicCodeCacheHashTable* hash_table =
7705  return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
7706  } else {
7707  return GetIsolate()->factory()->undefined_value();
7708  }
7709 }
7710 
7711 
7712 // Despite their name, objects of this class are not stored in the actual
7713 // hash table; instead they're temporarily used for lookups. It is therefore
7714 // safe to have a weak (non-owning) pointer to a MapList as a member field.
7716  public:
7717  // Callers must ensure that |maps| outlives the newly constructed object.
7719  : maps_(maps),
7720  code_flags_(code_flags) {}
7721 
7722  bool IsMatch(Object* other) {
7723  MapHandleList other_maps(kDefaultListAllocationSize);
7724  int other_flags;
7725  FromObject(other, &other_flags, &other_maps);
7726  if (code_flags_ != other_flags) return false;
7727  if (maps_->length() != other_maps.length()) return false;
7728  // Compare just the hashes first because it's faster.
7729  int this_hash = MapsHashHelper(maps_, code_flags_);
7730  int other_hash = MapsHashHelper(&other_maps, other_flags);
7731  if (this_hash != other_hash) return false;
7732 
7733  // Full comparison: for each map in maps_, look for an equivalent map in
7734  // other_maps. This implementation is slow, but probably good enough for
7735  // now because the lists are short (<= 4 elements currently).
7736  for (int i = 0; i < maps_->length(); ++i) {
7737  bool match_found = false;
7738  for (int j = 0; j < other_maps.length(); ++j) {
7739  if (*(maps_->at(i)) == *(other_maps.at(j))) {
7740  match_found = true;
7741  break;
7742  }
7743  }
7744  if (!match_found) return false;
7745  }
7746  return true;
7747  }
7748 
7749  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
7750  uint32_t hash = code_flags;
7751  for (int i = 0; i < maps->length(); ++i) {
7752  hash ^= maps->at(i)->Hash();
7753  }
7754  return hash;
7755  }
7756 
7757  uint32_t Hash() {
7758  return MapsHashHelper(maps_, code_flags_);
7759  }
7760 
7761  uint32_t HashForObject(Object* obj) {
7762  MapHandleList other_maps(kDefaultListAllocationSize);
7763  int other_flags;
7764  FromObject(obj, &other_flags, &other_maps);
7765  return MapsHashHelper(&other_maps, other_flags);
7766  }
7767 
7768  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
7769  Object* obj;
7770  // The maps in |maps_| must be copied to a newly allocated FixedArray,
7771  // both because the referenced MapList is short-lived, and because C++
7772  // objects can't be stored in the heap anyway.
7773  { MaybeObject* maybe_obj =
7774  heap->AllocateUninitializedFixedArray(maps_->length() + 1);
7775  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7776  }
7777  FixedArray* list = FixedArray::cast(obj);
7778  list->set(0, Smi::FromInt(code_flags_));
7779  for (int i = 0; i < maps_->length(); ++i) {
7780  list->set(i + 1, *maps_->at(i));
7781  }
7782  return list;
7783  }
7784 
7785  private:
7786  static MapHandleList* FromObject(Object* obj,
7787  int* code_flags,
7788  MapHandleList* maps) {
7789  FixedArray* list = FixedArray::cast(obj);
7790  maps->Rewind(0);
7791  *code_flags = Smi::cast(list->get(0))->value();
7792  for (int i = 1; i < list->length(); ++i) {
7793  maps->Add(Handle<Map>(Map::cast(list->get(i))));
7794  }
7795  return maps;
7796  }
7797 
7798  MapHandleList* maps_; // weak.
7799  int code_flags_;
7800  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
7801 };
7802 
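// [Editor's sketch] MapsHashHelper above XOR-combines each map's hash with
// the code flags, so the result does not depend on the order of the maps in
// the list. A standalone restatement (illustrative only, not part of
// objects.cc; editor's names, with the seed playing the role of the flags):
#include <cstdint>

static uint32_t OrderIndependentHash(const uint32_t* element_hashes,
                                     int count,
                                     uint32_t seed) {
  uint32_t hash = seed;
  for (int i = 0; i < count; i++) hash ^= element_hashes[i];
  return hash;
}
// XOR also cancels out duplicated elements, which is tolerable here only
// because IsMatch() above falls back to a full element-by-element comparison.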
7803 
7805  int code_flags) {
7806  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
7807  int entry = FindEntry(&key);
7808  if (entry == kNotFound) return GetHeap()->undefined_value();
7809  return get(EntryToIndex(entry) + 1);
7810 }
7811 
7812 
7814  int code_flags,
7815  Code* code) {
7816  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
7817  Object* obj;
7818  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
7819  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7820  }
7822  reinterpret_cast<PolymorphicCodeCacheHashTable*>(obj);
7823  int entry = cache->FindInsertionEntry(key.Hash());
7824  { MaybeObject* maybe_obj = key.AsObject(GetHeap());
7825  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7826  }
7827  cache->set(EntryToIndex(entry), obj);
7828  cache->set(EntryToIndex(entry) + 1, code);
7829  cache->ElementAdded();
7830  return cache;
7831 }
7832 
7833 
7834 void FixedArray::Shrink(int new_length) {
7835  ASSERT(0 <= new_length && new_length <= length());
7836  if (new_length < length()) {
7837  RightTrimFixedArray<Heap::FROM_MUTATOR>(
7838  GetHeap(), this, length() - new_length);
7839  }
7840 }
7841 
7842 
7844  ElementsAccessor* accessor = array->GetElementsAccessor();
7845  MaybeObject* maybe_result =
7846  accessor->AddElementsToFixedArray(array, array, this);
7847  FixedArray* result;
7848  if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
7849 #ifdef ENABLE_SLOW_ASSERTS
7851  for (int i = 0; i < result->length(); i++) {
7852  Object* current = result->get(i);
7853  ASSERT(current->IsNumber() || current->IsName());
7854  }
7855  }
7856 #endif
7857  return result;
7858 }
7859 
7860 
7861 MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
7862  ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
7863  MaybeObject* maybe_result =
7864  accessor->AddElementsToFixedArray(NULL, NULL, this, other);
7865  FixedArray* result;
7866  if (!maybe_result->To(&result)) return maybe_result;
7867 #ifdef ENABLE_SLOW_ASSERTS
7869  for (int i = 0; i < result->length(); i++) {
7870  Object* current = result->get(i);
7871  ASSERT(current->IsNumber() || current->IsName());
7872  }
7873  }
7874 #endif
7875  return result;
7876 }
7877 
7878 
7879 MaybeObject* FixedArray::CopySize(int new_length, PretenureFlag pretenure) {
7880  Heap* heap = GetHeap();
7881  if (new_length == 0) return heap->empty_fixed_array();
7882  Object* obj;
7883  { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length, pretenure);
7884  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
7885  }
7886  FixedArray* result = FixedArray::cast(obj);
7887  // Copy the content
7888  DisallowHeapAllocation no_gc;
7889  int len = length();
7890  if (new_length < len) len = new_length;
7891  // We are taking the map from the old fixed array so the map is sure to
7892  // be an immortal immutable object.
7893  result->set_map_no_write_barrier(map());
7894  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
7895  for (int i = 0; i < len; i++) {
7896  result->set(i, get(i), mode);
7897  }
7898  return result;
7899 }
7900 
7901 
7902 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
7903  DisallowHeapAllocation no_gc;
7904  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
7905  for (int index = 0; index < len; index++) {
7906  dest->set(dest_pos+index, get(pos+index), mode);
7907  }
7908 }
7909 
7910 
7911 #ifdef DEBUG
7912 bool FixedArray::IsEqualTo(FixedArray* other) {
7913  if (length() != other->length()) return false;
7914  for (int i = 0 ; i < length(); ++i) {
7915  if (get(i) != other->get(i)) return false;
7916  }
7917  return true;
7918 }
7919 #endif
7920 
7921 
7922 MaybeObject* DescriptorArray::Allocate(Isolate* isolate,
7923  int number_of_descriptors,
7924  int slack) {
7925  Heap* heap = isolate->heap();
7926  // Do not use DescriptorArray::cast on incomplete object.
7927  int size = number_of_descriptors + slack;
7928  if (size == 0) return heap->empty_descriptor_array();
7929  FixedArray* result;
7930  // Allocate the array of keys.
7931  MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size));
7932  if (!maybe_array->To(&result)) return maybe_array;
7933 
7934  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
7935  result->set(kEnumCacheIndex, Smi::FromInt(0));
7936  return result;
7937 }
7938 
7939 
7942 }
7943 
7944 
7946  FixedArray* new_cache,
7947  Object* new_index_cache) {
7948  ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
7949  ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
7950  ASSERT(!IsEmpty());
7951  ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
7952  FixedArray::cast(bridge_storage)->
7953  set(kEnumCacheBridgeCacheIndex, new_cache);
7954  FixedArray::cast(bridge_storage)->
7955  set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
7956  set(kEnumCacheIndex, bridge_storage);
7957 }
7958 
7959 
7960 void DescriptorArray::CopyFrom(int dst_index,
7961  DescriptorArray* src,
7962  int src_index,
7963  const WhitenessWitness& witness) {
7964  Object* value = src->GetValue(src_index);
7965  PropertyDetails details = src->GetDetails(src_index);
7966  Descriptor desc(src->GetKey(src_index), value, details);
7967  Set(dst_index, &desc, witness);
7968 }
7969 
7970 
7972  int verbatim,
7973  int valid,
7974  int new_size,
7975  int modify_index,
7976  StoreMode store_mode,
7977  Handle<DescriptorArray> other) {
7978  CALL_HEAP_FUNCTION(desc->GetIsolate(),
7979  desc->Merge(verbatim, valid, new_size, modify_index,
7980  store_mode, *other),
7981  DescriptorArray);
7982 }
7983 
7984 
7985 // Generalize the |other| descriptor array by merging it into the (at least
7986 // partly) updated |this| descriptor array.
7987 // The method merges two descriptor arrays in three parts. Both descriptor arrays
7988 // are identical up to |verbatim|. They also overlap in keys up to |valid|.
7989 // Between |verbatim| and |valid|, the resulting descriptor type as well as the
7990 // representation are generalized from both |this| and |other|. Beyond |valid|,
7991 // the descriptors are copied verbatim from |other| up to |new_size|.
7992 // In case of incompatible types, the type and representation of |other| is
7993 // used.
7994 MaybeObject* DescriptorArray::Merge(int verbatim,
7995  int valid,
7996  int new_size,
7997  int modify_index,
7998  StoreMode store_mode,
7999  DescriptorArray* other) {
8000  ASSERT(verbatim <= valid);
8001  ASSERT(valid <= new_size);
8002 
8003  DescriptorArray* result;
8004  // Allocate a new descriptor array large enough to hold the required
8005  // descriptors, with minimally the exact same size as this descriptor array.
8006  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
8007  GetIsolate(), new_size,
8008  Max(new_size, other->number_of_descriptors()) - new_size);
8009  if (!maybe_descriptors->To(&result)) return maybe_descriptors;
8010  ASSERT(result->length() > length() ||
8011  result->NumberOfSlackDescriptors() > 0 ||
8012  result->number_of_descriptors() == other->number_of_descriptors());
8013  ASSERT(result->number_of_descriptors() == new_size);
8014 
8015  DescriptorArray::WhitenessWitness witness(result);
8016 
8017  int descriptor;
8018 
8019  // 0 -> |verbatim|
8020  int current_offset = 0;
8021  for (descriptor = 0; descriptor < verbatim; descriptor++) {
8022  if (GetDetails(descriptor).type() == FIELD) current_offset++;
8023  result->CopyFrom(descriptor, other, descriptor, witness);
8024  }
8025 
8026  // |verbatim| -> |valid|
8027  for (; descriptor < valid; descriptor++) {
8028  Name* key = GetKey(descriptor);
8029  PropertyDetails details = GetDetails(descriptor);
8030  PropertyDetails other_details = other->GetDetails(descriptor);
8031 
8032  if (details.type() == FIELD || other_details.type() == FIELD ||
8033  (store_mode == FORCE_FIELD && descriptor == modify_index) ||
8034  (details.type() == CONSTANT &&
8035  other_details.type() == CONSTANT &&
8036  GetValue(descriptor) != other->GetValue(descriptor))) {
8037  Representation representation =
8038  details.representation().generalize(other_details.representation());
8039  FieldDescriptor d(key,
8040  current_offset++,
8041  other_details.attributes(),
8042  representation);
8043  result->Set(descriptor, &d, witness);
8044  } else {
8045  result->CopyFrom(descriptor, other, descriptor, witness);
8046  }
8047  }
8048 
8049  // |valid| -> |new_size|
8050  for (; descriptor < new_size; descriptor++) {
8051  PropertyDetails details = other->GetDetails(descriptor);
8052  if (details.type() == FIELD ||
8053  (store_mode == FORCE_FIELD && descriptor == modify_index)) {
8054  Name* key = other->GetKey(descriptor);
8055  FieldDescriptor d(key,
8056  current_offset++,
8057  details.attributes(),
8058  details.representation());
8059  result->Set(descriptor, &d, witness);
8060  } else {
8061  result->CopyFrom(descriptor, other, descriptor, witness);
8062  }
8063  }
8064 
8065  result->Sort();
8066  return result;
8067 }
8068 
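// [Editor's sketch] A simplified restatement of the three segments Merge
// handles above, ignoring representations, attributes, FORCE_FIELD and the
// constant-value conflict check (illustrative only, not part of objects.cc;
// editor's names, with kConstant/kField standing in for CONSTANT/FIELD):
enum SimpleKind { kConstant, kField };

static void MergeKinds(const SimpleKind* self, const SimpleKind* other,
                       int verbatim, int valid, int new_size,
                       SimpleKind* result) {
  int i = 0;
  // [0, verbatim): both arrays are identical here, copy as-is.
  for (; i < verbatim; i++) result[i] = other[i];
  // [verbatim, valid): keys overlap; if either side already needs a field,
  // the merged descriptor must be a field as well.
  for (; i < valid; i++) {
    result[i] = (self[i] == kField || other[i] == kField) ? kField : other[i];
  }
  // [valid, new_size): only |other| has these descriptors, take them over.
  for (; i < new_size; i++) result[i] = other[i];
}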
8069 
8070 // Checks whether a merge of |other| into |this| would return a copy of |this|.
8072  int valid,
8073  int new_size,
8074  DescriptorArray* other) {
8075  ASSERT(verbatim <= valid);
8076  ASSERT(valid <= new_size);
8077  if (valid != new_size) return false;
8078 
8079  for (int descriptor = verbatim; descriptor < valid; descriptor++) {
8080  PropertyDetails details = GetDetails(descriptor);
8081  PropertyDetails other_details = other->GetDetails(descriptor);
8082  if (!other_details.representation().fits_into(details.representation())) {
8083  return false;
8084  }
8085  if (details.type() == CONSTANT) {
8086  if (other_details.type() != CONSTANT) return false;
8087  if (GetValue(descriptor) != other->GetValue(descriptor)) return false;
8088  }
8089  }
8090 
8091  return true;
8092 }
8093 
8094 
8095 // We need the whiteness witness since sort will reshuffle the entries in the
8096 // descriptor array. If the descriptor array were to be black, the shuffling
8097 // would move a slot that was already recorded as pointing into an evacuation
8098 // candidate. This would result in missing updates upon evacuation.
8099 void DescriptorArray::Sort() {
8100  // In-place heap sort.
8101  int len = number_of_descriptors();
8102  // Reset sorting since the descriptor array might contain invalid pointers.
8103  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
8104  // Bottom-up max-heap construction.
8105  // Index of the last node with children
8106  const int max_parent_index = (len / 2) - 1;
8107  for (int i = max_parent_index; i >= 0; --i) {
8108  int parent_index = i;
8109  const uint32_t parent_hash = GetSortedKey(i)->Hash();
8110  while (parent_index <= max_parent_index) {
8111  int child_index = 2 * parent_index + 1;
8112  uint32_t child_hash = GetSortedKey(child_index)->Hash();
8113  if (child_index + 1 < len) {
8114  uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8115  if (right_child_hash > child_hash) {
8116  child_index++;
8117  child_hash = right_child_hash;
8118  }
8119  }
8120  if (child_hash <= parent_hash) break;
8121  SwapSortedKeys(parent_index, child_index);
8122  // Now element at child_index could be < its children.
8123  parent_index = child_index; // parent_hash remains correct.
8124  }
8125  }
8126 
8127  // Extract elements and create sorted array.
8128  for (int i = len - 1; i > 0; --i) {
8129  // Put max element at the back of the array.
8130  SwapSortedKeys(0, i);
8131  // Shift down the new top element.
8132  int parent_index = 0;
8133  const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
8134  const int max_parent_index = (i / 2) - 1;
8135  while (parent_index <= max_parent_index) {
8136  int child_index = parent_index * 2 + 1;
8137  uint32_t child_hash = GetSortedKey(child_index)->Hash();
8138  if (child_index + 1 < i) {
8139  uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
8140  if (right_child_hash > child_hash) {
8141  child_index++;
8142  child_hash = right_child_hash;
8143  }
8144  }
8145  if (child_hash <= parent_hash) break;
8146  SwapSortedKeys(parent_index, child_index);
8147  parent_index = child_index;
8148  }
8149  }
8150  ASSERT(IsSortedNoDuplicates());
8151 }
8152 
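// [Editor's sketch] Sort above is a textbook in-place heapsort keyed on the
// name hashes: bottom-up max-heap construction followed by repeated
// extract-max. The same algorithm on a plain array of hashes (illustrative
// only, not part of objects.cc; editor's names):
#include <cstdint>
#include <utility>
#include <vector>

static void HeapSortHashes(std::vector<uint32_t>* hashes) {
  std::vector<uint32_t>& a = *hashes;
  const int len = static_cast<int>(a.size());
  // Bottom-up max-heap construction: sift down every node that has children.
  for (int i = len / 2 - 1; i >= 0; --i) {
    int parent = i;
    while (true) {
      int child = 2 * parent + 1;
      if (child >= len) break;
      if (child + 1 < len && a[child + 1] > a[child]) ++child;
      if (a[child] <= a[parent]) break;
      std::swap(a[parent], a[child]);
      parent = child;
    }
  }
  // Repeatedly move the maximum to the back and restore the heap property.
  for (int i = len - 1; i > 0; --i) {
    std::swap(a[0], a[i]);
    int parent = 0;
    while (true) {
      int child = 2 * parent + 1;
      if (child >= i) break;
      if (child + 1 < i && a[child + 1] > a[child]) ++child;
      if (a[child] <= a[parent]) break;
      std::swap(a[parent], a[child]);
      parent = child;
    }
  }
}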
8153 
8155  Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
8156  copy->set_getter(pair->getter());
8157  copy->set_setter(pair->setter());
8158  return copy;
8159 }
8160 
8161 
8163  Object* accessor = get(component);
8164  return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
8165 }
8166 
8167 
8169  int deopt_entry_count,
8170  PretenureFlag pretenure) {
8171  ASSERT(deopt_entry_count > 0);
8172  return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count),
8173  pretenure);
8174 }
8175 
8176 
8178  int number_of_deopt_points,
8179  PretenureFlag pretenure) {
8180  if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array();
8181  return isolate->heap()->AllocateFixedArray(
8182  LengthOfFixedArray(number_of_deopt_points), pretenure);
8183 }
8184 
8185 
8186 #ifdef DEBUG
8187 bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
8188  if (IsEmpty()) return other->IsEmpty();
8189  if (other->IsEmpty()) return false;
8190  if (length() != other->length()) return false;
8191  for (int i = 0; i < length(); ++i) {
8192  if (get(i) != other->get(i)) return false;
8193  }
8194  return true;
8195 }
8196 #endif
8197 
8198 
8199 static bool IsIdentifier(UnicodeCache* cache, Name* name) {
8200  // Checks whether the buffer contains an identifier (no escape).
8201  if (!name->IsString()) return false;
8202  String* string = String::cast(name);
8203  if (string->length() == 0) return true;
8204  ConsStringIteratorOp op;
8205  StringCharacterStream stream(string, &op);
8206  if (!cache->IsIdentifierStart(stream.GetNext())) {
8207  return false;
8208  }
8209  while (stream.HasMore()) {
8210  if (!cache->IsIdentifierPart(stream.GetNext())) {
8211  return false;
8212  }
8213  }
8214  return true;
8215 }
8216 
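// [Editor's sketch] IsIdentifier above consults the UnicodeCache, so it
// accepts the full set of Unicode identifier characters. A rough ASCII-only
// approximation of the same start/part scan (illustrative only, not part of
// objects.cc; editor's names, and not the predicate V8 actually uses):
static bool IsAsciiIdentifierStart(char c) {
  return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c == '$';
}

static bool IsAsciiIdentifierPart(char c) {
  return IsAsciiIdentifierStart(c) || (c >= '0' && c <= '9');
}

static bool LooksLikeAsciiIdentifier(const char* chars, int length) {
  if (length == 0) return true;  // mirrors the length() == 0 early return above
  if (!IsAsciiIdentifierStart(chars[0])) return false;
  for (int i = 1; i < length; i++) {
    if (!IsAsciiIdentifierPart(chars[i])) return false;
  }
  return true;
}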
8217 
8218 bool Name::IsCacheable(Isolate* isolate) {
8219  return IsSymbol() || IsIdentifier(isolate->unicode_cache(), this);
8220 }
8221 
8222 
8224  if (!GetIsolate()->heap()->Contains(this)) return false;
8225  return true;
8226 }
8227 
8228 
8230  ASSERT(!AllowHeapAllocation::IsAllowed());
8231  int length = this->length();
8232  StringShape shape(this);
8233  String* string = this;
8234  int offset = 0;
8235  if (shape.representation_tag() == kConsStringTag) {
8236  ConsString* cons = ConsString::cast(string);
8237  if (cons->second()->length() != 0) {
8238  return FlatContent();
8239  }
8240  string = cons->first();
8241  shape = StringShape(string);
8242  }
8243  if (shape.representation_tag() == kSlicedStringTag) {
8244  SlicedString* slice = SlicedString::cast(string);
8245  offset = slice->offset();
8246  string = slice->parent();
8247  shape = StringShape(string);
8248  ASSERT(shape.representation_tag() != kConsStringTag &&
8249  shape.representation_tag() != kSlicedStringTag);
8250  }
8251  if (shape.encoding_tag() == kOneByteStringTag) {
8252  const uint8_t* start;
8253  if (shape.representation_tag() == kSeqStringTag) {
8254  start = SeqOneByteString::cast(string)->GetChars();
8255  } else {
8256  start = ExternalAsciiString::cast(string)->GetChars();
8257  }
8258  return FlatContent(Vector<const uint8_t>(start + offset, length));
8259  } else {
8260  ASSERT(shape.encoding_tag() == kTwoByteStringTag);
8261  const uc16* start;
8262  if (shape.representation_tag() == kSeqStringTag) {
8263  start = SeqTwoByteString::cast(string)->GetChars();
8264  } else {
8265  start = ExternalTwoByteString::cast(string)->GetChars();
8266  }
8267  return FlatContent(Vector<const uc16>(start + offset, length));
8268  }
8269 }
8270 
8271 
8273  RobustnessFlag robust_flag,
8274  int offset,
8275  int length,
8276  int* length_return) {
8277  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8278  return SmartArrayPointer<char>(NULL);
8279  }
8280  Heap* heap = GetHeap();
8281 
8282  // A negative length means up to the end of the string.
8283  if (length < 0) length = kMaxInt - offset;
8284 
8285  // Compute the size of the UTF-8 string. Start at the specified offset.
8287  heap->isolate()->objects_string_iterator());
8288  StringCharacterStream stream(this, op.value(), offset);
8289  int character_position = offset;
8290  int utf8_bytes = 0;
8292  while (stream.HasMore() && character_position++ < offset + length) {
8293  uint16_t character = stream.GetNext();
8294  utf8_bytes += unibrow::Utf8::Length(character, last);
8295  last = character;
8296  }
8297 
8298  if (length_return) {
8299  *length_return = utf8_bytes;
8300  }
8301 
8302  char* result = NewArray<char>(utf8_bytes + 1);
8303 
8304  // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
8305  stream.Reset(this, offset);
8306  character_position = offset;
8307  int utf8_byte_position = 0;
8309  while (stream.HasMore() && character_position++ < offset + length) {
8310  uint16_t character = stream.GetNext();
8311  if (allow_nulls == DISALLOW_NULLS && character == 0) {
8312  character = ' ';
8313  }
8314  utf8_byte_position +=
8315  unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
8316  last = character;
8317  }
8318  result[utf8_byte_position] = 0;
8319  return SmartArrayPointer<char>(result);
8320 }
8321 
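// [Editor's sketch] ToCString above makes two passes: first it measures the
// UTF-8 size, then it encodes into an exactly sized buffer. The same two-pass
// shape for BMP characters only (illustrative only, not part of objects.cc;
// editor's names; the real code also folds surrogate pairs via the |last|
// character and can replace embedded NULs with spaces):
#include <cstdint>
#include <string>
#include <vector>

static int Utf8LengthOf(uint16_t c) {
  if (c < 0x80) return 1;
  if (c < 0x800) return 2;
  return 3;
}

static std::string Utf16ToUtf8(const std::vector<uint16_t>& utf16) {
  // Pass 1: measure.
  size_t bytes = 0;
  for (size_t i = 0; i < utf16.size(); ++i) bytes += Utf8LengthOf(utf16[i]);

  // Pass 2: encode into a buffer reserved to exactly that size.
  std::string out;
  out.reserve(bytes);
  for (size_t i = 0; i < utf16.size(); ++i) {
    uint16_t c = utf16[i];
    if (c < 0x80) {
      out.push_back(static_cast<char>(c));
    } else if (c < 0x800) {
      out.push_back(static_cast<char>(0xC0 | (c >> 6)));
      out.push_back(static_cast<char>(0x80 | (c & 0x3F)));
    } else {
      out.push_back(static_cast<char>(0xE0 | (c >> 12)));
      out.push_back(static_cast<char>(0x80 | ((c >> 6) & 0x3F)));
      out.push_back(static_cast<char>(0x80 | (c & 0x3F)));
    }
  }
  return out;
}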
8322 
8324  RobustnessFlag robust_flag,
8325  int* length_return) {
8326  return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
8327 }
8328 
8329 
8330 const uc16* String::GetTwoByteData(unsigned start) {
8332  switch (StringShape(this).representation_tag()) {
8333  case kSeqStringTag:
8334  return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
8335  case kExternalStringTag:
8336  return ExternalTwoByteString::cast(this)->
8337  ExternalTwoByteStringGetData(start);
8338  case kSlicedStringTag: {
8339  SlicedString* slice = SlicedString::cast(this);
8340  return slice->parent()->GetTwoByteData(start + slice->offset());
8341  }
8342  case kConsStringTag:
8343  UNREACHABLE();
8344  return NULL;
8345  }
8346  UNREACHABLE();
8347  return NULL;
8348 }
8349 
8350 
8352  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
8353  return SmartArrayPointer<uc16>();
8354  }
8355  Heap* heap = GetHeap();
8356 
8358  heap->isolate()->objects_string_iterator());
8359  StringCharacterStream stream(this, op.value());
8360 
8361  uc16* result = NewArray<uc16>(length() + 1);
8362 
8363  int i = 0;
8364  while (stream.HasMore()) {
8365  uint16_t character = stream.GetNext();
8366  result[i++] = character;
8367  }
8368  result[i] = 0;
8369  return SmartArrayPointer<uc16>(result);
8370 }
8371 
8372 
8374  return reinterpret_cast<uc16*>(
8375  reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
8376 }
8377 
8378 
8379 void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
8380  Relocatable* current = isolate->relocatable_top();
8381  while (current != NULL) {
8382  current->PostGarbageCollection();
8383  current = current->prev_;
8384  }
8385 }
8386 
8387 
8388 // Reserve space for statics needing saving and restoring.
8389 int Relocatable::ArchiveSpacePerThread() {
8390  return sizeof(Relocatable*); // NOLINT
8391 }
8392 
8393 
8394 // Archive statics that are thread local.
8395 char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
8396  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
8397  isolate->set_relocatable_top(NULL);
8398  return to + ArchiveSpacePerThread();
8399 }
8400 
8401 
8402 // Restore statics that are thread local.
8403 char* Relocatable::RestoreState(Isolate* isolate, char* from) {
8404  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
8405  return from + ArchiveSpacePerThread();
8406 }
8407 
8408 
8409 char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
8410  Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
8411  Iterate(v, top);
8412  return thread_storage + ArchiveSpacePerThread();
8413 }
8414 
8415 
8416 void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
8417  Iterate(v, isolate->relocatable_top());
8418 }
8419 
8420 
8421 void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
8422  Relocatable* current = top;
8423  while (current != NULL) {
8424  current->IterateInstance(v);
8425  current = current->prev_;
8426  }
8427 }
8428 
8429 
8431  : Relocatable(isolate),
8432  str_(str.location()),
8433  length_(str->length()) {
8435 }
8436 
8437 
8439  : Relocatable(isolate),
8440  str_(0),
8441  is_ascii_(true),
8442  length_(input.length()),
8443  start_(input.start()) { }
8444 
8445 
8447  if (str_ == NULL) return;
8448  Handle<String> str(str_);
8449  ASSERT(str->IsFlat());
8450  DisallowHeapAllocation no_gc;
8451  // This does not actually prevent the vector from being relocated later.
8452  String::FlatContent content = str->GetFlatContent();
8453  ASSERT(content.IsFlat());
8454  is_ascii_ = content.IsAscii();
8455  if (is_ascii_) {
8456  start_ = content.ToOneByteVector().start();
8457  } else {
8458  start_ = content.ToUC16Vector().start();
8459  }
8460 }
8461 
8462 
8464  unsigned* offset_out,
8465  int32_t* type_out,
8466  unsigned* length_out) {
8467  ASSERT(string->IsConsString());
8468  ConsString* cons_string = ConsString::cast(string);
8469  // Set up search data.
8470  root_ = cons_string;
8471  consumed_ = *offset_out;
8472  // Now search.
8473  return Search(offset_out, type_out, length_out);
8474 }
8475 
8476 
8477 String* ConsStringIteratorOp::Search(unsigned* offset_out,
8478  int32_t* type_out,
8479  unsigned* length_out) {
8480  ConsString* cons_string = root_;
8481  // Reset the stack, pushing the root string.
8482  depth_ = 1;
8483  maximum_depth_ = 1;
8484  frames_[0] = cons_string;
8485  const unsigned consumed = consumed_;
8486  unsigned offset = 0;
8487  while (true) {
8488  // Loop until the string is found which contains the target offset.
8489  String* string = cons_string->first();
8490  unsigned length = string->length();
8491  int32_t type;
8492  if (consumed < offset + length) {
8493  // Target offset is in the left branch.
8494  // Keep going if we're still in a ConsString.
8495  type = string->map()->instance_type();
8496  if ((type & kStringRepresentationMask) == kConsStringTag) {
8497  cons_string = ConsString::cast(string);
8498  PushLeft(cons_string);
8499  continue;
8500  }
8501  // Tell the stack we're done descending.
8502  AdjustMaximumDepth();
8503  } else {
8504  // Descend right.
8505  // Update progress through the string.
8506  offset += length;
8507  // Keep going if we're still in a ConsString.
8508  string = cons_string->second();
8509  type = string->map()->instance_type();
8510  if ((type & kStringRepresentationMask) == kConsStringTag) {
8511  cons_string = ConsString::cast(string);
8512  PushRight(cons_string);
8513  // TODO(dcarney) Add back root optimization.
8514  continue;
8515  }
8516  // Need this to be updated for the current string.
8517  length = string->length();
8518  // Account for the possibility of an empty right leaf.
8519  // This happens only if we have asked for an offset outside the string.
8520  if (length == 0) {
8521  // Reset depth so future operations will return null immediately.
8522  Reset();
8523  return NULL;
8524  }
8525  // Tell the stack we're done descending.
8526  AdjustMaximumDepth();
8527  // Pop stack so next iteration is in correct place.
8528  Pop();
8529  }
8530  ASSERT(length != 0);
8531  // Adjust return values and exit.
8532  consumed_ = offset + length;
8533  *offset_out = consumed - offset;
8534  *type_out = type;
8535  *length_out = length;
8536  return string;
8537  }
8538  UNREACHABLE();
8539  return NULL;
8540 }
8541 
8542 
8543 String* ConsStringIteratorOp::NextLeaf(bool* blew_stack,
8544  int32_t* type_out,
8545  unsigned* length_out) {
8546  while (true) {
8547  // Tree traversal complete.
8548  if (depth_ == 0) {
8549  *blew_stack = false;
8550  return NULL;
8551  }
8552  // We've lost track of higher nodes.
8553  if (maximum_depth_ - depth_ == kStackSize) {
8554  *blew_stack = true;
8555  return NULL;
8556  }
8557  // Go right.
8558  ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
8559  String* string = cons_string->second();
8560  int32_t type = string->map()->instance_type();
8561  if ((type & kStringRepresentationMask) != kConsStringTag) {
8562  // Pop stack so next iteration is in correct place.
8563  Pop();
8564  unsigned length = static_cast<unsigned>(string->length());
8565  // Could be a flattened ConsString.
8566  if (length == 0) continue;
8567  *length_out = length;
8568  *type_out = type;
8569  consumed_ += length;
8570  return string;
8571  }
8572  cons_string = ConsString::cast(string);
8573  // TODO(dcarney) Add back root optimization.
8574  PushRight(cons_string);
8575  // Need to traverse all the way left.
8576  while (true) {
8577  // Continue left.
8578  string = cons_string->first();
8579  type = string->map()->instance_type();
8580  if ((type & kStringRepresentationMask) != kConsStringTag) {
8581  AdjustMaximumDepth();
8582  unsigned length = static_cast<unsigned>(string->length());
8583  ASSERT(length != 0);
8584  *length_out = length;
8585  *type_out = type;
8586  consumed_ += length;
8587  return string;
8588  }
8589  cons_string = ConsString::cast(string);
8590  PushLeft(cons_string);
8591  }
8592  }
8593  UNREACHABLE();
8594  return NULL;
8595 }
8596 
8597 
8598 uint16_t ConsString::ConsStringGet(int index) {
8599  ASSERT(index >= 0 && index < this->length());
8600 
8601  // Check for a flattened cons string
8602  if (second()->length() == 0) {
8603  String* left = first();
8604  return left->Get(index);
8605  }
8606 
8607  String* string = String::cast(this);
8608 
8609  while (true) {
8610  if (StringShape(string).IsCons()) {
8611  ConsString* cons_string = ConsString::cast(string);
8612  String* left = cons_string->first();
8613  if (left->length() > index) {
8614  string = left;
8615  } else {
8616  index -= left->length();
8617  string = cons_string->second();
8618  }
8619  } else {
8620  return string->Get(index);
8621  }
8622  }
8623 
8624  UNREACHABLE();
8625  return 0;
8626 }
8627 
8628 
8629 uint16_t SlicedString::SlicedStringGet(int index) {
8630  return parent()->Get(offset() + index);
8631 }
8632 
8633 
8634 template <typename sinkchar>
8635 void String::WriteToFlat(String* src,
8636  sinkchar* sink,
8637  int f,
8638  int t) {
8639  String* source = src;
8640  int from = f;
8641  int to = t;
8642  while (true) {
8643  ASSERT(0 <= from && from <= to && to <= source->length());
8644  switch (StringShape(source).full_representation_tag()) {
8645  case kOneByteStringTag | kExternalStringTag: {
8646  CopyChars(sink,
8647  ExternalAsciiString::cast(source)->GetChars() + from,
8648  to - from);
8649  return;
8650  }
8651  case kTwoByteStringTag | kExternalStringTag: {
8652  const uc16* data =
8653  ExternalTwoByteString::cast(source)->GetChars();
8654  CopyChars(sink,
8655  data + from,
8656  to - from);
8657  return;
8658  }
8659  case kOneByteStringTag | kSeqStringTag: {
8660  CopyChars(sink,
8661  SeqOneByteString::cast(source)->GetChars() + from,
8662  to - from);
8663  return;
8664  }
8665  case kTwoByteStringTag | kSeqStringTag: {
8666  CopyChars(sink,
8667  SeqTwoByteString::cast(source)->GetChars() + from,
8668  to - from);
8669  return;
8670  }
8671  case kOneByteStringTag | kConsStringTag:
8672  case kTwoByteStringTag | kConsStringTag: {
8673  ConsString* cons_string = ConsString::cast(source);
8674  String* first = cons_string->first();
8675  int boundary = first->length();
8676  if (to - boundary >= boundary - from) {
8677  // Right hand side is longer. Recurse over left.
8678  if (from < boundary) {
8679  WriteToFlat(first, sink, from, boundary);
8680  sink += boundary - from;
8681  from = 0;
8682  } else {
8683  from -= boundary;
8684  }
8685  to -= boundary;
8686  source = cons_string->second();
8687  } else {
8688  // Left hand side is longer. Recurse over right.
8689  if (to > boundary) {
8690  String* second = cons_string->second();
8691  // When repeatedly appending to a string, we get a cons string that
8692  // is unbalanced to the left, a list, essentially. We inline the
8693  // common case of sequential ascii right child.
8694  if (to - boundary == 1) {
8695  sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
8696  } else if (second->IsSeqOneByteString()) {
8697  CopyChars(sink + boundary - from,
8698  SeqOneByteString::cast(second)->GetChars(),
8699  to - boundary);
8700  } else {
8701  WriteToFlat(second,
8702  sink + boundary - from,
8703  0,
8704  to - boundary);
8705  }
8706  to = boundary;
8707  }
8708  source = first;
8709  }
8710  break;
8711  }
8712  case kOneByteStringTag | kSlicedStringTag:
8713  case kTwoByteStringTag | kSlicedStringTag: {
8714  SlicedString* slice = SlicedString::cast(source);
8715  unsigned offset = slice->offset();
8716  WriteToFlat(slice->parent(), sink, from + offset, to + offset);
8717  return;
8718  }
8719  }
8720  }
8721 }
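The cons-string branch of WriteToFlat bounds stack depth by recursing into the shorter half and looping on the longer one. Below is a minimal standalone sketch of the same flattening traversal over a toy rope type; RopeNode and Flatten are illustrative names, not V8 API, and for simplicity the sketch always recurses into the left child instead of picking the shorter side.

#include <string>

// Toy rope node: either a leaf holding text, or an internal node with two children.
struct RopeNode {
  std::string leaf;                 // used only when both children are null
  const RopeNode* left = nullptr;
  const RopeNode* right = nullptr;
};

// Append the rope's characters to *out, recursing into the left child and
// iterating on the right. (WriteToFlat above instead recurses into whichever
// side is shorter, which bounds the recursion depth for unbalanced ropes.)
static void Flatten(const RopeNode* node, std::string* out) {
  while (node != nullptr) {
    if (node->left == nullptr && node->right == nullptr) {
      out->append(node->leaf);
      return;
    }
    Flatten(node->left, out);   // recurse over one child...
    node = node->right;         // ...and loop over the other
  }
}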
8722 
8723 
8724 // Compares the contents of two strings by reading and comparing
8725 // int-sized blocks of characters.
8726 template <typename Char>
8727 static inline bool CompareRawStringContents(const Char* const a,
8728  const Char* const b,
8729  int length) {
8730  int i = 0;
8731 #ifndef V8_HOST_CAN_READ_UNALIGNED
8732  // If this architecture isn't comfortable reading unaligned ints
8733  // then we have to check that the strings are aligned before
8734  // comparing them blockwise.
8735  const int kAlignmentMask = sizeof(uint32_t) - 1; // NOLINT
8736  uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
8737  uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
8738  if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
8739 #endif
8740  const int kStepSize = sizeof(int) / sizeof(Char); // NOLINT
8741  int endpoint = length - kStepSize;
8742  // Compare blocks until we reach near the end of the string.
8743  for (; i <= endpoint; i += kStepSize) {
8744  uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
8745  uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
8746  if (wa != wb) {
8747  return false;
8748  }
8749  }
8750 #ifndef V8_HOST_CAN_READ_UNALIGNED
8751  }
8752 #endif
8753  // Compare the remaining characters that didn't fit into a block.
8754  for (; i < length; i++) {
8755  if (a[i] != b[i]) {
8756  return false;
8757  }
8758  }
8759  return true;
8760 }
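A standalone sketch of the same word-at-a-time comparison in portable C++ follows; BlockwiseEquals is an illustrative name, not V8 API. It uses memcpy for the word loads, so it stays correct even on targets that cannot read unaligned ints.

#include <cstddef>
#include <cstdint>
#include <cstring>

// Compare two equally long byte buffers a word at a time, then finish the
// tail byte by byte. memcpy keeps the word loads alignment-safe.
static bool BlockwiseEquals(const uint8_t* a, const uint8_t* b, std::size_t length) {
  std::size_t i = 0;
  const std::size_t kStep = sizeof(uint32_t);
  for (; i + kStep <= length; i += kStep) {
    uint32_t wa;
    uint32_t wb;
    std::memcpy(&wa, a + i, kStep);
    std::memcpy(&wb, b + i, kStep);
    if (wa != wb) return false;
  }
  for (; i < length; i++) {
    if (a[i] != b[i]) return false;
  }
  return true;
}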
8761 
8762 
8763 template<typename Chars1, typename Chars2>
8764 class RawStringComparator {
8765  public:
8766  static inline bool compare(const Chars1* a, const Chars2* b, int len) {
8767  ASSERT(sizeof(Chars1) != sizeof(Chars2));
8768  for (int i = 0; i < len; i++) {
8769  if (a[i] != b[i]) {
8770  return false;
8771  }
8772  }
8773  return true;
8774  }
8775 };
8776 
8777 
8778 template<>
8779 class RawStringComparator<uint16_t, uint16_t> {
8780  public:
8781  static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
8782  return CompareRawStringContents(a, b, len);
8783  }
8784 };
8785 
8786 
8787 template<>
8788 class RawStringComparator<uint8_t, uint8_t> {
8789  public:
8790  static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
8791  return CompareRawStringContents(a, b, len);
8792  }
8793 };
8794 
8795 
8796 class StringComparator {
8797  class State {
8798  public:
8799  explicit inline State(ConsStringIteratorOp* op)
8800  : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}
8801 
8802  inline void Init(String* string, unsigned len) {
8803  op_->Reset();
8804  int32_t type = string->map()->instance_type();
8805  String::Visit(string, 0, *this, *op_, type, len);
8806  }
8807 
8808  inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
8809  is_one_byte_ = true;
8810  buffer8_ = chars;
8811  length_ = length;
8812  }
8813 
8814  inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
8815  is_one_byte_ = false;
8816  buffer16_ = chars;
8817  length_ = length;
8818  }
8819 
8820  void Advance(unsigned consumed) {
8821  ASSERT(consumed <= length_);
8822  // Still in buffer.
8823  if (length_ != consumed) {
8824  if (is_one_byte_) {
8825  buffer8_ += consumed;
8826  } else {
8827  buffer16_ += consumed;
8828  }
8829  length_ -= consumed;
8830  return;
8831  }
8832  // Advance state.
8833  ASSERT(op_->HasMore());
8834  int32_t type = 0;
8835  unsigned length = 0;
8836  String* next = op_->ContinueOperation(&type, &length);
8837  ASSERT(next != NULL);
8838  ConsStringNullOp null_op;
8839  String::Visit(next, 0, *this, null_op, type, length);
8840  }
8841 
8842  ConsStringIteratorOp* const op_;
8843  bool is_one_byte_;
8844  unsigned length_;
8845  union {
8846  const uint8_t* buffer8_;
8847  const uint16_t* buffer16_;
8848  };
8849 
8850  private:
8852  };
8853 
8854  public:
8855  inline StringComparator(ConsStringIteratorOp* op_1,
8856  ConsStringIteratorOp* op_2)
8857  : state_1_(op_1),
8858  state_2_(op_2) {
8859  }
8860 
8861  template<typename Chars1, typename Chars2>
8862  static inline bool Equals(State* state_1, State* state_2, unsigned to_check) {
8863  const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
8864  const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
8865  return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
8866  }
8867 
8868  bool Equals(unsigned length, String* string_1, String* string_2) {
8869  ASSERT(length != 0);
8870  state_1_.Init(string_1, length);
8871  state_2_.Init(string_2, length);
8872  while (true) {
8873  unsigned to_check = Min(state_1_.length_, state_2_.length_);
8874  ASSERT(to_check > 0 && to_check <= length);
8875  bool is_equal;
8876  if (state_1_.is_one_byte_) {
8877  if (state_2_.is_one_byte_) {
8878  is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
8879  } else {
8880  is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
8881  }
8882  } else {
8883  if (state_2_.is_one_byte_) {
8884  is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
8885  } else {
8886  is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
8887  }
8888  }
8889  // Looping done.
8890  if (!is_equal) return false;
8891  length -= to_check;
8892  // Exit condition. Strings are equal.
8893  if (length == 0) return true;
8894  state_1_.Advance(to_check);
8895  state_2_.Advance(to_check);
8896  }
8897  }
8898 
8899  private:
8900  State state_1_;
8901  State state_2_;
8902  DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
8903 };
8904 
8905 
8906 bool String::SlowEquals(String* other) {
8907  // Fast check: negative check with lengths.
8908  int len = length();
8909  if (len != other->length()) return false;
8910  if (len == 0) return true;
8911 
8912  // Fast check: if hash code is computed for both strings
8913  // a fast negative check can be performed.
8914  if (HasHashCode() && other->HasHashCode()) {
8915 #ifdef ENABLE_SLOW_ASSERTS
8916  if (FLAG_enable_slow_asserts) {
8917  if (Hash() != other->Hash()) {
8918  bool found_difference = false;
8919  for (int i = 0; i < len; i++) {
8920  if (Get(i) != other->Get(i)) {
8921  found_difference = true;
8922  break;
8923  }
8924  }
8925  ASSERT(found_difference);
8926  }
8927  }
8928 #endif
8929  if (Hash() != other->Hash()) return false;
8930  }
8931 
8932  // We know the strings are both non-empty. Compare the first chars
8933  // before we try to flatten the strings.
8934  if (this->Get(0) != other->Get(0)) return false;
8935 
8936  String* lhs = this->TryFlattenGetString();
8937  String* rhs = other->TryFlattenGetString();
8938 
8939  // TODO(dcarney): Compare all types of flat strings with a Visitor.
8940  if (StringShape(lhs).IsSequentialAscii() &&
8941  StringShape(rhs).IsSequentialAscii()) {
8942  const uint8_t* str1 = SeqOneByteString::cast(lhs)->GetChars();
8943  const uint8_t* str2 = SeqOneByteString::cast(rhs)->GetChars();
8944  return CompareRawStringContents(str1, str2, len);
8945  }
8946 
8947  Isolate* isolate = GetIsolate();
8948  StringComparator comparator(isolate->objects_string_compare_iterator_a(),
8949  isolate->objects_string_compare_iterator_b());
8950 
8951  return comparator.Equals(static_cast<unsigned>(len), lhs, rhs);
8952 }
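The ordering of the checks in SlowEquals is the point: every test before the character loop is cheaper than the one after it. A rough standalone sketch of the same layering with a plain cached-hash string type follows; CachedString and SlowEqualsSketch are illustrative names, not V8 API, and the sketch needs C++17 for std::optional.

#include <cstdint>
#include <optional>
#include <string>

struct CachedString {
  std::string chars;
  std::optional<uint32_t> hash;  // filled in lazily elsewhere
};

// Cheap negative checks first (length, cached hashes, first character),
// full character comparison only as a last resort.
static bool SlowEqualsSketch(const CachedString& a, const CachedString& b) {
  if (a.chars.size() != b.chars.size()) return false;
  if (a.chars.empty()) return true;
  if (a.hash && b.hash && *a.hash != *b.hash) return false;
  if (a.chars[0] != b.chars[0]) return false;
  return a.chars == b.chars;
}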
8953 
8954 
8955 bool String::MarkAsUndetectable() {
8956  if (StringShape(this).IsInternalized()) return false;
8957 
8958  Map* map = this->map();
8959  Heap* heap = GetHeap();
8960  if (map == heap->string_map()) {
8961  this->set_map(heap->undetectable_string_map());
8962  return true;
8963  } else if (map == heap->ascii_string_map()) {
8964  this->set_map(heap->undetectable_ascii_string_map());
8965  return true;
8966  }
8967  // Rest cannot be marked as undetectable
8968  return false;
8969 }
8970 
8971 
8972 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
8973  int slen = length();
8974  // Can't check exact length equality, but we can check bounds.
8975  int str_len = str.length();
8976  if (!allow_prefix_match &&
8977  (str_len < slen ||
8978  str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
8979  return false;
8980  }
8981  int i;
8982  unsigned remaining_in_str = static_cast<unsigned>(str_len);
8983  const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
8984  for (i = 0; i < slen && remaining_in_str > 0; i++) {
8985  unsigned cursor = 0;
8986  uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
8987  ASSERT(cursor > 0 && cursor <= remaining_in_str);
8988  if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
8989  if (i > slen - 1) return false;
8990  if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
8991  if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
8992  } else {
8993  if (Get(i) != r) return false;
8994  }
8995  utf8_data += cursor;
8996  remaining_in_str -= cursor;
8997  }
8998  return (allow_prefix_match || i == slen) && remaining_in_str == 0;
8999 }
9000 
9001 
9002 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
9003  int slen = length();
9004  if (str.length() != slen) return false;
9005  DisallowHeapAllocation no_gc;
9006  FlatContent content = GetFlatContent();
9007  if (content.IsAscii()) {
9008  return CompareChars(content.ToOneByteVector().start(),
9009  str.start(), slen) == 0;
9010  }
9011  for (int i = 0; i < slen; i++) {
9012  if (Get(i) != static_cast<uint16_t>(str[i])) return false;
9013  }
9014  return true;
9015 }
9016 
9017 
9018 bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
9019  int slen = length();
9020  if (str.length() != slen) return false;
9021  DisallowHeapAllocation no_gc;
9022  FlatContent content = GetFlatContent();
9023  if (content.IsTwoByte()) {
9024  return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
9025  }
9026  for (int i = 0; i < slen; i++) {
9027  if (Get(i) != str[i]) return false;
9028  }
9029  return true;
9030 }
9031 
9032 
9033 class IteratingStringHasher : public StringHasher {
9034  public:
9035  static inline uint32_t Hash(String* string, uint32_t seed) {
9036  const unsigned len = static_cast<unsigned>(string->length());
9037  IteratingStringHasher hasher(len, seed);
9038  if (hasher.has_trivial_hash()) {
9039  return hasher.GetHashField();
9040  }
9041  int32_t type = string->map()->instance_type();
9042  ConsStringNullOp null_op;
9043  String::Visit(string, 0, hasher, null_op, type, len);
9044  // Flat strings terminate immediately.
9045  if (hasher.consumed_ == len) {
9046  ASSERT(!string->IsConsString());
9047  return hasher.GetHashField();
9048  }
9049  ASSERT(string->IsConsString());
9050  // This is a ConsString, iterate across it.
9051  ConsStringIteratorOp op;
9052  unsigned offset = 0;
9053  unsigned leaf_length = len;
9054  string = op.Operate(string, &offset, &type, &leaf_length);
9055  while (true) {
9056  ASSERT(hasher.consumed_ < len);
9057  String::Visit(string, 0, hasher, null_op, type, leaf_length);
9058  if (hasher.consumed_ == len) break;
9059  string = op.ContinueOperation(&type, &leaf_length);
9060  // This should be taken care of by the length check.
9061  ASSERT(string != NULL);
9062  }
9063  return hasher.GetHashField();
9064  }
9065  inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
9066  AddCharacters(chars, static_cast<int>(length));
9067  consumed_ += length;
9068  }
9069  inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
9070  AddCharacters(chars, static_cast<int>(length));
9071  consumed_ += length;
9072  }
9073 
9074  private:
9075  inline IteratingStringHasher(int len, uint32_t seed)
9076  : StringHasher(len, seed),
9077  consumed_(0) {}
9078  unsigned consumed_;
9079  DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
9080 };
9081 
9082 
9083 uint32_t String::ComputeAndSetHash() {
9084  // Should only be called if hash code has not yet been computed.
9085  ASSERT(!HasHashCode());
9086 
9087  // Store the hash code in the object.
9088  uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
9089  set_hash_field(field);
9090 
9091  // Check the hash code is there.
9092  ASSERT(HasHashCode());
9093  uint32_t result = field >> kHashShift;
9094  ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
9095  return result;
9096 }
9097 
9098 
9099 bool String::ComputeArrayIndex(uint32_t* index) {
9100  int length = this->length();
9101  if (length == 0 || length > kMaxArrayIndexSize) return false;
9102  ConsStringIteratorOp op;
9103  StringCharacterStream stream(this, &op);
9104  uint16_t ch = stream.GetNext();
9105 
9106  // If the string begins with a '0' character, it must only consist
9107  // of it to be a legal array index.
9108  if (ch == '0') {
9109  *index = 0;
9110  return length == 1;
9111  }
9112 
9113  // Convert string to uint32 array index; character by character.
9114  int d = ch - '0';
9115  if (d < 0 || d > 9) return false;
9116  uint32_t result = d;
9117  while (stream.HasMore()) {
9118  d = stream.GetNext() - '0';
9119  if (d < 0 || d > 9) return false;
9120  // Check that the new result is below the 32 bit limit.
9121  if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
9122  result = (result * 10) + d;
9123  }
9124 
9125  *index = result;
9126  return true;
9127 }
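The overflow guard above uses 429496729 = floor((2^32 - 1) / 10): once the accumulated value exceeds that, appending any digit overflows 32 bits, and at exactly that value only digits 0 through 5 still fit. A standalone sketch of the same parse follows; ParseArrayIndex is an illustrative name, not V8 API, and it omits the kMaxArrayIndexSize length cap applied in ComputeArrayIndex.

#include <cstddef>
#include <cstdint>

// Parse a decimal string as a 32-bit array index. Rejects non-digits,
// leading zeros (except the single string "0"), and 32-bit overflow.
static bool ParseArrayIndex(const char* chars, std::size_t length, uint32_t* index) {
  if (length == 0) return false;
  if (chars[0] == '0') {          // "0" is a valid index; "01" etc. are not
    *index = 0;
    return length == 1;
  }
  uint32_t result = 0;
  for (std::size_t i = 0; i < length; i++) {
    int d = chars[i] - '0';
    if (d < 0 || d > 9) return false;
    // 429496729 == floor((2^32 - 1) / 10); subtract 1 when the digit is > 5.
    if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
    result = result * 10 + static_cast<uint32_t>(d);
  }
  *index = result;
  return true;
}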
9128 
9129 
9130 bool String::SlowAsArrayIndex(uint32_t* index) {
9131  if (length() <= kMaxCachedArrayIndexLength) {
9132  Hash(); // force computation of hash code
9133  uint32_t field = hash_field();
9134  if ((field & kIsNotArrayIndexMask) != 0) return false;
9135  // Isolate the array index from the full hash field.
9136  *index = (kArrayIndexHashMask & field) >> kHashShift;
9137  return true;
9138  } else {
9139  return ComputeArrayIndex(index);
9140  }
9141 }
9142 
9143 
9144 Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
9145  int new_size, old_size;
9146  int old_length = string->length();
9147  if (old_length <= new_length) return string;
9148 
9149  if (string->IsSeqOneByteString()) {
9150  old_size = SeqOneByteString::SizeFor(old_length);
9151  new_size = SeqOneByteString::SizeFor(new_length);
9152  } else {
9153  ASSERT(string->IsSeqTwoByteString());
9154  old_size = SeqTwoByteString::SizeFor(old_length);
9155  new_size = SeqTwoByteString::SizeFor(new_length);
9156  }
9157 
9158  int delta = old_size - new_size;
9159  string->set_length(new_length);
9160 
9161  Address start_of_string = string->address();
9162  ASSERT_OBJECT_ALIGNED(start_of_string);
9163  ASSERT_OBJECT_ALIGNED(start_of_string + new_size);
9164 
9165  Heap* heap = string->GetHeap();
9166  NewSpace* newspace = heap->new_space();
9167  if (newspace->Contains(start_of_string) &&
9168  newspace->top() == start_of_string + old_size) {
9169  // Last allocated object in new space. Simply lower allocation top.
9170  newspace->set_top(start_of_string + new_size);
9171  } else {
9172  // Sizes are pointer size aligned, so that we can use filler objects
9173  // that are a multiple of pointer size.
9174  heap->CreateFillerObjectAt(start_of_string + new_size, delta);
9175  }
9176  heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);
9177 
9178  if (new_length == 0) return heap->isolate()->factory()->empty_string();
9179  return string;
9180 }
9181 
9182 
9183 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
9184  // For array indexes mix the length into the hash as an array index could
9185  // be zero.
9186  ASSERT(length > 0);
9190 
9191  value <<= String::kHashShift;
9192  value |= length << String::kArrayIndexHashLengthShift;
9193 
9194  ASSERT((value & String::kIsNotArrayIndexMask) == 0);
9196  (value & String::kContainsCachedArrayIndexMask) == 0);
9197  return value;
9198 }
9199 
9200 
9201 uint32_t StringHasher::GetHashField() {
9202  if (length_ <= String::kMaxHashCalcLength) {
9203  if (is_array_index_) {
9204  return MakeArrayIndexHash(array_index_, length_);
9205  }
9206  return (GetHashCore(raw_running_hash_) << String::kHashShift) |
9207  String::kIsNotArrayIndexMask;
9208  } else {
9209  return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
9210  }
9211 }
9212 
9213 
9214 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
9215  uint32_t seed,
9216  int* utf16_length_out) {
9217  int vector_length = chars.length();
9218  // Handle some edge cases
9219  if (vector_length <= 1) {
9220  ASSERT(vector_length == 0 ||
9221  static_cast<uint8_t>(chars.start()[0]) <=
9222  unibrow::Utf8::kMaxOneByteChar);
9223  *utf16_length_out = vector_length;
9224  return HashSequentialString(chars.start(), vector_length, seed);
9225  }
9226  // Start with a fake length which won't affect computation.
9227  // It will be updated later.
9228  StringHasher hasher(vector_length, seed);
9229  unsigned remaining = static_cast<unsigned>(vector_length);
9230  const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
9231  int utf16_length = 0;
9232  bool is_index = true;
9233  ASSERT(hasher.is_array_index_);
9234  while (remaining > 0) {
9235  unsigned consumed = 0;
9236  uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
9237  ASSERT(consumed > 0 && consumed <= remaining);
9238  stream += consumed;
9239  remaining -= consumed;
9240  bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
9241  utf16_length += is_two_characters ? 2 : 1;
9242  // No need to keep hashing. But we do need to calculate utf16_length.
9243  if (utf16_length > String::kMaxHashCalcLength) continue;
9244  if (is_two_characters) {
9245  uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
9246  uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
9247  hasher.AddCharacter(c1);
9248  hasher.AddCharacter(c2);
9249  if (is_index) is_index = hasher.UpdateIndex(c1);
9250  if (is_index) is_index = hasher.UpdateIndex(c2);
9251  } else {
9252  hasher.AddCharacter(c);
9253  if (is_index) is_index = hasher.UpdateIndex(c);
9254  }
9255  }
9256  *utf16_length_out = static_cast<int>(utf16_length);
9257  // Must set length here so that hash computation is correct.
9258  hasher.length_ = utf16_length;
9259  return hasher.GetHashField();
9260 }
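ComputeUtf8Hash has to count UTF-16 code units while walking UTF-8 bytes: any code point above unibrow::Utf16::kMaxNonSurrogateCharCode (0xFFFF) is hashed as a surrogate pair, i.e. two units. A standalone sketch of just that accounting follows; Utf16LengthOfUtf8 is an illustrative name, not V8 API, and it assumes well-formed UTF-8 input.

#include <cstddef>
#include <cstdint>

// Count the UTF-16 code units needed for a UTF-8 buffer. Lead bytes of
// 4-byte sequences encode code points above 0xFFFF, which need a surrogate
// pair (two units); every other sequence needs one unit.
static std::size_t Utf16LengthOfUtf8(const uint8_t* data, std::size_t length) {
  std::size_t units = 0;
  for (std::size_t i = 0; i < length;) {
    const uint8_t lead = data[i];
    const std::size_t sequence_length =
        (lead < 0x80) ? 1 : (lead < 0xE0) ? 2 : (lead < 0xF0) ? 3 : 4;
    units += (sequence_length == 4) ? 2 : 1;
    i += sequence_length;
  }
  return units;
}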
9261 
9262 
9263 void String::PrintOn(FILE* file) {
9264  int length = this->length();
9265  for (int i = 0; i < length; i++) {
9266  PrintF(file, "%c", Get(i));
9267  }
9268 }
9269 
9270 
9271 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
9272  int live_enum = map->EnumLength();
9273  if (live_enum == kInvalidEnumCacheSentinel) {
9275  }
9276  if (live_enum == 0) return descriptors->ClearEnumCache();
9277 
9278  FixedArray* enum_cache = descriptors->GetEnumCache();
9279 
9280  int to_trim = enum_cache->length() - live_enum;
9281  if (to_trim <= 0) return;
9282  RightTrimFixedArray<Heap::FROM_GC>(
9283  heap, descriptors->GetEnumCache(), to_trim);
9284 
9285  if (!descriptors->HasEnumIndicesCache()) return;
9286  FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
9287  RightTrimFixedArray<Heap::FROM_GC>(heap, enum_indices_cache, to_trim);
9288 }
9289 
9290 
9291 static void TrimDescriptorArray(Heap* heap,
9292  Map* map,
9293  DescriptorArray* descriptors,
9294  int number_of_own_descriptors) {
9295  int number_of_descriptors = descriptors->number_of_descriptors_storage();
9296  int to_trim = number_of_descriptors - number_of_own_descriptors;
9297  if (to_trim == 0) return;
9298 
9299  RightTrimFixedArray<Heap::FROM_GC>(
9300  heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
9301  descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
9302 
9303  if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
9304  descriptors->Sort();
9305 }
9306 
9307 
9308 // Clear a possible back pointer in case the transition leads to a dead map.
9309 // Return true in case a back pointer has been cleared and false otherwise.
9310 static bool ClearBackPointer(Heap* heap, Map* target) {
9311  if (Marking::MarkBitFrom(target).Get()) return false;
9312  target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
9313  return true;
9314 }
9315 
9316 
9317 // TODO(mstarzinger): This method should be moved into MarkCompactCollector,
9318 // because it cannot be called from outside the GC and we already have methods
9319 // depending on the transitions layout in the GC anyways.
9320 void Map::ClearNonLiveTransitions(Heap* heap) {
9321  // If there are no transitions to be cleared, return.
9322  // TODO(verwaest) Should be an assert, otherwise back pointers are not
9323  // properly cleared.
9324  if (!HasTransitionArray()) return;
9325 
9326  TransitionArray* t = transitions();
9327  MarkCompactCollector* collector = heap->mark_compact_collector();
9328 
9329  int transition_index = 0;
9330 
9331  DescriptorArray* descriptors = instance_descriptors();
9332  bool descriptors_owner_died = false;
9333 
9334  // Compact all live descriptors to the left.
9335  for (int i = 0; i < t->number_of_transitions(); ++i) {
9336  Map* target = t->GetTarget(i);
9337  if (ClearBackPointer(heap, target)) {
9338  if (target->instance_descriptors() == descriptors) {
9339  descriptors_owner_died = true;
9340  }
9341  } else {
9342  if (i != transition_index) {
9343  Name* key = t->GetKey(i);
9344  t->SetKey(transition_index, key);
9345  Object** key_slot = t->GetKeySlot(transition_index);
9346  collector->RecordSlot(key_slot, key_slot, key);
9347  // Target slots do not need to be recorded since maps are not compacted.
9348  t->SetTarget(transition_index, t->GetTarget(i));
9349  }
9350  transition_index++;
9351  }
9352  }
9353 
9354  // If there are no transitions to be cleared, return.
9355  // TODO(verwaest) Should be an assert, otherwise back pointers are not
9356  // properly cleared.
9357  if (transition_index == t->number_of_transitions()) return;
9358 
9359  int number_of_own_descriptors = NumberOfOwnDescriptors();
9360 
9361  if (descriptors_owner_died) {
9362  if (number_of_own_descriptors > 0) {
9363  TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
9364  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
9365  set_owns_descriptors(true);
9366  } else {
9367  ASSERT(descriptors == GetHeap()->empty_descriptor_array());
9368  }
9369  }
9370 
9371  int trim = t->number_of_transitions() - transition_index;
9372  if (trim > 0) {
9373  RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition()
9374  ? trim : trim * TransitionArray::kTransitionSize);
9375  }
9376 }
9377 
9378 
9379 int Map::Hash() {
9380  // For performance reasons we only hash the 3 most variable fields of a map:
9381  // constructor, prototype and bit_field2.
9382 
9383  // Shift away the tag.
9384  int hash = (static_cast<uint32_t>(
9385  reinterpret_cast<uintptr_t>(constructor())) >> 2);
9386 
9387  // XOR-ing the prototype and constructor directly yields too many zero bits
9388  // when the two pointers are close (which is fairly common).
9389  // To avoid this we shift the prototype 4 bits relatively to the constructor.
9390  hash ^= (static_cast<uint32_t>(
9391  reinterpret_cast<uintptr_t>(prototype())) << 2);
9392 
9393  return hash ^ (hash >> 16) ^ bit_field2();
9394 }
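A standalone sketch of the same mixing step follows; PointerPairHash is an illustrative name, not V8 API. Shifting one pointer relative to the other before the XOR keeps nearby addresses from cancelling, and folding in the high half spreads the result.

#include <cstdint>

// Combine two pointer-like values plus some extra bits into a 32-bit hash.
static uint32_t PointerPairHash(uintptr_t constructor, uintptr_t prototype,
                                uint32_t extra_bits) {
  uint32_t hash = static_cast<uint32_t>(constructor) >> 2;  // shift away the tag
  hash ^= static_cast<uint32_t>(prototype) << 2;            // offset to avoid cancellation
  return hash ^ (hash >> 16) ^ extra_bits;
}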
9395 
9396 
9397 static bool CheckEquivalent(Map* first, Map* second) {
9398  return
9399  first->constructor() == second->constructor() &&
9400  first->prototype() == second->prototype() &&
9401  first->instance_type() == second->instance_type() &&
9402  first->bit_field() == second->bit_field() &&
9403  first->bit_field2() == second->bit_field2() &&
9404  first->is_observed() == second->is_observed() &&
9405  first->function_with_prototype() == second->function_with_prototype();
9406 }
9407 
9408 
9409 bool Map::EquivalentToForTransition(Map* other) {
9410  return CheckEquivalent(this, other);
9411 }
9412 
9413 
9414 bool Map::EquivalentToForNormalization(Map* other,
9415  PropertyNormalizationMode mode) {
9416  int properties = mode == CLEAR_INOBJECT_PROPERTIES
9417  ? 0 : other->inobject_properties();
9418  return CheckEquivalent(this, other) && inobject_properties() == properties;
9419 }
9420 
9421 
9422 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9423  for (int i = 0; i < count_of_code_ptr_entries(); i++) {
9424  int index = first_code_ptr_index() + i;
9425  v->VisitCodeEntry(reinterpret_cast<Address>(RawFieldOfElementAt(index)));
9426  }
9427  for (int i = 0; i < count_of_heap_ptr_entries(); i++) {
9428  int index = first_heap_ptr_index() + i;
9429  v->VisitPointer(RawFieldOfElementAt(index));
9430  }
9431 }
9432 
9433 
9434 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9435  // Iterate over all fields in the body but take care in dealing with
9436  // the code entry.
9437  IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9438  v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9439  IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
9440 }
9441 
9442 
9443 void JSFunction::MarkForOptimization() {
9444  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9445  ASSERT(!IsOptimized());
9446  ASSERT(shared()->allows_lazy_compilation() ||
9447  code()->optimizable());
9448  ASSERT(!shared()->is_generator());
9449  set_code_no_write_barrier(
9450  GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9451  // No write barrier required, since the builtin is part of the root set.
9452 }
9453 
9454 
9455 void JSFunction::MarkForConcurrentOptimization() {
9456  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9457  ASSERT(!IsOptimized());
9458  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9459  ASSERT(!shared()->is_generator());
9460  ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9461  if (FLAG_trace_concurrent_recompilation) {
9462  PrintF(" ** Marking ");
9463  PrintName();
9464  PrintF(" for concurrent recompilation.\n");
9465  }
9466  set_code_no_write_barrier(
9467  GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9468  // No write barrier required, since the builtin is part of the root set.
9469 }
9470 
9471 
9472 void JSFunction::MarkInOptimizationQueue() {
9473  // We can only arrive here via the concurrent-recompilation builtin. If
9474  // break points were set, the code would point to the lazy-compile builtin.
9475  ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
9476  ASSERT(!IsOptimized());
9477  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9478  ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9479  if (FLAG_trace_concurrent_recompilation) {
9480  PrintF(" ** Queueing ");
9481  PrintName();
9482  PrintF(" for concurrent recompilation.\n");
9483  }
9484  set_code_no_write_barrier(
9485  GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9486  // No write barrier required, since the builtin is part of the root set.
9487 }
9488 
9489 
9490 void SharedFunctionInfo::AddToOptimizedCodeMap(
9491  Handle<SharedFunctionInfo> shared,
9492  Handle<Context> native_context,
9493  Handle<Code> code,
9494  Handle<FixedArray> literals,
9495  BailoutId osr_ast_id) {
9496  CALL_HEAP_FUNCTION_VOID(
9497  shared->GetIsolate(),
9498  shared->AddToOptimizedCodeMap(
9499  *native_context, *code, *literals, osr_ast_id));
9500 }
9501 
9502 
9503 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
9504  Code* code,
9505  FixedArray* literals,
9506  BailoutId osr_ast_id) {
9507  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
9508  ASSERT(native_context->IsNativeContext());
9509  STATIC_ASSERT(kEntryLength == 4);
9510  Heap* heap = GetHeap();
9511  FixedArray* new_code_map;
9512  Object* value = optimized_code_map();
9513  Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
9514  if (value->IsSmi()) {
9515  // No optimized code map.
9516  ASSERT_EQ(0, Smi::cast(value)->value());
9517  // Create one entry per context: {context, code, literals, osr ast id}.
9518  MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength);
9519  if (!maybe->To(&new_code_map)) return maybe;
9520  new_code_map->set(kEntriesStart + kContextOffset, native_context);
9521  new_code_map->set(kEntriesStart + kCachedCodeOffset, code);
9522  new_code_map->set(kEntriesStart + kLiteralsOffset, literals);
9523  new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi);
9524  } else {
9525  // Copy old map and append one new entry.
9526  FixedArray* old_code_map = FixedArray::cast(value);
9527  ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id));
9528  int old_length = old_code_map->length();
9529  int new_length = old_length + kEntryLength;
9530  MaybeObject* maybe = old_code_map->CopySize(new_length);
9531  if (!maybe->To(&new_code_map)) return maybe;
9532  new_code_map->set(old_length + kContextOffset, native_context);
9533  new_code_map->set(old_length + kCachedCodeOffset, code);
9534  new_code_map->set(old_length + kLiteralsOffset, literals);
9535  new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi);
9536  // Zap the old map for the sake of the heap verifier.
9537  if (Heap::ShouldZapGarbage()) {
9538  Object** data = old_code_map->data_start();
9539  MemsetPointer(data, heap->the_hole_value(), old_length);
9540  }
9541  }
9542 #ifdef DEBUG
9543  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9544  ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
9545  ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9546  ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9547  Code::OPTIMIZED_FUNCTION);
9548  ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9549  ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9550  }
9551 #endif
9552  set_optimized_code_map(new_code_map);
9553  return new_code_map;
9554 }
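The optimized code map is a flat array of fixed-stride records {context, code, literals, osr ast id}. A standalone sketch of that layout follows; AppendEntry, FindEntry and the intptr_t fields are illustrative placeholders, not V8 API, and the real map also reserves header slots such as kNextMapIndex, which this omits.

#include <cstddef>
#include <cstdint>
#include <vector>

// One record per (context, osr ast id) pair, stored back to back.
constexpr int kSketchEntryLength = 4;
constexpr int kSketchContextOffset = 0;
constexpr int kSketchCodeOffset = 1;
constexpr int kSketchLiteralsOffset = 2;
constexpr int kSketchOsrAstIdOffset = 3;

static void AppendEntry(std::vector<intptr_t>* map, intptr_t context,
                        intptr_t code, intptr_t literals, intptr_t osr_ast_id) {
  map->push_back(context);
  map->push_back(code);
  map->push_back(literals);
  map->push_back(osr_ast_id);
}

// Returns the index of the cached code slot, or -1 if there is no entry.
static int FindEntry(const std::vector<intptr_t>& map, intptr_t context,
                     intptr_t osr_ast_id) {
  for (std::size_t i = 0; i + kSketchEntryLength <= map.size();
       i += kSketchEntryLength) {
    if (map[i + kSketchContextOffset] == context &&
        map[i + kSketchOsrAstIdOffset] == osr_ast_id) {
      return static_cast<int>(i + kSketchCodeOffset);
    }
  }
  return -1;
}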
9555 
9556 
9557 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9558  ASSERT(index > kEntriesStart);
9559  FixedArray* code_map = FixedArray::cast(optimized_code_map());
9560  if (!bound()) {
9561  FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9562  ASSERT_NE(NULL, cached_literals);
9563  return cached_literals;
9564  }
9565  return NULL;
9566 }
9567 
9568 
9569 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9570  ASSERT(index > kEntriesStart);
9571  FixedArray* code_map = FixedArray::cast(optimized_code_map());
9572  Code* code = Code::cast(code_map->get(index));
9573  ASSERT_NE(NULL, code);
9574  return code;
9575 }
9576 
9577 
9578 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9579  FixedArray* code_map = FixedArray::cast(optimized_code_map());
9580 
9581  // If the next map link slot is already used then the function was
9582  // enqueued with code flushing and we remove it now.
9583  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9584  CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9585  flusher->EvictOptimizedCodeMap(this);
9586  }
9587 
9588  ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
9589  set_optimized_code_map(Smi::FromInt(0));
9590 }
9591 
9592 
9593 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
9594  const char* reason) {
9595  if (optimized_code_map()->IsSmi()) return;
9596 
9597  FixedArray* code_map = FixedArray::cast(optimized_code_map());
9598  int dst = kEntriesStart;
9599  int length = code_map->length();
9600  for (int src = kEntriesStart; src < length; src += kEntryLength) {
9601  ASSERT(code_map->get(src)->IsNativeContext());
9602  if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
9603  // Evict the src entry by not copying it to the dst entry.
9604  if (FLAG_trace_opt) {
9605  PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9606  ShortPrint();
9607  BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
9608  if (osr.IsNone()) {
9609  PrintF("]\n");
9610  } else {
9611  PrintF(" (osr ast id %d)]\n", osr.ToInt());
9612  }
9613  }
9614  } else {
9615  // Keep the src entry by copying it to the dst entry.
9616  if (dst != src) {
9617  code_map->set(dst + kContextOffset,
9618  code_map->get(src + kContextOffset));
9619  code_map->set(dst + kCachedCodeOffset,
9620  code_map->get(src + kCachedCodeOffset));
9621  code_map->set(dst + kLiteralsOffset,
9622  code_map->get(src + kLiteralsOffset));
9623  code_map->set(dst + kOsrAstIdOffset,
9624  code_map->get(src + kOsrAstIdOffset));
9625  }
9626  dst += kEntryLength;
9627  }
9628  }
9629  if (dst != length) {
9630  // Always trim even when array is cleared because of heap verifier.
9631  RightTrimFixedArray<Heap::FROM_MUTATOR>(GetHeap(), code_map, length - dst);
9632  if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
9633  }
9634 }
9635 
9636 
9637 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9638  FixedArray* code_map = FixedArray::cast(optimized_code_map());
9639  ASSERT(shrink_by % kEntryLength == 0);
9640  ASSERT(shrink_by <= code_map->length() - kEntriesStart);
9641  // Always trim even when array is cleared because of heap verifier.
9642  RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by);
9643  if (code_map->length() == kEntriesStart) {
9644  ClearOptimizedCodeMap();
9645  }
9646 }
9647 
9648 
9649 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
9650  if (object->IsGlobalObject()) return;
9651 
9652  // Make sure prototypes are fast objects and their maps have the bit set
9653  // so they remain fast.
9654  if (!object->HasFastProperties()) {
9655  TransformToFastProperties(object, 0);
9656  }
9657 }
9658 
9659 
9660 static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
9661  Context* native_context, Map* initial_map) {
9662  // Replace all of the cached initial array maps in the native context with
9663  // the appropriate transitioned elements kind maps.
9664  Heap* heap = native_context->GetHeap();
9665  MaybeObject* maybe_maps =
9667  FixedArray* maps;
9668  if (!maybe_maps->To(&maps)) return maybe_maps;
9669 
9670  Map* current_map = initial_map;
9671  ElementsKind kind = current_map->elements_kind();
9673  maps->set(kind, current_map);
9674  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
9675  i < kFastElementsKindCount; ++i) {
9676  Map* new_map;
9677  ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
9678  if (current_map->HasElementsTransition()) {
9679  new_map = current_map->elements_transition_map();
9680  ASSERT(new_map->elements_kind() == next_kind);
9681  } else {
9682  MaybeObject* maybe_new_map =
9683  current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
9684  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
9685  }
9686  maps->set(next_kind, new_map);
9687  current_map = new_map;
9688  }
9689  native_context->set_js_array_maps(maps);
9690  return initial_map;
9691 }
9692 
9693 
9694 Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context,
9695  Handle<Map> initial_map) {
9696  CALL_HEAP_FUNCTION(native_context->GetIsolate(),
9697  CacheInitialJSArrayMaps(*native_context, *initial_map),
9698  Object);
9699 }
9700 
9701 
9703  Handle<Object> value) {
9704  ASSERT(value->IsJSReceiver());
9705 
9706  // First some logic for the map of the prototype to make sure it is in fast
9707  // mode.
9708  if (value->IsJSObject()) {
9709  JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
9710  }
9711 
9712  // Now some logic for the maps of the objects that are created by using this
9713  // function as a constructor.
9714  if (function->has_initial_map()) {
9715  // If the function has allocated the initial map replace it with a
9716  // copy containing the new prototype. Also complete any in-object
9717  // slack tracking that is in progress at this point because it is
9718  // still tracking the old copy.
9719  if (function->shared()->IsInobjectSlackTrackingInProgress()) {
9720  function->shared()->CompleteInobjectSlackTracking();
9721  }
9722  Handle<Map> new_map = Map::Copy(handle(function->initial_map()));
9723  new_map->set_prototype(*value);
9724 
9725  // If the function is used as the global Array function, cache the
9726  // initial map (and transitioned versions) in the native context.
9727  Context* native_context = function->context()->native_context();
9728  Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
9729  if (array_function->IsJSFunction() &&
9730  *function == JSFunction::cast(array_function)) {
9731  CacheInitialJSArrayMaps(handle(native_context), new_map);
9732  }
9733 
9734  function->set_initial_map(*new_map);
9735  } else {
9736  // Put the value in the initial map field until an initial map is
9737  // needed. At that point, a new initial map is created and the
9738  // prototype is put into the initial map where it belongs.
9739  function->set_prototype_or_initial_map(*value);
9740  }
9741  function->GetHeap()->ClearInstanceofCache();
9742 }
9743 
9744 
9745 void JSFunction::SetPrototype(Handle<JSFunction> function,
9746  Handle<Object> value) {
9747  ASSERT(function->should_have_prototype());
9748  Handle<Object> construct_prototype = value;
9749 
9750  // If the value is not a JSReceiver, store the value in the map's
9751  // constructor field so it can be accessed. Also, set the prototype
9752  // used for constructing objects to the original object prototype.
9753  // See ECMA-262 13.2.2.
9754  if (!value->IsJSReceiver()) {
9755  // Copy the map so this does not affect unrelated functions.
9756  // Remove map transitions because they point to maps with a
9757  // different prototype.
9758  Handle<Map> new_map = Map::Copy(handle(function->map()));
9759 
9760  JSObject::MigrateToMap(function, new_map);
9761  new_map->set_constructor(*value);
9762  new_map->set_non_instance_prototype(true);
9763  Isolate* isolate = new_map->GetIsolate();
9764  construct_prototype = handle(
9765  isolate->context()->native_context()->initial_object_prototype(),
9766  isolate);
9767  } else {
9768  function->map()->set_non_instance_prototype(false);
9769  }
9770 
9771  return SetInstancePrototype(function, construct_prototype);
9772 }
9773 
9774 
9775 void JSFunction::RemovePrototype() {
9776  Context* native_context = context()->native_context();
9777  Map* no_prototype_map = shared()->strict_mode() == SLOPPY
9778  ? native_context->sloppy_function_without_prototype_map()
9779  : native_context->strict_function_without_prototype_map();
9780 
9781  if (map() == no_prototype_map) return;
9782 
9783  ASSERT(map() == (shared()->strict_mode() == SLOPPY
9784  ? native_context->sloppy_function_map()
9785  : native_context->strict_function_map()));
9786 
9787  set_map(no_prototype_map);
9788  set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
9789 }
9790 
9791 
9792 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
9793  if (function->has_initial_map()) return;
9794  Isolate* isolate = function->GetIsolate();
9795 
9796  // First create a new map with the size and number of in-object properties
9797  // suggested by the function.
9798  InstanceType instance_type;
9799  int instance_size;
9800  int in_object_properties;
9801  if (function->shared()->is_generator()) {
9802  instance_type = JS_GENERATOR_OBJECT_TYPE;
9803  instance_size = JSGeneratorObject::kSize;
9804  in_object_properties = 0;
9805  } else {
9806  instance_type = JS_OBJECT_TYPE;
9807  instance_size = function->shared()->CalculateInstanceSize();
9808  in_object_properties = function->shared()->CalculateInObjectProperties();
9809  }
9810  Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);
9811 
9812  // Fetch or allocate prototype.
9813  Handle<Object> prototype;
9814  if (function->has_instance_prototype()) {
9815  prototype = handle(function->instance_prototype(), isolate);
9816  } else {
9817  prototype = isolate->factory()->NewFunctionPrototype(function);
9818  }
9819  map->set_inobject_properties(in_object_properties);
9820  map->set_unused_property_fields(in_object_properties);
9821  map->set_prototype(*prototype);
9822  ASSERT(map->has_fast_object_elements());
9823 
9824  if (!function->shared()->is_generator()) {
9825  function->shared()->StartInobjectSlackTracking(*map);
9826  }
9827 
9828  // Finally link initial map and constructor function.
9829  function->set_initial_map(*map);
9830  map->set_constructor(*function);
9831 }
9832 
9833 
9834 void JSFunction::SetInstanceClassName(String* name) {
9835  shared()->set_instance_class_name(name);
9836 }
9837 
9838 
9839 void JSFunction::PrintName(FILE* out) {
9840  SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9841  PrintF(out, "%s", name.get());
9842 }
9843 
9844 
9847 }
9848 
9849 
9850 // The filter is a pattern that matches function names in this way:
9851 // "*" all; the default
9852 // "-" all but the top-level function
9853 // "-name" all but the function "name"
9854 // "" only the top-level function
9855 // "name" only the function "name"
9856 // "name*" only functions starting with "name"
9857 bool JSFunction::PassesFilter(const char* raw_filter) {
9858  if (*raw_filter == '*') return true;
9859  String* name = shared()->DebugName();
9860  Vector<const char> filter = CStrVector(raw_filter);
9861  if (filter.length() == 0) return name->length() == 0;
9862  if (filter[0] == '-') {
9863  // Negative filter.
9864  if (filter.length() == 1) {
9865  return (name->length() != 0);
9866  } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
9867  return false;
9868  }
9869  if (filter[filter.length() - 1] == '*' &&
9870  name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
9871  return false;
9872  }
9873  return true;
9874 
9875  } else if (name->IsUtf8EqualTo(filter)) {
9876  return true;
9877  }
9878  if (filter[filter.length() - 1] == '*' &&
9879  name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
9880  return true;
9881  }
9882  return false;
9883 }
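The same filter semantics can be shown on a plain std::string as a standalone sketch; PassesFilterSketch and StartsWith are illustrative names, not V8 API. "*" matches everything, "" matches only the nameless top-level function, a leading "-" negates, and a trailing "*" turns the pattern into a prefix match.

#include <string>

static bool StartsWith(const std::string& name, const std::string& prefix) {
  return name.compare(0, prefix.size(), prefix) == 0;
}

static bool PassesFilterSketch(const std::string& name, const std::string& filter) {
  if (filter == "*") return true;                 // all functions
  if (filter.empty()) return name.empty();        // only the top-level function
  if (filter[0] == '-') {
    const std::string rest = filter.substr(1);
    if (rest.empty()) return !name.empty();       // all but the top-level function
    if (name == rest) return false;               // all but the exact name
    if (rest.back() == '*' &&
        StartsWith(name, rest.substr(0, rest.size() - 1))) {
      return false;                               // all but names with this prefix
    }
    return true;
  }
  if (name == filter) return true;                // only the exact name
  return filter.back() == '*' &&
         StartsWith(name, filter.substr(0, filter.size() - 1));
}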
9884 
9885 
9886 MaybeObject* Oddball::Initialize(Heap* heap,
9887  const char* to_string,
9888  Object* to_number,
9889  byte kind) {
9890  String* internalized_to_string;
9891  { MaybeObject* maybe_string =
9892  heap->InternalizeUtf8String(
9893  CStrVector(to_string));
9894  if (!maybe_string->To(&internalized_to_string)) return maybe_string;
9895  }
9896  set_to_string(internalized_to_string);
9897  set_to_number(to_number);
9898  set_kind(kind);
9899  return this;
9900 }
9901 
9902 
9903 String* SharedFunctionInfo::DebugName() {
9904  Object* n = name();
9905  if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
9906  return String::cast(n);
9907 }
9908 
9909 
9910 bool SharedFunctionInfo::HasSourceCode() {
9911  return !script()->IsUndefined() &&
9912  !reinterpret_cast<Script*>(script())->source()->IsUndefined();
9913 }
9914 
9915 
9916 Handle<Object> SharedFunctionInfo::GetSourceCode() {
9917  if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
9918  Handle<String> source(String::cast(Script::cast(script())->source()));
9919  return GetIsolate()->factory()->NewSubString(
9920  source, start_position(), end_position());
9921 }
9922 
9923 
9924 bool SharedFunctionInfo::IsInlineable() {
9925  // Check that the function has a script associated with it.
9926  if (!script()->IsScript()) return false;
9927  if (optimization_disabled()) return false;
9928  // If we never ran this (unlikely) then let's try to optimize it.
9929  if (code()->kind() != Code::FUNCTION) return true;
9930  return code()->optimizable();
9931 }
9932 
9933 
9934 int SharedFunctionInfo::SourceSize() {
9935  return end_position() - start_position();
9936 }
9937 
9938 
9939 int SharedFunctionInfo::CalculateInstanceSize() {
9940  int instance_size =
9941  JSObject::kHeaderSize +
9942  expected_nof_properties() * kPointerSize;
9943  if (instance_size > JSObject::kMaxInstanceSize) {
9944  instance_size = JSObject::kMaxInstanceSize;
9945  }
9946  return instance_size;
9947 }
9948 
9949 
9950 int SharedFunctionInfo::CalculateInObjectProperties() {
9951  return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize;
9952 }
9953 
9954 
9955 // Support function for printing the source code to a StringStream
9956 // without any allocation in the heap.
9957 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
9958  int max_length) {
9959  // For some native functions there is no source.
9960  if (!HasSourceCode()) {
9961  accumulator->Add("<No Source>");
9962  return;
9963  }
9964 
9965  // Get the source for the script which this function came from.
9966  // Don't use String::cast because we don't want more assertion errors while
9967  // we are already creating a stack dump.
9968  String* script_source =
9969  reinterpret_cast<String*>(Script::cast(script())->source());
9970 
9971  if (!script_source->LooksValid()) {
9972  accumulator->Add("<Invalid Source>");
9973  return;
9974  }
9975 
9976  if (!is_toplevel()) {
9977  accumulator->Add("function ");
9978  Object* name = this->name();
9979  if (name->IsString() && String::cast(name)->length() > 0) {
9980  accumulator->PrintName(name);
9981  }
9982  }
9983 
9984  int len = end_position() - start_position();
9985  if (len <= max_length || max_length < 0) {
9986  accumulator->Put(script_source, start_position(), end_position());
9987  } else {
9988  accumulator->Put(script_source,
9989  start_position(),
9990  start_position() + max_length);
9991  accumulator->Add("...\n");
9992  }
9993 }
9994 
9995 
9996 static bool IsCodeEquivalent(Code* code, Code* recompiled) {
9997  if (code->instruction_size() != recompiled->instruction_size()) return false;
9998  ByteArray* code_relocation = code->relocation_info();
9999  ByteArray* recompiled_relocation = recompiled->relocation_info();
10000  int length = code_relocation->length();
10001  if (length != recompiled_relocation->length()) return false;
10002  int compare = memcmp(code_relocation->GetDataStartAddress(),
10003  recompiled_relocation->GetDataStartAddress(),
10004  length);
10005  return compare == 0;
10006 }
10007 
10008 
10009 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
10010  ASSERT(!has_deoptimization_support());
10011  DisallowHeapAllocation no_allocation;
10012  Code* code = this->code();
10013  if (IsCodeEquivalent(code, recompiled)) {
10014  // Copy the deoptimization data from the recompiled code.
10015  code->set_deoptimization_data(recompiled->deoptimization_data());
10016  code->set_has_deoptimization_support(true);
10017  } else {
10018  // TODO(3025757): In case the recompiled isn't equivalent to the
10019  // old code, we have to replace it. We should try to avoid this
10020  // altogether because it flushes valuable type feedback by
10021  // effectively resetting all IC state.
10022  ReplaceCode(recompiled);
10023  }
10024  ASSERT(has_deoptimization_support());
10025 }
10026 
10027 
10028 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
10029  // Disable optimization for the shared function info and mark the
10030  // code as non-optimizable. The marker on the shared function info
10031  // is there because we flush non-optimized code thereby losing the
10032  // non-optimizable information for the code. When the code is
10033  // regenerated and set on the shared function info it is marked as
10034  // non-optimizable if optimization is disabled for the shared
10035  // function info.
10036  set_optimization_disabled(true);
10037  set_bailout_reason(reason);
10038  // Code should be the lazy compilation stub or else unoptimized. If the
10039  // latter, disable optimization for the code too.
10040  ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
10041  if (code()->kind() == Code::FUNCTION) {
10042  code()->set_optimizable(false);
10043  }
10044  PROFILE(GetIsolate(),
10045  LogExistingFunction(Handle<SharedFunctionInfo>(this),
10046  Handle<Code>(code())));
10047  if (FLAG_trace_opt) {
10048  PrintF("[disabled optimization for ");
10049  ShortPrint();
10050  PrintF(", reason: %s]\n", GetBailoutReason(reason));
10051  }
10052 }
10053 
10054 
10055 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
10056  ASSERT(!id.IsNone());
10057  Code* unoptimized = code();
10058  DeoptimizationOutputData* data =
10059  DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
10060  unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
10061  USE(ignore);
10062  return true; // Return true if there was no ASSERT.
10063 }
10064 
10065 
10066 void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
10067  ASSERT(!IsInobjectSlackTrackingInProgress());
10068 
10069  if (!FLAG_clever_optimizations) return;
10070 
10071  // Only initiate the tracking the first time.
10072  if (live_objects_may_exist()) return;
10073  set_live_objects_may_exist(true);
10074 
10075  // No tracking during the snapshot construction phase.
10076  if (Serializer::enabled()) return;
10077 
10078  if (map->unused_property_fields() == 0) return;
10079 
10080  // Nonzero counter is a leftover from the previous attempt interrupted
10081  // by GC, keep it.
10082  if (construction_count() == 0) {
10083  set_construction_count(kGenerousAllocationCount);
10084  }
10085  set_initial_map(map);
10086  Builtins* builtins = map->GetHeap()->isolate()->builtins();
10087  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10088  construct_stub());
10089  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10090 }
10091 
10092 
10093 // Called from GC, hence reinterpret_cast and unchecked accessors.
10094 void SharedFunctionInfo::DetachInitialMap() {
10095  Map* map = reinterpret_cast<Map*>(initial_map());
10096 
10097  // Make the map remember to restore the link if it survives the GC.
10098  map->set_bit_field2(
10099  map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
10100 
10101  // Undo state changes made by StartInobjectTracking (except the
10102  // construction_count). This way if the initial map does not survive the GC
10103  // then StartInobjectTracking will be called again the next time the
10104  // constructor is called. The countdown will continue and (possibly after
10105  // several more GCs) CompleteInobjectSlackTracking will eventually be called.
10106  Heap* heap = map->GetHeap();
10107  set_initial_map(heap->undefined_value());
10108  Builtins* builtins = heap->isolate()->builtins();
10109  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10110  *RawField(this, kConstructStubOffset));
10111  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
10112  // It is safe to clear the flag: it will be set again if the map is live.
10113  set_live_objects_may_exist(false);
10114 }
10115 
10116 
10117 // Called from GC, hence reinterpret_cast and unchecked accessors.
10118 void SharedFunctionInfo::AttachInitialMap(Map* map) {
10119  map->set_bit_field2(
10120  map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
10121 
10122  // Resume inobject slack tracking.
10123  set_initial_map(map);
10124  Builtins* builtins = map->GetHeap()->isolate()->builtins();
10125  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
10126  *RawField(this, kConstructStubOffset));
10127  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
10128  // The map survived the gc, so there may be objects referencing it.
10129  set_live_objects_may_exist(true);
10130 }
10131 
10132 
10133 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
10134  code()->ClearInlineCaches();
10135  set_ic_age(new_ic_age);
10136  if (code()->kind() == Code::FUNCTION) {
10137  code()->set_profiler_ticks(0);
10138  if (optimization_disabled() &&
10139  opt_count() >= FLAG_max_opt_count) {
10140  // Re-enable optimizations if they were disabled due to opt_count limit.
10141  set_optimization_disabled(false);
10142  code()->set_optimizable(true);
10143  }
10144  set_opt_count(0);
10145  set_deopt_count(0);
10146  }
10147 }
10148 
10149 
10150 static void GetMinInobjectSlack(Map* map, void* data) {
10151  int slack = map->unused_property_fields();
10152  if (*reinterpret_cast<int*>(data) > slack) {
10153  *reinterpret_cast<int*>(data) = slack;
10154  }
10155 }
10156 
10157 
10158 static void ShrinkInstanceSize(Map* map, void* data) {
10159  int slack = *reinterpret_cast<int*>(data);
10160  map->set_inobject_properties(map->inobject_properties() - slack);
10161  map->set_unused_property_fields(map->unused_property_fields() - slack);
10162  map->set_instance_size(map->instance_size() - slack * kPointerSize);
10163 
10164  // Visitor id might depend on the instance size, recalculate it.
10165  map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
10166 }
10167 
10168 
10169 void SharedFunctionInfo::CompleteInobjectSlackTracking() {
10170  ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
10171  Map* map = Map::cast(initial_map());
10172 
10173  Heap* heap = map->GetHeap();
10174  set_initial_map(heap->undefined_value());
10175  Builtins* builtins = heap->isolate()->builtins();
10176  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
10177  construct_stub());
10178  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
10179 
10180  int slack = map->unused_property_fields();
10181  map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
10182  if (slack != 0) {
10183  // Resize the initial map and all maps in its transition tree.
10184  map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
10185 
10186  // Give the correct expected_nof_properties to initial maps created later.
10187  ASSERT(expected_nof_properties() >= slack);
10188  set_expected_nof_properties(expected_nof_properties() - slack);
10189  }
10190 }
10191 
10192 
10193 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10194  BailoutId osr_ast_id) {
10195  ASSERT(native_context->IsNativeContext());
10196  if (!FLAG_cache_optimized_code) return -1;
10197  Object* value = optimized_code_map();
10198  if (!value->IsSmi()) {
10199  FixedArray* optimized_code_map = FixedArray::cast(value);
10200  int length = optimized_code_map->length();
10201  Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10202  for (int i = kEntriesStart; i < length; i += kEntryLength) {
10203  if (optimized_code_map->get(i + kContextOffset) == native_context &&
10204  optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10205  return i + kCachedCodeOffset;
10206  }
10207  }
10208  if (FLAG_trace_opt) {
10209  PrintF("[didn't find optimized code in optimized code map for ");
10210  ShortPrint();
10211  PrintF("]\n");
10212  }
10213  }
10214  return -1;
10215 }
10216 
10217 
10218 #define DECLARE_TAG(ignore1, name, ignore2) name,
10219 const char* const VisitorSynchronization::kTags[
10220  VisitorSynchronization::kNumberOfSyncTags] = {
10221  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
10222 };
10223 #undef DECLARE_TAG
10224 
10225 
10226 #define DECLARE_TAG(ignore1, ignore2, name) name,
10227 const char* const VisitorSynchronization::kTagNames[
10228  VisitorSynchronization::kNumberOfSyncTags] = {
10229  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
10230 };
10231 #undef DECLARE_TAG
10232 
10233 
10234 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
10235  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
10236  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
10237  Object* old_target = target;
10238  VisitPointer(&target);
10239  CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
10240 }
10241 
10242 
10243 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
10244  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
10245  Object* stub = rinfo->code_age_stub();
10246  if (stub) {
10247  VisitPointer(&stub);
10248  }
10249 }
10250 
10251 
10252 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
10253  Object* code = Code::GetObjectFromEntryAddress(entry_address);
10254  Object* old_code = code;
10255  VisitPointer(&code);
10256  if (code != old_code) {
10257  Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
10258  }
10259 }
10260 
10261 
10262 void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
10263  ASSERT(rinfo->rmode() == RelocInfo::CELL);
10264  Object* cell = rinfo->target_cell();
10265  Object* old_cell = cell;
10266  VisitPointer(&cell);
10267  if (cell != old_cell) {
10268  rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
10269  }
10270 }
10271 
10272 
10273 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
10274  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
10275  rinfo->IsPatchedReturnSequence()) ||
10276  (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
10277  rinfo->IsPatchedDebugBreakSlotSequence()));
10278  Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
10279  Object* old_target = target;
10280  VisitPointer(&target);
10281  CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
10282 }
10283 
10284 
10285 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
10286  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
10287  Object* p = rinfo->target_object();
10288  VisitPointer(&p);
10289 }
10290 
10291 
10292 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
10293  Address p = rinfo->target_reference();
10294  VisitExternalReference(&p);
10295 }
10296 
10297 
10298 void Code::InvalidateRelocation() {
10299  set_relocation_info(GetHeap()->empty_byte_array());
10300 }
10301 
10302 
10303 void Code::InvalidateEmbeddedObjects() {
10304  Object* undefined = GetHeap()->undefined_value();
10305  Cell* undefined_cell = GetHeap()->undefined_cell();
10306  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10307  RelocInfo::ModeMask(RelocInfo::CELL);
10308  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10309  RelocInfo::Mode mode = it.rinfo()->rmode();
10310  if (mode == RelocInfo::EMBEDDED_OBJECT) {
10311  it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
10312  } else if (mode == RelocInfo::CELL) {
10313  it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
10314  }
10315  }
10316 }
10317 
10318 
10319 void Code::Relocate(intptr_t delta) {
10320  for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
10321  it.rinfo()->apply(delta);
10322  }
10323  CPU::FlushICache(instruction_start(), instruction_size());
10324 }
10325 
10326 
10327 void Code::CopyFrom(const CodeDesc& desc) {
10328  ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);
10329 
10330  // copy code
10331  CopyBytes(instruction_start(), desc.buffer,
10332  static_cast<size_t>(desc.instr_size));
10333 
10334  // copy reloc info
10335  CopyBytes(relocation_start(),
10336  desc.buffer + desc.buffer_size - desc.reloc_size,
10337  static_cast<size_t>(desc.reloc_size));
10338 
10339  // unbox handles and relocate
10340  intptr_t delta = instruction_start() - desc.buffer;
10341  int mode_mask = RelocInfo::kCodeTargetMask |
10342  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
10343  RelocInfo::ModeMask(RelocInfo::CELL) |
10344  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
10345  RelocInfo::kApplyMask;
10346  // Needed to find target_object and runtime_entry on X64
10347  Assembler* origin = desc.origin;
10348  AllowDeferredHandleDereference embedding_raw_address;
10349  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
10350  RelocInfo::Mode mode = it.rinfo()->rmode();
10351  if (mode == RelocInfo::EMBEDDED_OBJECT) {
10352  Handle<Object> p = it.rinfo()->target_object_handle(origin);
10353  it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER);
10354  } else if (mode == RelocInfo::CELL) {
10355  Handle<Cell> cell = it.rinfo()->target_cell_handle();
10356  it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER);
10357  } else if (RelocInfo::IsCodeTarget(mode)) {
10358  // rewrite code handles in inline cache targets to direct
10359  // pointers to the first instruction in the code object
10360  Handle<Object> p = it.rinfo()->target_object_handle(origin);
10361  Code* code = Code::cast(*p);
10362  it.rinfo()->set_target_address(code->instruction_start(),
10363  SKIP_WRITE_BARRIER);
10364  } else if (RelocInfo::IsRuntimeEntry(mode)) {
10365  Address p = it.rinfo()->target_runtime_entry(origin);
10366  it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER);
10367  } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
10368  Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
10369  Code* code = Code::cast(*p);
10370  it.rinfo()->set_code_age_stub(code);
10371  } else {
10372  it.rinfo()->apply(delta);
10373  }
10374  }
10375  CPU::FlushICache(instruction_start(), instruction_size());
10376 }
10377 
10378 
10379 // Locate the source position which is closest to the address in the code. This
10380 // is using the source position information embedded in the relocation info.
10381 // The position returned is relative to the beginning of the script where the
10382 // source for this function is found.
10383 int Code::SourcePosition(Address pc) {
10384  int distance = kMaxInt;
10385  int position = RelocInfo::kNoPosition; // Initially no position found.
10386  // Run through all the relocation info to find the best matching source
10387  // position. All the code needs to be considered as the sequence of the
10388  // instructions in the code does not necessarily follow the same order as the
10389  // source.
10390  RelocIterator it(this, RelocInfo::kPositionMask);
10391  while (!it.done()) {
10392  // Only look at positions after the current pc.
10393  if (it.rinfo()->pc() < pc) {
10394  // Get position and distance.
10395 
10396  int dist = static_cast<int>(pc - it.rinfo()->pc());
10397  int pos = static_cast<int>(it.rinfo()->data());
10398  // If this position is closer than the current candidate or if it has the
10399  // same distance as the current candidate and the position is higher, then
10400  // this position is the new candidate.
10401  if ((dist < distance) ||
10402  (dist == distance && pos > position)) {
10403  position = pos;
10404  distance = dist;
10405  }
10406  }
10407  it.next();
10408  }
10409  return position;
10410 }
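// Illustrative walk-through (hypothetical reloc data, not part of this file):
// suppose position entries are recorded at pc offsets 0x10 -> 7, 0x24 -> 42
// and 0x30 -> 55, and SourcePosition() is asked about the pc at offset 0x28.
// Only 0x10 and 0x24 lie before the pc, with distances 0x18 and 0x04; the
// entry at 0x24 is closer, so the function returns source position 42.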
10411 
10412 
10413 // Same as Code::SourcePosition above except it only looks for statement
10414 // positions.
10415 int Code::SourceStatementPosition(Address pc) {
10416  // First find the position as close as possible using all position
10417  // information.
10418  int position = SourcePosition(pc);
10419  // Now find the closest statement position before the position.
10420  int statement_position = 0;
10421  RelocIterator it(this, RelocInfo::kPositionMask);
10422  while (!it.done()) {
10423  if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
10424  int p = static_cast<int>(it.rinfo()->data());
10425  if (statement_position < p && p <= position) {
10426  statement_position = p;
10427  }
10428  }
10429  it.next();
10430  }
10431  return statement_position;
10432 }
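// Illustrative walk-through (hypothetical data, not part of this file): if
// SourcePosition(pc) is 42 and the statement positions in the reloc info are
// {7, 30, 50}, SourceStatementPosition(pc) returns 30, the closest statement
// position that does not lie past 42.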
10433 
10434 
10435 SafepointEntry Code::GetSafepointEntry(Address pc) {
10436  SafepointTable table(this);
10437  return table.FindEntry(pc);
10438 }
10439 
10440 
10441 Object* Code::FindNthObject(int n, Map* match_map) {
10443  DisallowHeapAllocation no_allocation;
10444  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10445  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10446  RelocInfo* info = it.rinfo();
10447  Object* object = info->target_object();
10448  if (object->IsHeapObject()) {
10449  if (HeapObject::cast(object)->map() == match_map) {
10450  if (--n == 0) return object;
10451  }
10452  }
10453  }
10454  return NULL;
10455 }
10456 
10457 
10458 AllocationSite* Code::FindFirstAllocationSite() {
10459  Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10460  return (result != NULL) ? AllocationSite::cast(result) : NULL;
10461 }
10462 
10463 
10464 Map* Code::FindFirstMap() {
10465  Object* result = FindNthObject(1, GetHeap()->meta_map());
10466  return (result != NULL) ? Map::cast(result) : NULL;
10467 }
10468 
10469 
10470 void Code::FindAndReplace(const FindAndReplacePattern& pattern) {
10472  DisallowHeapAllocation no_allocation;
10473  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10474  STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32);
10475  int current_pattern = 0;
10476  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10477  RelocInfo* info = it.rinfo();
10478  Object* object = info->target_object();
10479  if (object->IsHeapObject()) {
10480  Map* map = HeapObject::cast(object)->map();
10481  if (map == *pattern.find_[current_pattern]) {
10482  info->set_target_object(*pattern.replace_[current_pattern]);
10483  if (++current_pattern == pattern.count_) return;
10484  }
10485  }
10486  }
10487  UNREACHABLE();
10488 }
10489 
10490 
10491 void Code::FindAllMaps(MapHandleList* maps) {
10493  DisallowHeapAllocation no_allocation;
10494  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10495  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10496  RelocInfo* info = it.rinfo();
10497  Object* object = info->target_object();
10498  if (object->IsMap()) maps->Add(handle(Map::cast(object)));
10499  }
10500 }
10501 
10502 
10503 void Code::FindAllTypes(TypeHandleList* types) {
10505  DisallowHeapAllocation no_allocation;
10506  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10507  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10508  RelocInfo* info = it.rinfo();
10509  Object* object = info->target_object();
10510  if (object->IsMap()) {
10511  Handle<Map> map(Map::cast(object));
10512  types->Add(IC::MapToType<HeapType>(map, map->GetIsolate()));
10513  }
10514  }
10515 }
10516 
10517 
10518 Code* Code::FindFirstHandler() {
10520  DisallowHeapAllocation no_allocation;
10521  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10522  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10523  RelocInfo* info = it.rinfo();
10524  Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10525  if (code->kind() == Code::HANDLER) return code;
10526  }
10527  return NULL;
10528 }
10529 
10530 
10531 bool Code::FindHandlers(CodeHandleList* code_list, int length) {
10533  DisallowHeapAllocation no_allocation;
10534  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
10535  int i = 0;
10536  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10537  if (i == length) return true;
10538  RelocInfo* info = it.rinfo();
10539  Code* code = Code::GetCodeFromTargetAddress(info->target_address());
10540  // IC stubs with handlers never contain non-handler code objects before
10541  // handler targets.
10542  if (code->kind() != Code::HANDLER) break;
10543  code_list->Add(Handle<Code>(code));
10544  i++;
10545  }
10546  return i == length;
10547 }
10548 
10549 
10550 Name* Code::FindFirstName() {
10552  DisallowHeapAllocation no_allocation;
10553  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
10554  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10555  RelocInfo* info = it.rinfo();
10556  Object* object = info->target_object();
10557  if (object->IsName()) return Name::cast(object);
10558  }
10559  return NULL;
10560 }
10561 
10562 
10563 void Code::ClearInlineCaches() {
10564  ClearInlineCaches(NULL);
10565 }
10566 
10567 
10568 void Code::ClearInlineCaches(Code::Kind kind) {
10569  ClearInlineCaches(&kind);
10570 }
10571 
10572 
10573 void Code::ClearInlineCaches(Code::Kind* kind) {
10574  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
10575  RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
10576  RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
10577  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10578  RelocInfo* info = it.rinfo();
10579  Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
10580  if (target->is_inline_cache_stub()) {
10581  if (kind == NULL || *kind == target->kind()) {
10582  IC::Clear(this->GetIsolate(), info->pc(),
10583  info->host()->constant_pool());
10584  }
10585  }
10586  }
10587 }
10588 
10589 
10590 void Code::ClearTypeFeedbackInfo(Heap* heap) {
10591  if (kind() != FUNCTION) return;
10592  Object* raw_info = type_feedback_info();
10593  if (raw_info->IsTypeFeedbackInfo()) {
10594  FixedArray* feedback_vector =
10595  TypeFeedbackInfo::cast(raw_info)->feedback_vector();
10596  for (int i = 0; i < feedback_vector->length(); i++) {
10597  Object* obj = feedback_vector->get(i);
10598  if (!obj->IsAllocationSite()) {
10599  // TODO(mvstanton): Can't I avoid a write barrier for this sentinel?
10600  feedback_vector->set(i,
10601  TypeFeedbackInfo::RawUninitializedSentinel(heap));
10602  }
10603  }
10604  }
10605 }
10606 
10607 
10608 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
10609  DisallowHeapAllocation no_gc;
10610  ASSERT(kind() == FUNCTION);
10611  BackEdgeTable back_edges(this, &no_gc);
10612  for (uint32_t i = 0; i < back_edges.length(); i++) {
10613  if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10614  }
10615  return BailoutId::None();
10616 }
10617 
10618 
10619 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10620  DisallowHeapAllocation no_gc;
10621  ASSERT(kind() == FUNCTION);
10622  BackEdgeTable back_edges(this, &no_gc);
10623  for (uint32_t i = 0; i < back_edges.length(); i++) {
10624  if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10625  }
10626  UNREACHABLE(); // We expect to find the back edge.
10627  return 0;
10628 }
10629 
10630 
10631 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10632  PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
10633 }
10634 
10635 
10636 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10637  PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10638  NO_MARKING_PARITY);
10639 }
10640 
10641 
10642 static Code::Age EffectiveAge(Code::Age age) {
10643  if (age == Code::kNotExecutedCodeAge) {
10644  // Treat code that's never been executed as old immediately.
10645  age = Code::kIsOldCodeAge;
10646  } else if (age == Code::kExecutedOnceCodeAge) {
10647  // Pre-age code that has only been executed once.
10648  age = Code::kPreAgedCodeAge;
10649  }
10650  return age;
10651 }
10652 
10653 
10654 void Code::MakeOlder(MarkingParity current_parity) {
10655  byte* sequence = FindCodeAgeSequence();
10656  if (sequence != NULL) {
10657  Age age;
10658  MarkingParity code_parity;
10659  GetCodeAgeAndParity(sequence, &age, &code_parity);
10660  age = EffectiveAge(age);
10661  if (age != kLastCodeAge && code_parity != current_parity) {
10662  PatchPlatformCodeAge(GetIsolate(),
10663  sequence,
10664  static_cast<Age>(age + 1),
10665  current_parity);
10666  }
10667  }
10668 }
10669 
10670 
10671 bool Code::IsOld() {
10672  return GetAge() >= kIsOldCodeAge;
10673 }
10674 
10675 
10676 byte* Code::FindCodeAgeSequence() {
10677  return FLAG_age_code &&
10678  prologue_offset() != Code::kPrologueOffsetNotSet &&
10679  (kind() == OPTIMIZED_FUNCTION ||
10680  (kind() == FUNCTION && !has_debug_break_slots()))
10681  ? instruction_start() + prologue_offset()
10682  : NULL;
10683 }
10684 
10685 
10686 Code::Age Code::GetAge() {
10687  return EffectiveAge(GetRawAge());
10688 }
10689 
10690 
10691 Code::Age Code::GetRawAge() {
10692  byte* sequence = FindCodeAgeSequence();
10693  if (sequence == NULL) {
10694  return kNoAgeCodeAge;
10695  }
10696  Age age;
10697  MarkingParity parity;
10698  GetCodeAgeAndParity(sequence, &age, &parity);
10699  return age;
10700 }
10701 
10702 
10703 void Code::GetCodeAgeAndParity(Code* code, Age* age,
10704  MarkingParity* parity) {
10705  Isolate* isolate = code->GetIsolate();
10706  Builtins* builtins = isolate->builtins();
10707  Code* stub = NULL;
10708 #define HANDLE_CODE_AGE(AGE) \
10709  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
10710  if (code == stub) { \
10711  *age = k##AGE##CodeAge; \
10712  *parity = EVEN_MARKING_PARITY; \
10713  return; \
10714  } \
10715  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10716  if (code == stub) { \
10717  *age = k##AGE##CodeAge; \
10718  *parity = ODD_MARKING_PARITY; \
10719  return; \
10720  }
10721 CODE_AGE_LIST(HANDLE_CODE_AGE)
10722 #undef HANDLE_CODE_AGE
10723  stub = *builtins->MarkCodeAsExecutedOnce();
10724  if (code == stub) {
10725  *age = kNotExecutedCodeAge;
10726  *parity = NO_MARKING_PARITY;
10727  return;
10728  }
10729  stub = *builtins->MarkCodeAsExecutedTwice();
10730  if (code == stub) {
10731  *age = kExecutedOnceCodeAge;
10732  *parity = NO_MARKING_PARITY;
10733  return;
10734  }
10735  UNREACHABLE();
10736 }
10737 
10738 
10739 Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
10740  Builtins* builtins = isolate->builtins();
10741  switch (age) {
10742 #define HANDLE_CODE_AGE(AGE) \
10743  case k##AGE##CodeAge: { \
10744  Code* stub = parity == EVEN_MARKING_PARITY \
10745  ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
10746  : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
10747  return stub; \
10748  }
10749 CODE_AGE_LIST(HANDLE_CODE_AGE)
10750 #undef HANDLE_CODE_AGE
10751  case kNotExecutedCodeAge: {
10752  ASSERT(parity == NO_MARKING_PARITY);
10753  return *builtins->MarkCodeAsExecutedOnce();
10754  }
10755  case kExecutedOnceCodeAge: {
10756  ASSERT(parity == NO_MARKING_PARITY);
10757  return *builtins->MarkCodeAsExecutedTwice();
10758  }
10759  default:
10760  UNREACHABLE();
10761  break;
10762  }
10763  return NULL;
10764 }
10765 
10766 
10767 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
10768  const char* last_comment = NULL;
10769  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
10770  | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
10771  for (RelocIterator it(this, mask); !it.done(); it.next()) {
10772  RelocInfo* info = it.rinfo();
10773  if (info->rmode() == RelocInfo::COMMENT) {
10774  last_comment = reinterpret_cast<const char*>(info->data());
10775  } else if (last_comment != NULL) {
10776  if ((bailout_id == Deoptimizer::GetDeoptimizationId(
10777  GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
10778  (bailout_id == Deoptimizer::GetDeoptimizationId(
10779  GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
10780  CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
10781  PrintF(out, " %s\n", last_comment);
10782  return;
10783  }
10784  }
10785  }
10786 }
10787 
10788 
10789 bool Code::CanDeoptAt(Address pc) {
10790  DeoptimizationInputData* deopt_data =
10791  DeoptimizationInputData::cast(deoptimization_data());
10792  Address code_start_address = instruction_start();
10793  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
10794  if (deopt_data->Pc(i)->value() == -1) continue;
10795  Address address = code_start_address + deopt_data->Pc(i)->value();
10796  if (address == pc) return true;
10797  }
10798  return false;
10799 }
10800 
10801 
10802 // Identify kind of code.
10803 const char* Code::Kind2String(Kind kind) {
10804  switch (kind) {
10805 #define CASE(name) case name: return #name;
10806  CODE_KIND_LIST(CASE)
10807 #undef CASE
10808  case NUMBER_OF_KINDS: break;
10809  }
10810  UNREACHABLE();
10811  return NULL;
10812 }
10813 
10814 
10815 #ifdef ENABLE_DISASSEMBLER
10816 
10817 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
10818  disasm::NameConverter converter;
10819  int deopt_count = DeoptCount();
10820  PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
10821  if (0 == deopt_count) return;
10822 
10823  PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
10824  FLAG_print_code_verbose ? "commands" : "");
10825  for (int i = 0; i < deopt_count; i++) {
10826  PrintF(out, "%6d %6d %6d %6d",
10827  i,
10828  AstId(i).ToInt(),
10829  ArgumentsStackHeight(i)->value(),
10830  Pc(i)->value());
10831 
10832  if (!FLAG_print_code_verbose) {
10833  PrintF(out, "\n");
10834  continue;
10835  }
10836  // Print details of the frame translation.
10837  int translation_index = TranslationIndex(i)->value();
10838  TranslationIterator iterator(TranslationByteArray(), translation_index);
10839  Translation::Opcode opcode =
10840  static_cast<Translation::Opcode>(iterator.Next());
10841  ASSERT(Translation::BEGIN == opcode);
10842  int frame_count = iterator.Next();
10843  int jsframe_count = iterator.Next();
10844  PrintF(out, " %s {frame count=%d, js frame count=%d}\n",
10845  Translation::StringFor(opcode),
10846  frame_count,
10847  jsframe_count);
10848 
10849  while (iterator.HasNext() &&
10850  Translation::BEGIN !=
10851  (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
10852  PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
10853 
10854  switch (opcode) {
10855  case Translation::BEGIN:
10856  UNREACHABLE();
10857  break;
10858 
10859  case Translation::JS_FRAME: {
10860  int ast_id = iterator.Next();
10861  int function_id = iterator.Next();
10862  unsigned height = iterator.Next();
10863  PrintF(out, "{ast_id=%d, function=", ast_id);
10864  if (function_id != Translation::kSelfLiteralId) {
10865  Object* function = LiteralArray()->get(function_id);
10866  JSFunction::cast(function)->PrintName(out);
10867  } else {
10868  PrintF(out, "<self>");
10869  }
10870  PrintF(out, ", height=%u}", height);
10871  break;
10872  }
10873 
10874  case Translation::COMPILED_STUB_FRAME: {
10875  Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
10876  PrintF(out, "{kind=%d}", stub_kind);
10877  break;
10878  }
10879 
10880  case Translation::ARGUMENTS_ADAPTOR_FRAME:
10881  case Translation::CONSTRUCT_STUB_FRAME: {
10882  int function_id = iterator.Next();
10883  JSFunction* function =
10884  JSFunction::cast(LiteralArray()->get(function_id));
10885  unsigned height = iterator.Next();
10886  PrintF(out, "{function=");
10887  function->PrintName(out);
10888  PrintF(out, ", height=%u}", height);
10889  break;
10890  }
10891 
10892  case Translation::GETTER_STUB_FRAME:
10893  case Translation::SETTER_STUB_FRAME: {
10894  int function_id = iterator.Next();
10895  JSFunction* function =
10896  JSFunction::cast(LiteralArray()->get(function_id));
10897  PrintF(out, "{function=");
10898  function->PrintName(out);
10899  PrintF(out, "}");
10900  break;
10901  }
10902 
10903  case Translation::REGISTER: {
10904  int reg_code = iterator.Next();
10905  PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
10906  break;
10907  }
10908 
10909  case Translation::INT32_REGISTER: {
10910  int reg_code = iterator.Next();
10911  PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
10912  break;
10913  }
10914 
10915  case Translation::UINT32_REGISTER: {
10916  int reg_code = iterator.Next();
10917  PrintF(out, "{input=%s (unsigned)}",
10918  converter.NameOfCPURegister(reg_code));
10919  break;
10920  }
10921 
10922  case Translation::DOUBLE_REGISTER: {
10923  int reg_code = iterator.Next();
10924  PrintF(out, "{input=%s}",
10925  DoubleRegister::AllocationIndexToString(reg_code));
10926  break;
10927  }
10928 
10929  case Translation::STACK_SLOT: {
10930  int input_slot_index = iterator.Next();
10931  PrintF(out, "{input=%d}", input_slot_index);
10932  break;
10933  }
10934 
10935  case Translation::INT32_STACK_SLOT: {
10936  int input_slot_index = iterator.Next();
10937  PrintF(out, "{input=%d}", input_slot_index);
10938  break;
10939  }
10940 
10941  case Translation::UINT32_STACK_SLOT: {
10942  int input_slot_index = iterator.Next();
10943  PrintF(out, "{input=%d (unsigned)}", input_slot_index);
10944  break;
10945  }
10946 
10947  case Translation::DOUBLE_STACK_SLOT: {
10948  int input_slot_index = iterator.Next();
10949  PrintF(out, "{input=%d}", input_slot_index);
10950  break;
10951  }
10952 
10953  case Translation::LITERAL: {
10954  unsigned literal_index = iterator.Next();
10955  PrintF(out, "{literal_id=%u}", literal_index);
10956  break;
10957  }
10958 
10959  case Translation::DUPLICATED_OBJECT: {
10960  int object_index = iterator.Next();
10961  PrintF(out, "{object_index=%d}", object_index);
10962  break;
10963  }
10964 
10965  case Translation::ARGUMENTS_OBJECT:
10966  case Translation::CAPTURED_OBJECT: {
10967  int args_length = iterator.Next();
10968  PrintF(out, "{length=%d}", args_length);
10969  break;
10970  }
10971  }
10972  PrintF(out, "\n");
10973  }
10974  }
10975 }
10976 
10977 
10978 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
10979  PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
10980  this->DeoptPoints());
10981  if (this->DeoptPoints() == 0) return;
10982 
10983  PrintF(out, "%6s %8s %s\n", "ast id", "pc", "state");
10984  for (int i = 0; i < this->DeoptPoints(); i++) {
10985  int pc_and_state = this->PcAndState(i)->value();
10986  PrintF(out, "%6d %8d %s\n",
10987  this->AstId(i).ToInt(),
10988  FullCodeGenerator::PcField::decode(pc_and_state),
10989  FullCodeGenerator::State2String(
10990  FullCodeGenerator::StateField::decode(pc_and_state)));
10991  }
10992 }
10993 
10994 
10995 const char* Code::ICState2String(InlineCacheState state) {
10996  switch (state) {
10997  case UNINITIALIZED: return "UNINITIALIZED";
10998  case PREMONOMORPHIC: return "PREMONOMORPHIC";
10999  case MONOMORPHIC: return "MONOMORPHIC";
11000  case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
11001  case POLYMORPHIC: return "POLYMORPHIC";
11002  case MEGAMORPHIC: return "MEGAMORPHIC";
11003  case GENERIC: return "GENERIC";
11004  case DEBUG_STUB: return "DEBUG_STUB";
11005  }
11006  UNREACHABLE();
11007  return NULL;
11008 }
11009 
11010 
11011 const char* Code::StubType2String(StubType type) {
11012  switch (type) {
11013  case NORMAL: return "NORMAL";
11014  case FAST: return "FAST";
11015  }
11016  UNREACHABLE(); // keep the compiler happy
11017  return NULL;
11018 }
11019 
11020 
11021 void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
11022  PrintF(out, "extra_ic_state = ");
11023  const char* name = NULL;
11024  switch (kind) {
11025  case STORE_IC:
11026  case KEYED_STORE_IC:
11027  if (extra == STRICT) name = "STRICT";
11028  break;
11029  default:
11030  break;
11031  }
11032  if (name != NULL) {
11033  PrintF(out, "%s\n", name);
11034  } else {
11035  PrintF(out, "%d\n", extra);
11036  }
11037 }
11038 
11039 
11040 void Code::Disassemble(const char* name, FILE* out) {
11041  PrintF(out, "kind = %s\n", Kind2String(kind()));
11042  if (has_major_key()) {
11043  PrintF(out, "major_key = %s\n",
11044  CodeStub::MajorName(CodeStub::GetMajorKey(this), true));
11045  }
11046  if (is_inline_cache_stub()) {
11047  PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
11048  PrintExtraICState(out, kind(), extra_ic_state());
11049  if (ic_state() == MONOMORPHIC) {
11050  PrintF(out, "type = %s\n", StubType2String(type()));
11051  }
11052  if (is_compare_ic_stub()) {
11053  ASSERT(major_key() == CodeStub::CompareIC);
11054  CompareIC::State left_state, right_state, handler_state;
11055  Token::Value op;
11056  ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state,
11057  &handler_state, &op);
11058  PrintF(out, "compare_state = %s*%s -> %s\n",
11059  CompareIC::GetStateName(left_state),
11060  CompareIC::GetStateName(right_state),
11061  CompareIC::GetStateName(handler_state));
11062  PrintF(out, "compare_operation = %s\n", Token::Name(op));
11063  }
11064  }
11065  if ((name != NULL) && (name[0] != '\0')) {
11066  PrintF(out, "name = %s\n", name);
11067  }
11068  if (kind() == OPTIMIZED_FUNCTION) {
11069  PrintF(out, "stack_slots = %d\n", stack_slots());
11070  }
11071 
11072  PrintF(out, "Instructions (size = %d)\n", instruction_size());
11073  Disassembler::Decode(out, this);
11074  PrintF(out, "\n");
11075 
11076  if (kind() == FUNCTION) {
11077  DeoptimizationOutputData* data =
11078  DeoptimizationOutputData::cast(this->deoptimization_data());
11079  data->DeoptimizationOutputDataPrint(out);
11080  } else if (kind() == OPTIMIZED_FUNCTION) {
11081  DeoptimizationInputData* data =
11082  DeoptimizationInputData::cast(this->deoptimization_data());
11083  data->DeoptimizationInputDataPrint(out);
11084  }
11085  PrintF(out, "\n");
11086 
11087  if (is_crankshafted()) {
11088  SafepointTable table(this);
11089  PrintF(out, "Safepoints (size = %u)\n", table.size());
11090  for (unsigned i = 0; i < table.length(); i++) {
11091  unsigned pc_offset = table.GetPcOffset(i);
11092  PrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset);
11093  table.PrintEntry(i, out);
11094  PrintF(out, " (sp -> fp)");
11095  SafepointEntry entry = table.GetEntry(i);
11096  if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
11097  PrintF(out, " %6d", entry.deoptimization_index());
11098  } else {
11099  PrintF(out, " <none>");
11100  }
11101  if (entry.argument_count() > 0) {
11102  PrintF(out, " argc: %d", entry.argument_count());
11103  }
11104  PrintF(out, "\n");
11105  }
11106  PrintF(out, "\n");
11107  } else if (kind() == FUNCTION) {
11108  unsigned offset = back_edge_table_offset();
11109  // If there is no back edge table, the "table start" will be at or after
11110  // (due to alignment) the end of the instruction stream.
11111  if (static_cast<int>(offset) < instruction_size()) {
11112  DisallowHeapAllocation no_gc;
11113  BackEdgeTable back_edges(this, &no_gc);
11114 
11115  PrintF(out, "Back edges (size = %u)\n", back_edges.length());
11116  PrintF(out, "ast_id pc_offset loop_depth\n");
11117 
11118  for (uint32_t i = 0; i < back_edges.length(); i++) {
11119  PrintF(out, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
11120  back_edges.pc_offset(i),
11121  back_edges.loop_depth(i));
11122  }
11123 
11124  PrintF(out, "\n");
11125  }
11126 #ifdef OBJECT_PRINT
11127  if (!type_feedback_info()->IsUndefined()) {
11128  TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
11129  PrintF(out, "\n");
11130  }
11131 #endif
11132  }
11133 
11134  PrintF(out, "RelocInfo (size = %d)\n", relocation_size());
11135  for (RelocIterator it(this); !it.done(); it.next()) {
11136  it.rinfo()->Print(GetIsolate(), out);
11137  }
11138  PrintF(out, "\n");
11139 }
11140 #endif // ENABLE_DISASSEMBLER
11141 
11142 
11143 Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
11144  Handle<JSObject> object,
11145  int capacity,
11146  int length,
11147  SetFastElementsCapacitySmiMode smi_mode) {
11148  // We should never end in here with a pixel or external array.
11149  ASSERT(!object->HasExternalArrayElements());
11150 
11151  // Allocate a new fast elements backing store.
11152  Handle<FixedArray> new_elements =
11153  object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);
11154 
11155  ElementsKind elements_kind = object->GetElementsKind();
11156  ElementsKind new_elements_kind;
11157  // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
11158  // or if it's allowed and the old elements array contained only SMIs.
11159  bool has_fast_smi_elements =
11160  (smi_mode == kForceSmiElements) ||
11161  ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
11162  if (has_fast_smi_elements) {
11163  if (IsHoleyElementsKind(elements_kind)) {
11164  new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
11165  } else {
11166  new_elements_kind = FAST_SMI_ELEMENTS;
11167  }
11168  } else {
11169  if (IsHoleyElementsKind(elements_kind)) {
11170  new_elements_kind = FAST_HOLEY_ELEMENTS;
11171  } else {
11172  new_elements_kind = FAST_ELEMENTS;
11173  }
11174  }
11175  Handle<FixedArrayBase> old_elements(object->elements());
11176  ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
11177  accessor->CopyElements(object, new_elements, elements_kind);
11178 
11179  if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
11180  Handle<Map> new_map = (new_elements_kind != elements_kind)
11181  ? GetElementsTransitionMap(object, new_elements_kind)
11182  : handle(object->map());
11183  object->ValidateElements();
11184  object->set_map_and_elements(*new_map, *new_elements);
11185 
11186  // Transition through the allocation site as well if present.
11187  JSObject::UpdateAllocationSite(object, new_elements_kind);
11188  } else {
11189  Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
11190  parameter_map->set(1, *new_elements);
11191  }
11192 
11193  if (FLAG_trace_elements_transitions) {
11194  PrintElementsTransition(stdout, object, elements_kind, old_elements,
11195  object->GetElementsKind(), new_elements);
11196  }
11197 
11198  if (object->IsJSArray()) {
11199  Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11200  }
11201  return new_elements;
11202 }
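// Illustrative sketch (hypothetical call and arguments, not part of this
// file): growing a FAST_HOLEY_SMI_ELEMENTS array with
// smi_mode == kAllowSmiElements keeps the backing store in
// FAST_HOLEY_SMI_ELEMENTS, while any smi_mode other than
// kAllowSmiElements/kForceSmiElements lands in the FAST_HOLEY_ELEMENTS branch
// above:
//   JSObject::SetFastElementsCapacityAndLength(object, 32, 10,
//                                              kAllowSmiElements);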
11203 
11204 
11205 void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
11206  int capacity,
11207  int length) {
11208  // We should never end in here with a pixel or external array.
11209  ASSERT(!object->HasExternalArrayElements());
11210 
11211  Handle<FixedArrayBase> elems =
11212  object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);
11213 
11214  ElementsKind elements_kind = object->GetElementsKind();
11215  CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
11216  ElementsKind new_elements_kind = elements_kind;
11217  if (IsHoleyElementsKind(elements_kind)) {
11218  new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
11219  } else {
11220  new_elements_kind = FAST_DOUBLE_ELEMENTS;
11221  }
11222 
11223  Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);
11224 
11225  Handle<FixedArrayBase> old_elements(object->elements());
11226  ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
11227  accessor->CopyElements(object, elems, elements_kind);
11228 
11229  object->ValidateElements();
11230  object->set_map_and_elements(*new_map, *elems);
11231 
11232  if (FLAG_trace_elements_transitions) {
11233  PrintElementsTransition(stdout, object, elements_kind, old_elements,
11234  object->GetElementsKind(), elems);
11235  }
11236 
11237  if (object->IsJSArray()) {
11238  Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
11239  }
11240 }
11241 
11242 
11243 // static
11244 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
11245  ASSERT(capacity >= 0);
11246  array->GetIsolate()->factory()->NewJSArrayStorage(
11247  array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
11248 }
11249 
11250 
11251 void JSArray::Expand(Handle<JSArray> array, int required_size) {
11252  ElementsAccessor* accessor = array->GetElementsAccessor();
11253  accessor->SetCapacityAndLength(array, required_size, required_size);
11254 }
11255 
11256 
11257 // Returns false if the passed-in index is marked non-configurable,
11258 // which will cause the ES5 truncation operation to halt, and thus
11259 // no further old values need be collected.
11260 static bool GetOldValue(Isolate* isolate,
11261  Handle<JSObject> object,
11262  uint32_t index,
11263  List<Handle<Object> >* old_values,
11264  List<uint32_t>* indices) {
11265  PropertyAttributes attributes =
11266  JSReceiver::GetLocalElementAttribute(object, index);
11267  ASSERT(attributes != ABSENT);
11268  if (attributes == DONT_DELETE) return false;
11269  Handle<Object> value;
11270  if (object->GetLocalElementAccessorPair(index) != NULL) {
11271  value = Handle<Object>::cast(isolate->factory()->the_hole_value());
11272  } else {
11273  value = Object::GetElementNoExceptionThrown(isolate, object, index);
11274  }
11275  old_values->Add(value);
11276  indices->Add(index);
11277  return true;
11278 }
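// Illustrative example (hypothetical array, not from this file): truncating
// [0, 1, 2, 3] to length 2 walks indices 3 and 2 through GetOldValue(); if
// index 2 were non-configurable (DONT_DELETE), GetOldValue() returns false
// there, the walk stops and only the old value at index 3 is recorded for the
// splice change record.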
11279 
11280 static void EnqueueSpliceRecord(Handle<JSArray> object,
11281  uint32_t index,
11282  Handle<JSArray> deleted,
11283  uint32_t add_count) {
11284  Isolate* isolate = object->GetIsolate();
11285  HandleScope scope(isolate);
11286  Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
11287  Handle<Object> add_count_object =
11288  isolate->factory()->NewNumberFromUint(add_count);
11289 
11290  Handle<Object> args[] =
11291  { object, index_object, deleted, add_count_object };
11292 
11293  bool threw;
11294  Execution::Call(isolate,
11295  Handle<JSFunction>(isolate->observers_enqueue_splice()),
11296  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11297  &threw);
11298  ASSERT(!threw);
11299 }
11300 
11301 
11302 static void BeginPerformSplice(Handle<JSArray> object) {
11303  Isolate* isolate = object->GetIsolate();
11304  HandleScope scope(isolate);
11305  Handle<Object> args[] = { object };
11306 
11307  bool threw;
11308  Execution::Call(isolate,
11309  Handle<JSFunction>(isolate->observers_begin_perform_splice()),
11310  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11311  &threw);
11312  ASSERT(!threw);
11313 }
11314 
11315 
11316 static void EndPerformSplice(Handle<JSArray> object) {
11317  Isolate* isolate = object->GetIsolate();
11318  HandleScope scope(isolate);
11319  Handle<Object> args[] = { object };
11320 
11321  bool threw;
11322  Execution::Call(isolate,
11323  Handle<JSFunction>(isolate->observers_end_perform_splice()),
11324  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
11325  &threw);
11326  ASSERT(!threw);
11327 }
11328 
11329 
11330 Handle<Object> JSArray::SetElementsLength(Handle<JSArray> array,
11331  Handle<Object> new_length_handle) {
11332  // We should never end in here with a pixel or external array.
11333  ASSERT(array->AllowsSetElementsLength());
11334  if (!array->map()->is_observed()) {
11335  return array->GetElementsAccessor()->SetLength(array, new_length_handle);
11336  }
11337 
11338  Isolate* isolate = array->GetIsolate();
11339  List<uint32_t> indices;
11340  List<Handle<Object> > old_values;
11341  Handle<Object> old_length_handle(array->length(), isolate);
11342  uint32_t old_length = 0;
11343  CHECK(old_length_handle->ToArrayIndex(&old_length));
11344  uint32_t new_length = 0;
11345  CHECK(new_length_handle->ToArrayIndex(&new_length));
11346 
11347  static const PropertyAttributes kNoAttrFilter = NONE;
11348  int num_elements = array->NumberOfLocalElements(kNoAttrFilter);
11349  if (num_elements > 0) {
11350  if (old_length == static_cast<uint32_t>(num_elements)) {
11351  // Simple case for arrays without holes.
11352  for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
11353  if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
11354  }
11355  } else {
11356  // For sparse arrays, only iterate over existing elements.
11357  // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
11358  // the to-be-removed indices twice.
11359  Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
11360  array->GetLocalElementKeys(*keys, kNoAttrFilter);
11361  while (num_elements-- > 0) {
11362  uint32_t index = NumberToUint32(keys->get(num_elements));
11363  if (index < new_length) break;
11364  if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
11365  }
11366  }
11367  }
11368 
11369  Handle<Object> hresult =
11370  array->GetElementsAccessor()->SetLength(array, new_length_handle);
11371  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, hresult, hresult);
11372 
11373  CHECK(array->length()->ToArrayIndex(&new_length));
11374  if (old_length == new_length) return hresult;
11375 
11376  BeginPerformSplice(array);
11377 
11378  for (int i = 0; i < indices.length(); ++i) {
11379  // For deletions where the property was an accessor, old_values[i]
11380  // will be the hole, which instructs EnqueueChangeRecord to elide
11381  // the "oldValue" property.
11382  EnqueueChangeRecord(
11383  array, "delete", isolate->factory()->Uint32ToString(indices[i]),
11384  old_values[i]);
11385  }
11386  EnqueueChangeRecord(
11387  array, "update", isolate->factory()->length_string(),
11388  old_length_handle);
11389 
11390  EndPerformSplice(array);
11391 
11392  uint32_t index = Min(old_length, new_length);
11393  uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
11394  uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
11395  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
11396  if (delete_count > 0) {
11397  for (int i = indices.length() - 1; i >= 0; i--) {
11398  // Skip deletions where the property was an accessor, leaving holes
11399  // in the array of old values.
11400  if (old_values[i]->IsTheHole()) continue;
11401  JSObject::SetElement(deleted, indices[i] - index, old_values[i], NONE,
11402  SLOPPY);
11403  }
11404 
11405  SetProperty(deleted, isolate->factory()->length_string(),
11406  isolate->factory()->NewNumberFromUint(delete_count),
11407  NONE, SLOPPY);
11408  }
11409 
11410  EnqueueSpliceRecord(array, index, deleted, add_count);
11411 
11412  return hresult;
11413 }
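// Illustrative example (hypothetical lengths, not from this file): shrinking
// an observed array from old_length = 5 to new_length = 2 gives index = 2,
// add_count = 0 and delete_count = 3, so observers receive one splice record
// reporting three removed elements starting at index 2.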
11414 
11415 
11416 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
11417  Handle<Object> prototype) {
11418  FixedArray* cache = map->GetPrototypeTransitions();
11419  int number_of_transitions = map->NumberOfProtoTransitions();
11420  const int proto_offset =
11421  kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
11422  const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
11423  const int step = kProtoTransitionElementsPerEntry;
11424  for (int i = 0; i < number_of_transitions; i++) {
11425  if (cache->get(proto_offset + i * step) == *prototype) {
11426  Object* result = cache->get(map_offset + i * step);
11427  return Handle<Map>(Map::cast(result));
11428  }
11429  }
11430  return Handle<Map>();
11431 }
11432 
11433 
11434 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
11435  Handle<Object> prototype,
11436  Handle<Map> target_map) {
11437  ASSERT(target_map->IsMap());
11438  ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
11439  // Don't cache prototype transition if this map is shared.
11440  if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;
11441 
11442  const int step = kProtoTransitionElementsPerEntry;
11443  const int header = kProtoTransitionHeaderSize;
11444 
11445  Handle<FixedArray> cache(map->GetPrototypeTransitions());
11446  int capacity = (cache->length() - header) / step;
11447  int transitions = map->NumberOfProtoTransitions() + 1;
11448 
11449  if (transitions > capacity) {
11450  if (capacity > kMaxCachedPrototypeTransitions) return map;
11451 
11452  // Grow array by factor 2 over and above what we need.
11453  Factory* factory = map->GetIsolate()->factory();
11454  cache = factory->CopySizeFixedArray(cache, transitions * 2 * step + header);
11455 
11456  CALL_AND_RETRY_OR_DIE(map->GetIsolate(),
11457  map->SetPrototypeTransitions(*cache),
11458  break,
11459  return Handle<Map>());
11460  }
11461 
11462  // Reload number of transitions as GC might shrink them.
11463  int last = map->NumberOfProtoTransitions();
11464  int entry = header + last * step;
11465 
11466  cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
11467  cache->set(entry + kProtoTransitionMapOffset, *target_map);
11468  map->SetNumberOfProtoTransitions(last + 1);
11469 
11470  return map;
11471 }
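// Illustrative arithmetic (hypothetical sizes, not from this file): with
// step = kProtoTransitionElementsPerEntry and a full cache, inserting the
// n-th transition above reallocates the FixedArray to
// header + n * 2 * step slots, i.e. room for 2 * n entries, so roughly half
// of the new cache stays free for later transitions.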
11472 
11473 
11474 void Map::ZapTransitions() {
11475  TransitionArray* transition_array = transitions();
11476  // TODO(mstarzinger): Temporarily use a slower version instead of the faster
11477  // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
11478  Object** data = transition_array->data_start();
11479  Object* the_hole = GetHeap()->the_hole_value();
11480  int length = transition_array->length();
11481  for (int i = 0; i < length; i++) {
11482  data[i] = the_hole;
11483  }
11484 }
11485 
11486 
11487 void Map::ZapPrototypeTransitions() {
11488  FixedArray* proto_transitions = GetPrototypeTransitions();
11489  MemsetPointer(proto_transitions->data_start(),
11490  GetHeap()->the_hole_value(),
11491  proto_transitions->length());
11492 }
11493 
11494 
11495 void Map::AddDependentCompilationInfo(DependentCode::DependencyGroup group,
11496  CompilationInfo* info) {
11497  Handle<DependentCode> dep(dependent_code());
11498  Handle<DependentCode> codes =
11499  DependentCode::Insert(dep, group, info->object_wrapper());
11500  if (*codes != dependent_code()) set_dependent_code(*codes);
11501  info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
11502 }
11503 
11504 
11505 void Map::AddDependentCode(DependentCode::DependencyGroup group,
11506  Handle<Code> code) {
11507  Handle<DependentCode> codes = DependentCode::Insert(
11508  Handle<DependentCode>(dependent_code()), group, code);
11509  if (*codes != dependent_code()) set_dependent_code(*codes);
11510 }
11511 
11512 
11513 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
11514  Recompute(entries);
11515 }
11516 
11517 
11518 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
11519  start_indexes_[0] = 0;
11520  for (int g = 1; g <= kGroupCount; g++) {
11521  int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
11522  start_indexes_[g] = start_indexes_[g - 1] + count;
11523  }
11524 }
11525 
11526 
11527 DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
11528  DependencyGroup group) {
11529  AllowDeferredHandleDereference dependencies_are_safe;
11530  if (group == DependentCode::kPropertyCellChangedGroup) {
11531  return Handle<PropertyCell>::cast(object)->dependent_code();
11532  } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
11533  group == DependentCode::kAllocationSiteTransitionChangedGroup) {
11534  return Handle<AllocationSite>::cast(object)->dependent_code();
11535  }
11536  return Handle<Map>::cast(object)->dependent_code();
11537 }
11538 
11539 
11540 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
11541  DependencyGroup group,
11542  Handle<Object> object) {
11543  GroupStartIndexes starts(*entries);
11544  int start = starts.at(group);
11545  int end = starts.at(group + 1);
11546  int number_of_entries = starts.number_of_entries();
11547  // Check for existing entry to avoid duplicates.
11548  for (int i = start; i < end; i++) {
11549  if (entries->object_at(i) == *object) return entries;
11550  }
11551  if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
11552  Factory* factory = entries->GetIsolate()->factory();
11553  int capacity = kCodesStartIndex + number_of_entries + 1;
11554  if (capacity > 5) capacity = capacity * 5 / 4;
11555  Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
11556  factory->CopySizeFixedArray(entries, capacity, TENURED));
11557  // The number of codes can change after GC.
11558  starts.Recompute(*entries);
11559  start = starts.at(group);
11560  end = starts.at(group + 1);
11561  number_of_entries = starts.number_of_entries();
11562  for (int i = 0; i < number_of_entries; i++) {
11563  entries->clear_at(i);
11564  }
11565  // If the old fixed array was empty, we need to reset counters of the
11566  // new array.
11567  if (number_of_entries == 0) {
11568  for (int g = 0; g < kGroupCount; g++) {
11569  new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
11570  }
11571  }
11572  entries = new_entries;
11573  }
11574  entries->ExtendGroup(group);
11575  entries->set_object_at(end, *object);
11576  entries->set_number_of_entries(group, end + 1 - start);
11577  return entries;
11578 }
11579 
11580 
11581 void DependentCode::UpdateToFinishedCode(DependencyGroup group,
11582  CompilationInfo* info,
11583  Code* code) {
11584  DisallowHeapAllocation no_gc;
11585  AllowDeferredHandleDereference get_object_wrapper;
11586  Foreign* info_wrapper = *info->object_wrapper();
11587  GroupStartIndexes starts(this);
11588  int start = starts.at(group);
11589  int end = starts.at(group + 1);
11590  for (int i = start; i < end; i++) {
11591  if (object_at(i) == info_wrapper) {
11592  set_object_at(i, code);
11593  break;
11594  }
11595  }
11596 
11597 #ifdef DEBUG
11598  for (int i = start; i < end; i++) {
11599  ASSERT(is_code_at(i) || compilation_info_at(i) != info);
11600  }
11601 #endif
11602 }
11603 
11604 
11605 void DependentCode::RemoveCompilationInfo(DependencyGroup group,
11606  CompilationInfo* info) {
11607  DisallowHeapAllocation no_allocation;
11608  AllowDeferredHandleDereference get_object_wrapper;
11609  Foreign* info_wrapper = *info->object_wrapper();
11610  GroupStartIndexes starts(this);
11611  int start = starts.at(group);
11612  int end = starts.at(group + 1);
11613  // Find compilation info wrapper.
11614  int info_pos = -1;
11615  for (int i = start; i < end; i++) {
11616  if (object_at(i) == info_wrapper) {
11617  info_pos = i;
11618  break;
11619  }
11620  }
11621  if (info_pos == -1) return; // Not found.
11622  int gap = info_pos;
11623  // Use the last of each group to fill the gap in the previous group.
11624  for (int i = group; i < kGroupCount; i++) {
11625  int last_of_group = starts.at(i + 1) - 1;
11626  ASSERT(last_of_group >= gap);
11627  if (last_of_group == gap) continue;
11628  copy(last_of_group, gap);
11629  gap = last_of_group;
11630  }
11631  ASSERT(gap == starts.number_of_entries() - 1);
11632  clear_at(gap); // Clear last gap.
11633  set_number_of_entries(group, end - start - 1);
11634 
11635 #ifdef DEBUG
11636  for (int i = start; i < end - 1; i++) {
11637  ASSERT(is_code_at(i) || compilation_info_at(i) != info);
11638  }
11639 #endif
11640 }
11641 
11642 
11643 bool DependentCode::Contains(DependencyGroup group, Code* code) {
11644  GroupStartIndexes starts(this);
11645  int start = starts.at(group);
11646  int end = starts.at(group + 1);
11647  for (int i = start; i < end; i++) {
11648  if (object_at(i) == code) return true;
11649  }
11650  return false;
11651 }
11652 
11653 
11654 bool DependentCode::MarkCodeForDeoptimization(
11655  Isolate* isolate,
11656  DependentCode::DependencyGroup group) {
11657  DisallowHeapAllocation no_allocation_scope;
11658  DependentCode::GroupStartIndexes starts(this);
11659  int start = starts.at(group);
11660  int end = starts.at(group + 1);
11661  int code_entries = starts.number_of_entries();
11662  if (start == end) return false;
11663 
11664  // Mark all the code that needs to be deoptimized.
11665  bool marked = false;
11666  for (int i = start; i < end; i++) {
11667  if (is_code_at(i)) {
11668  Code* code = code_at(i);
11669  if (!code->marked_for_deoptimization()) {
11670  code->set_marked_for_deoptimization(true);
11671  marked = true;
11672  }
11673  } else {
11674  CompilationInfo* info = compilation_info_at(i);
11675  info->AbortDueToDependencyChange();
11676  }
11677  }
11678  // Compact the array by moving all subsequent groups to fill in the new holes.
11679  for (int src = end, dst = start; src < code_entries; src++, dst++) {
11680  copy(src, dst);
11681  }
11682  // Now the holes are at the end of the array, zap them for heap-verifier.
11683  int removed = end - start;
11684  for (int i = code_entries - removed; i < code_entries; i++) {
11685  clear_at(i);
11686  }
11687  set_number_of_entries(group, 0);
11688  return marked;
11689 }
11690 
11691 
11692 void DependentCode::DeoptimizeDependentCodeGroup(
11693  Isolate* isolate,
11694  DependentCode::DependencyGroup group) {
11695  ASSERT(AllowCodeDependencyChange::IsAllowed());
11696  DisallowHeapAllocation no_allocation_scope;
11697  bool marked = MarkCodeForDeoptimization(isolate, group);
11698 
11699  if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
11700 }
11701 
11702 
11703 Handle<Object> JSObject::SetPrototype(Handle<JSObject> object,
11704  Handle<Object> value,
11705  bool skip_hidden_prototypes) {
11706 #ifdef DEBUG
11707  int size = object->Size();
11708 #endif
11709 
11710  Isolate* isolate = object->GetIsolate();
11711  Heap* heap = isolate->heap();
11712  // Silently ignore the change if value is not a JSObject or null.
11713  // SpiderMonkey behaves this way.
11714  if (!value->IsJSReceiver() && !value->IsNull()) return value;
11715 
11716  // From 8.6.2 Object Internal Methods
11717  // ...
11718  // In addition, if [[Extensible]] is false the value of the [[Class]] and
11719  // [[Prototype]] internal properties of the object may not be modified.
11720  // ...
11721  // Implementation specific extensions that modify [[Class]], [[Prototype]]
11722  // or [[Extensible]] must not violate the invariants defined in the preceding
11723  // paragraph.
11724  if (!object->map()->is_extensible()) {
11725  Handle<Object> args[] = { object };
11726  Handle<Object> error = isolate->factory()->NewTypeError(
11727  "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
11728  isolate->Throw(*error);
11729  return Handle<Object>();
11730  }
11731 
11732  // Before we can set the prototype we need to be sure
11733  // prototype cycles are prevented.
11734  // It is sufficient to validate that the receiver is not in the new prototype
11735  // chain.
11736  for (Object* pt = *value;
11737  pt != heap->null_value();
11738  pt = pt->GetPrototype(isolate)) {
11739  if (JSReceiver::cast(pt) == *object) {
11740  // Cycle detected.
11741  Handle<Object> error = isolate->factory()->NewError(
11742  "cyclic_proto", HandleVector<Object>(NULL, 0));
11743  isolate->Throw(*error);
11744  return Handle<Object>();
11745  }
11746  }
11747 
11748  bool dictionary_elements_in_chain =
11749  object->map()->DictionaryElementsInPrototypeChainOnly();
11750  Handle<JSObject> real_receiver = object;
11751 
11752  if (skip_hidden_prototypes) {
11753  // Find the first object in the chain whose prototype object is not
11754  // hidden and set the new prototype on that object.
11755  Object* current_proto = real_receiver->GetPrototype();
11756  while (current_proto->IsJSObject() &&
11757  JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
11758  real_receiver = handle(JSObject::cast(current_proto), isolate);
11759  current_proto = current_proto->GetPrototype(isolate);
11760  }
11761  }
11762 
11763  // Set the new prototype of the object.
11764  Handle<Map> map(real_receiver->map());
11765 
11766  // Nothing to do if prototype is already set.
11767  if (map->prototype() == *value) return value;
11768 
11769  if (value->IsJSObject()) {
11770  JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
11771  }
11772 
11773  Handle<Map> new_map = Map::GetPrototypeTransition(map, value);
11774  if (new_map.is_null()) {
11775  new_map = Map::Copy(map);
11776  Map::PutPrototypeTransition(map, value, new_map);
11777  new_map->set_prototype(*value);
11778  }
11779  ASSERT(new_map->prototype() == *value);
11780  JSObject::MigrateToMap(real_receiver, new_map);
11781 
11782  if (!dictionary_elements_in_chain &&
11783  new_map->DictionaryElementsInPrototypeChainOnly()) {
11784  // If the prototype chain didn't previously have element callbacks, then
11785  // KeyedStoreICs need to be cleared to ensure any that involve this
11786  // map go generic.
11787  object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
11788  }
11789 
11790  heap->ClearInstanceofCache();
11791  ASSERT(size == object->Size());
11792  return value;
11793 }
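// Illustrative example (hypothetical objects, not from this file): for a call
// equivalent to Object.setPrototypeOf(a, b) where a is already on b's
// prototype chain, the walk from *value towards null above reaches *object,
// a "cyclic_proto" TypeError is thrown and an empty handle is returned before
// any map is modified.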
11794 
11795 
11796 void JSObject::EnsureCanContainElements(Handle<JSObject> object,
11797  Arguments* args,
11798  uint32_t first_arg,
11799  uint32_t arg_count,
11800  EnsureElementsMode mode) {
11801  // Elements in |Arguments| are ordered backwards (because they're on the
11802  // stack), but the method that's called here iterates over them in forward
11803  // direction.
11804  return EnsureCanContainElements(
11805  object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
11806 }
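// Illustrative arithmetic (hypothetical values, not from this file): with
// first_arg = 1 and arg_count = 3, the start pointer is
// args->arguments() - 1 - 2, i.e. the stack slot of argument 3; scanning
// arg_count slots forward from there covers arguments 3, 2 and 1, which is
// all the element-kind check needs.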
11807 
11808 
11809 AccessorPair* JSObject::GetLocalPropertyAccessorPair(Name* name) {
11810  uint32_t index = 0;
11811  if (name->AsArrayIndex(&index)) {
11812  return GetLocalElementAccessorPair(index);
11813  }
11814 
11815  LookupResult lookup(GetIsolate());
11816  LocalLookupRealNamedProperty(name, &lookup);
11817 
11818  if (lookup.IsPropertyCallbacks() &&
11819  lookup.GetCallbackObject()->IsAccessorPair()) {
11820  return AccessorPair::cast(lookup.GetCallbackObject());
11821  }
11822  return NULL;
11823 }
11824 
11825 
11826 AccessorPair* JSObject::GetLocalElementAccessorPair(uint32_t index) {
11827  if (IsJSGlobalProxy()) {
11828  Object* proto = GetPrototype();
11829  if (proto->IsNull()) return NULL;
11830  ASSERT(proto->IsJSGlobalObject());
11831  return JSObject::cast(proto)->GetLocalElementAccessorPair(index);
11832  }
11833 
11834  // Check for lookup interceptor.
11835  if (HasIndexedInterceptor()) return NULL;
11836 
11837  return GetElementsAccessor()->GetAccessorPair(this, this, index);
11838 }
11839 
11840 
11841 Handle<Object> JSObject::SetElementWithInterceptor(
11842  Handle<JSObject> object,
11843  uint32_t index,
11844  Handle<Object> value,
11845  PropertyAttributes attributes,
11846  StrictMode strict_mode,
11847  bool check_prototype,
11848  SetPropertyMode set_mode) {
11849  Isolate* isolate = object->GetIsolate();
11850 
11851  // Make sure that the top context does not change when doing
11852  // callbacks or interceptor calls.
11853  AssertNoContextChange ncc(isolate);
11854 
11855  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
11856  if (!interceptor->setter()->IsUndefined()) {
11857  v8::IndexedPropertySetterCallback setter =
11858  v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
11859  LOG(isolate,
11860  ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
11861  PropertyCallbackArguments args(isolate, interceptor->data(), *object,
11862  *object);
11863  v8::Handle<v8::Value> result =
11864  args.Call(setter, index, v8::Utils::ToLocal(value));
11865  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
11866  if (!result.IsEmpty()) return value;
11867  }
11868 
11869  return SetElementWithoutInterceptor(object, index, value, attributes,
11870  strict_mode,
11871  check_prototype,
11872  set_mode);
11873 }
11874 
11875 
11876 MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
11877  Object* structure,
11878  uint32_t index,
11879  Object* holder) {
11880  Isolate* isolate = GetIsolate();
11881  ASSERT(!structure->IsForeign());
11882 
11883  // api style callbacks.
11884  if (structure->IsExecutableAccessorInfo()) {
11885  Handle<ExecutableAccessorInfo> data(
11886  ExecutableAccessorInfo::cast(structure));
11887  Object* fun_obj = data->getter();
11888  v8::AccessorGetterCallback call_fun =
11889  v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
11890  if (call_fun == NULL) return isolate->heap()->undefined_value();
11891  HandleScope scope(isolate);
11892  Handle<JSObject> self(JSObject::cast(receiver));
11893  Handle<JSObject> holder_handle(JSObject::cast(holder));
11894  Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11895  Handle<String> key = isolate->factory()->NumberToString(number);
11896  LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
11897  PropertyCallbackArguments
11898  args(isolate, data->data(), *self, *holder_handle);
11899  v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
11900  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
11901  if (result.IsEmpty()) return isolate->heap()->undefined_value();
11902  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
11903  result_internal->VerifyApiCallResultType();
11904  return *result_internal;
11905  }
11906 
11907  // __defineGetter__ callback
11908  if (structure->IsAccessorPair()) {
11909  Object* getter = AccessorPair::cast(structure)->getter();
11910  if (getter->IsSpecFunction()) {
11911  // TODO(rossberg): nicer would be to cast to some JSCallable here...
11912  return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
11913  }
11914  // Getter is not a function.
11915  return isolate->heap()->undefined_value();
11916  }
11917 
11918  if (structure->IsDeclaredAccessorInfo()) {
11919  return GetDeclaredAccessorProperty(receiver,
11920  DeclaredAccessorInfo::cast(structure),
11921  isolate);
11922  }
11923 
11924  UNREACHABLE();
11925  return NULL;
11926 }
11927 
11928 
11929 Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
11930  Handle<Object> structure,
11931  uint32_t index,
11932  Handle<Object> value,
11933  Handle<JSObject> holder,
11934  StrictMode strict_mode) {
11935  Isolate* isolate = object->GetIsolate();
11936 
11937  // We should never get here to initialize a const with the hole
11938  // value since a const declaration would conflict with the setter.
11939  ASSERT(!value->IsTheHole());
11940 
11941  // To accommodate both the old and the new api we switch on the
11942  // data structure used to store the callbacks. Eventually foreign
11943  // callbacks should be phased out.
11944  ASSERT(!structure->IsForeign());
11945 
11946  if (structure->IsExecutableAccessorInfo()) {
11947  // api style callbacks
11948  Handle<ExecutableAccessorInfo> data =
11949  Handle<ExecutableAccessorInfo>::cast(structure);
11950  Object* call_obj = data->setter();
11951  v8::AccessorSetterCallback call_fun =
11952  v8::ToCData<v8::AccessorSetterCallback>(call_obj);
11953  if (call_fun == NULL) return value;
11954  Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
11955  Handle<String> key(isolate->factory()->NumberToString(number));
11956  LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
11957  PropertyCallbackArguments
11958  args(isolate, data->data(), *object, *holder);
11959  args.Call(call_fun,
11960  v8::Utils::ToLocal(key),
11961  v8::Utils::ToLocal(value));
11962  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
11963  return value;
11964  }
11965 
11966  if (structure->IsAccessorPair()) {
11967  Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
11968  if (setter->IsSpecFunction()) {
11969  // TODO(rossberg): nicer would be to cast to some JSCallable here...
11970  return SetPropertyWithDefinedSetter(
11971  object, Handle<JSReceiver>::cast(setter), value);
11972  } else {
11973  if (strict_mode == SLOPPY) return value;
11974  Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
11975  Handle<Object> args[2] = { key, holder };
11976  Handle<Object> error = isolate->factory()->NewTypeError(
11977  "no_setter_in_callback", HandleVector(args, 2));
11978  isolate->Throw(*error);
11979  return Handle<Object>();
11980  }
11981  }
11982 
11983  // TODO(dcarney): Handle correctly.
11984  if (structure->IsDeclaredAccessorInfo()) return value;
11985 
11986  UNREACHABLE();
11987  return Handle<Object>();
11988 }
11989 
11990 
11991 bool JSObject::HasFastArgumentsElements() {
11992  Heap* heap = GetHeap();
11993  if (!elements()->IsFixedArray()) return false;
11994  FixedArray* elements = FixedArray::cast(this->elements());
11995  if (elements->map() != heap->sloppy_arguments_elements_map()) {
11996  return false;
11997  }
11998  FixedArray* arguments = FixedArray::cast(elements->get(1));
11999  return !arguments->IsDictionary();
12000 }
12001 
12002 
12003 bool JSObject::HasDictionaryArgumentsElements() {
12004  Heap* heap = GetHeap();
12005  if (!elements()->IsFixedArray()) return false;
12006  FixedArray* elements = FixedArray::cast(this->elements());
12007  if (elements->map() != heap->sloppy_arguments_elements_map()) {
12008  return false;
12009  }
12010  FixedArray* arguments = FixedArray::cast(elements->get(1));
12011  return arguments->IsDictionary();
12012 }
12013 
12014 
12015 // Adding n elements in fast case is O(n*n).
12016 // Note: revisit design to have dual undefined values to capture absent
12017 // elements.
12018 Handle<Object> JSObject::SetFastElement(Handle<JSObject> object,
12019  uint32_t index,
12020  Handle<Object> value,
12021  StrictMode strict_mode,
12022  bool check_prototype) {
12023  ASSERT(object->HasFastSmiOrObjectElements() ||
12024  object->HasFastArgumentsElements());
12025 
12026  Isolate* isolate = object->GetIsolate();
12027 
12028  // Array optimizations rely on the prototype lookups of Array objects always
12029  // returning undefined. If there is a store to the initial prototype object,
12030  // make sure all of these optimizations are invalidated.
12031  if (isolate->is_initial_object_prototype(*object) ||
12032  isolate->is_initial_array_prototype(*object)) {
12033  object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
12034  DependentCode::kElementsCantBeAddedGroup);
12035  }
12036 
12037  Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
12038  if (backing_store->map() ==
12039  isolate->heap()->sloppy_arguments_elements_map()) {
12040  backing_store = handle(FixedArray::cast(backing_store->get(1)));
12041  } else {
12042  backing_store = EnsureWritableFastElements(object);
12043  }
12044  uint32_t capacity = static_cast<uint32_t>(backing_store->length());
12045 
12046  if (check_prototype &&
12047  (index >= capacity || backing_store->get(index)->IsTheHole())) {
12048  bool found;
12049  Handle<Object> result = SetElementWithCallbackSetterInPrototypes(
12050  object, index, value, &found, strict_mode);
12051  if (found) return result;
12052  }
12053 
12054  uint32_t new_capacity = capacity;
12055  // Check if the length property of this object needs to be updated.
12056  uint32_t array_length = 0;
12057  bool must_update_array_length = false;
12058  bool introduces_holes = true;
12059  if (object->IsJSArray()) {
12060  CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12061  introduces_holes = index > array_length;
12062  if (index >= array_length) {
12063  must_update_array_length = true;
12064  array_length = index + 1;
12065  }
12066  } else {
12067  introduces_holes = index >= capacity;
12068  }
12069 
12070  // If the array is growing, and it's not growth by a single element at the
12071  // end, make sure that the ElementsKind is HOLEY.
12072  ElementsKind elements_kind = object->GetElementsKind();
12073  if (introduces_holes &&
12074  IsFastElementsKind(elements_kind) &&
12075  !IsFastHoleyElementsKind(elements_kind)) {
12076  ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12077  TransitionElementsKind(object, transitioned_kind);
12078  }
12079 
12080  // Check if the capacity of the backing store needs to be increased, or if
12081  // a transition to slow elements is necessary.
12082  if (index >= capacity) {
12083  bool convert_to_slow = true;
12084  if ((index - capacity) < kMaxGap) {
12085  new_capacity = NewElementsCapacity(index + 1);
12086  ASSERT(new_capacity > index);
12087  if (!object->ShouldConvertToSlowElements(new_capacity)) {
12088  convert_to_slow = false;
12089  }
12090  }
12091  if (convert_to_slow) {
12092  NormalizeElements(object);
12093  return SetDictionaryElement(object, index, value, NONE, strict_mode,
12094  check_prototype);
12095  }
12096  }
12097  // Convert to fast double elements if appropriate.
12098  if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
12099  // Consider fixing the boilerplate as well if we have one.
12100  ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
12101  ? FAST_HOLEY_DOUBLE_ELEMENTS
12102  : FAST_DOUBLE_ELEMENTS;
12103 
12104  UpdateAllocationSite(object, to_kind);
12105 
12106  SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
12107  FixedDoubleArray::cast(object->elements())->set(index, value->Number());
12108  object->ValidateElements();
12109  return value;
12110  }
12111  // Change elements kind from Smi-only to generic FAST if necessary.
12112  if (object->HasFastSmiElements() && !value->IsSmi()) {
12113  ElementsKind kind = object->HasFastHoleyElements()
12114  ? FAST_HOLEY_ELEMENTS
12115  : FAST_ELEMENTS;
12116 
12117  UpdateAllocationSite(object, kind);
12118  Handle<Map> new_map = GetElementsTransitionMap(object, kind);
12119  JSObject::MigrateToMap(object, new_map);
12120  ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
12121  }
12122  // Increase backing store capacity if that's been decided previously.
12123  if (new_capacity != capacity) {
12124  SetFastElementsCapacitySmiMode smi_mode =
12125  value->IsSmi() && object->HasFastSmiElements()
12126  ? kAllowSmiElements
12127  : kDontAllowSmiElements;
12128  Handle<FixedArray> new_elements =
12129  SetFastElementsCapacityAndLength(object, new_capacity, array_length,
12130  smi_mode);
12131  new_elements->set(index, *value);
12132  object->ValidateElements();
12133  return value;
12134  }
12135 
12136  // Finally, set the new element and length.
12137  ASSERT(object->elements()->IsFixedArray());
12138  backing_store->set(index, *value);
12139  if (must_update_array_length) {
12140  Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
12141  }
12142  return value;
12143 }
12144 
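// Illustrative sketch (not part of objects.cc): the growth policy applied by
// SetFastElement above when a store lands past the end of a fast backing
// store. The constant kMaxGap and the "+50% + 16" growth formula are
// assumptions standing in for JSObject::kMaxGap and
// JSObject::NewElementsCapacity; see objects.h for the authoritative values.
#include <cstdint>

namespace sketch {

const uint32_t kMaxGap = 1024;  // assumed value of JSObject::kMaxGap

// Assumed shape of JSObject::NewElementsCapacity: required size plus 50%
// plus a small fixed headroom, so repeated appends stay amortized O(1).
uint32_t NewElementsCapacity(uint32_t required) {
  return required + (required >> 1) + 16;
}

enum StoreAction { kStoreInPlace, kGrowFastStore, kGoDictionary };

// Classify a write at `index` into a fast store of `capacity` slots.
// `too_sparse_for_fast` stands in for the ShouldConvertToSlowElements()
// check that the real code performs on the grown capacity.
StoreAction ClassifyStore(uint32_t index, uint32_t capacity,
                          bool too_sparse_for_fast) {
  if (index < capacity) return kStoreInPlace;
  if (index - capacity >= kMaxGap) return kGoDictionary;
  return too_sparse_for_fast ? kGoDictionary : kGrowFastStore;
}

}  // namespace sketch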
12145 
12146 Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object,
12147  uint32_t index,
12148  Handle<Object> value,
12149  PropertyAttributes attributes,
12150  StrictMode strict_mode,
12151  bool check_prototype,
12152  SetPropertyMode set_mode) {
12153  ASSERT(object->HasDictionaryElements() ||
12154  object->HasDictionaryArgumentsElements());
12155  Isolate* isolate = object->GetIsolate();
12156 
12157  // Insert element in the dictionary.
12158  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
12159  bool is_arguments =
12160  (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
12161  Handle<SeededNumberDictionary> dictionary(is_arguments
12162  ? SeededNumberDictionary::cast(elements->get(1))
12163  : SeededNumberDictionary::cast(*elements));
12164 
12165  int entry = dictionary->FindEntry(index);
12166  if (entry != SeededNumberDictionary::kNotFound) {
12167  Handle<Object> element(dictionary->ValueAt(entry), isolate);
12168  PropertyDetails details = dictionary->DetailsAt(entry);
12169  if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
12170  return SetElementWithCallback(object, element, index, value, object,
12171  strict_mode);
12172  } else {
12173  dictionary->UpdateMaxNumberKey(index);
12174  // If a value has not been initialized we allow writing to it even if it
12175  // is read-only (a declared const that has not been initialized). If a
12176  // value is being defined we skip attribute checks completely.
12177  if (set_mode == DEFINE_PROPERTY) {
12178  details = PropertyDetails(
12179  attributes, NORMAL, details.dictionary_index());
12180  dictionary->DetailsAtPut(entry, details);
12181  } else if (details.IsReadOnly() && !element->IsTheHole()) {
12182  if (strict_mode == SLOPPY) {
12183  return isolate->factory()->undefined_value();
12184  } else {
12185  Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12186  Handle<Object> args[2] = { number, object };
12187  Handle<Object> error =
12188  isolate->factory()->NewTypeError("strict_read_only_property",
12189  HandleVector(args, 2));
12190  isolate->Throw(*error);
12191  return Handle<Object>();
12192  }
12193  }
12194  // Elements of the arguments object in slow mode might be slow aliases.
12195  if (is_arguments && element->IsAliasedArgumentsEntry()) {
12196  Handle<AliasedArgumentsEntry> entry =
12197  Handle<AliasedArgumentsEntry>::cast(element);
12198  Handle<Context> context(Context::cast(elements->get(0)));
12199  int context_index = entry->aliased_context_slot();
12200  ASSERT(!context->get(context_index)->IsTheHole());
12201  context->set(context_index, *value);
12202  // For elements that are still writable we keep slow aliasing.
12203  if (!details.IsReadOnly()) value = element;
12204  }
12205  dictionary->ValueAtPut(entry, *value);
12206  }
12207  } else {
12208  // Index not already used. Look for an accessor in the prototype chain.
12209  // Can cause GC!
12210  if (check_prototype) {
12211  bool found;
12212  Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
12213  index, value, &found, strict_mode);
12214  if (found) return result;
12215  }
12216 
12217  // When we set the is_extensible flag to false we always force the
12218  // elements into dictionary mode (and force them to stay there).
12219  if (!object->map()->is_extensible()) {
12220  if (strict_mode == SLOPPY) {
12221  return isolate->factory()->undefined_value();
12222  } else {
12223  Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12224  Handle<String> name = isolate->factory()->NumberToString(number);
12225  Handle<Object> args[1] = { name };
12226  Handle<Object> error =
12227  isolate->factory()->NewTypeError("object_not_extensible",
12228  HandleVector(args, 1));
12229  isolate->Throw(*error);
12230  return Handle<Object>();
12231  }
12232  }
12233 
12234  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
12235  Handle<SeededNumberDictionary> new_dictionary =
12236  SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
12237  details);
12238  if (*dictionary != *new_dictionary) {
12239  if (is_arguments) {
12240  elements->set(1, *new_dictionary);
12241  } else {
12242  object->set_elements(*new_dictionary);
12243  }
12244  dictionary = new_dictionary;
12245  }
12246  }
12247 
12248  // Update the array length if this JSObject is an array.
12249  if (object->IsJSArray()) {
12250  JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
12251  value);
12252  }
12253 
12254  // Attempt to put this object back in fast case.
12255  if (object->ShouldConvertToFastElements()) {
12256  uint32_t new_length = 0;
12257  if (object->IsJSArray()) {
12258  CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
12259  } else {
12260  new_length = dictionary->max_number_key() + 1;
12261  }
12262  SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
12263  ? kAllowSmiElements
12264  : kDontAllowSmiElements;
12265  bool has_smi_only_elements = false;
12266  bool should_convert_to_fast_double_elements =
12267  object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
12268  if (has_smi_only_elements) {
12269  smi_mode = kForceSmiElements;
12270  }
12271 
12272  if (should_convert_to_fast_double_elements) {
12273  SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
12274  } else {
12275  SetFastElementsCapacityAndLength(object, new_length, new_length,
12276  smi_mode);
12277  }
12278  object->ValidateElements();
12279 #ifdef DEBUG
12280  if (FLAG_trace_normalization) {
12281  PrintF("Object elements are fast case again:\n");
12282  object->Print();
12283  }
12284 #endif
12285  }
12286  return value;
12287 }
12288 
12289 Handle<Object> JSObject::SetFastDoubleElement(
12290  Handle<JSObject> object,
12291  uint32_t index,
12292  Handle<Object> value,
12293  StrictMode strict_mode,
12294  bool check_prototype) {
12295  ASSERT(object->HasFastDoubleElements());
12296 
12297  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
12298  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
12299 
12300  // If storing to an element that isn't in the array, pass the store request
12301  // up the prototype chain before storing in the receiver's elements.
12302  if (check_prototype &&
12303  (index >= elms_length ||
12304  Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
12305  bool found;
12306  Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
12307  index, value, &found, strict_mode);
12308  if (found) return result;
12309  }
12310 
12311  // If the value object is not a heap number, switch to fast elements and try
12312  // again.
12313  bool value_is_smi = value->IsSmi();
12314  bool introduces_holes = true;
12315  uint32_t length = elms_length;
12316  if (object->IsJSArray()) {
12317  CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
12318  introduces_holes = index > length;
12319  } else {
12320  introduces_holes = index >= elms_length;
12321  }
12322 
12323  if (!value->IsNumber()) {
12324  SetFastElementsCapacityAndLength(object, elms_length, length,
12325  kDontAllowSmiElements);
12326  Handle<Object> result = SetFastElement(object, index, value, strict_mode,
12327  check_prototype);
12328  RETURN_IF_EMPTY_HANDLE_VALUE(object->GetIsolate(), result,
12329  Handle<Object>());
12330  object->ValidateElements();
12331  return result;
12332  }
12333 
12334  double double_value = value_is_smi
12335  ? static_cast<double>(Handle<Smi>::cast(value)->value())
12336  : Handle<HeapNumber>::cast(value)->value();
12337 
12338  // If the array is growing, and it's not growth by a single element at the
12339  // end, make sure that the ElementsKind is HOLEY.
12340  ElementsKind elements_kind = object->GetElementsKind();
12341  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
12342  ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
12343  TransitionElementsKind(object, transitioned_kind);
12344  }
12345 
12346  // Check whether there is extra space in the fixed array.
12347  if (index < elms_length) {
12348  Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
12349  elms->set(index, double_value);
12350  if (object->IsJSArray()) {
12351  // Update the length of the array if needed.
12352  uint32_t array_length = 0;
12353  CHECK(
12354  Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
12355  if (index >= array_length) {
12356  Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
12357  }
12358  }
12359  return value;
12360  }
12361 
12362  // Allow gap in fast case.
12363  if ((index - elms_length) < kMaxGap) {
12364  // Try allocating extra space.
12365  int new_capacity = NewElementsCapacity(index+1);
12366  if (!object->ShouldConvertToSlowElements(new_capacity)) {
12367  ASSERT(static_cast<uint32_t>(new_capacity) > index);
12368  SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
12369  FixedDoubleArray::cast(object->elements())->set(index, double_value);
12370  object->ValidateElements();
12371  return value;
12372  }
12373  }
12374 
12375  // Otherwise default to slow case.
12376  ASSERT(object->HasFastDoubleElements());
12377  ASSERT(object->map()->has_fast_double_elements());
12378  ASSERT(object->elements()->IsFixedDoubleArray() ||
12379  object->elements()->length() == 0);
12380 
12381  NormalizeElements(object);
12382  ASSERT(object->HasDictionaryElements());
12383  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
12384 }
12385 
12386 
12387 Handle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
12388  uint32_t index,
12389  Handle<Object> value,
12390  PropertyAttributes attributes,
12391  StrictMode strict_mode) {
12392  if (object->IsJSProxy()) {
12393  return JSProxy::SetElementWithHandler(
12394  Handle<JSProxy>::cast(object), object, index, value, strict_mode);
12395  }
12396  return JSObject::SetElement(
12397  Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
12398 }
12399 
12400 
12401 Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
12402  uint32_t index,
12403  Handle<Object> value,
12404  StrictMode strict_mode) {
12405  ASSERT(!object->HasExternalArrayElements());
12406  return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
12407 }
12408 
12409 
12410 Handle<Object> JSObject::SetElement(Handle<JSObject> object,
12411  uint32_t index,
12412  Handle<Object> value,
12413  PropertyAttributes attributes,
12414  StrictMode strict_mode,
12415  bool check_prototype,
12416  SetPropertyMode set_mode) {
12417  Isolate* isolate = object->GetIsolate();
12418 
12419  if (object->HasExternalArrayElements() ||
12420  object->HasFixedTypedArrayElements()) {
12421  if (!value->IsNumber() && !value->IsUndefined()) {
12422  bool has_exception;
12423  Handle<Object> number =
12424  Execution::ToNumber(isolate, value, &has_exception);
12425  if (has_exception) return Handle<Object>();
12426  value = number;
12427  }
12428  }
12429 
12430  // Check access rights if needed.
12431  if (object->IsAccessCheckNeeded()) {
12432  if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_SET)) {
12433  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_SET);
12434  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
12435  return value;
12436  }
12437  }
12438 
12439  if (object->IsJSGlobalProxy()) {
12440  Handle<Object> proto(object->GetPrototype(), isolate);
12441  if (proto->IsNull()) return value;
12442  ASSERT(proto->IsJSGlobalObject());
12443  return SetElement(Handle<JSObject>::cast(proto), index, value, attributes,
12444  strict_mode,
12445  check_prototype,
12446  set_mode);
12447  }
12448 
12449  // Don't allow element properties to be redefined for external arrays.
12450  if ((object->HasExternalArrayElements() ||
12451  object->HasFixedTypedArrayElements()) &&
12452  set_mode == DEFINE_PROPERTY) {
12453  Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
12454  Handle<Object> args[] = { object, number };
12455  Handle<Object> error = isolate->factory()->NewTypeError(
12456  "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
12457  isolate->Throw(*error);
12458  return Handle<Object>();
12459  }
12460 
12461  // Normalize the elements to enable attributes on the property.
12462  if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
12463  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
12464  // Make sure that we never go back to fast case.
12465  dictionary->set_requires_slow_elements();
12466  }
12467 
12468  if (!object->map()->is_observed()) {
12469  return object->HasIndexedInterceptor()
12470  ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
12471  check_prototype,
12472  set_mode)
12473  : SetElementWithoutInterceptor(object, index, value, attributes,
12474  strict_mode,
12475  check_prototype,
12476  set_mode);
12477  }
12478 
12479  PropertyAttributes old_attributes =
12480  JSReceiver::GetLocalElementAttribute(object, index);
12481  Handle<Object> old_value = isolate->factory()->the_hole_value();
12482  Handle<Object> old_length_handle;
12483  Handle<Object> new_length_handle;
12484 
12485  if (old_attributes != ABSENT) {
12486  if (object->GetLocalElementAccessorPair(index) == NULL) {
12487  old_value = Object::GetElementNoExceptionThrown(isolate, object, index);
12488  }
12489  } else if (object->IsJSArray()) {
12490  // Store old array length in case adding an element grows the array.
12491  old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12492  isolate);
12493  }
12494 
12495  // Check for lookup interceptor
12496  Handle<Object> result = object->HasIndexedInterceptor()
12497  ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
12498  check_prototype,
12499  set_mode)
12500  : SetElementWithoutInterceptor(object, index, value, attributes,
12501  strict_mode,
12502  check_prototype,
12503  set_mode);
12504  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
12505 
12506  Handle<String> name = isolate->factory()->Uint32ToString(index);
12507  PropertyAttributes new_attributes = GetLocalElementAttribute(object, index);
12508  if (old_attributes == ABSENT) {
12509  if (object->IsJSArray() &&
12510  !old_length_handle->SameValue(
12511  Handle<JSArray>::cast(object)->length())) {
12512  new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
12513  isolate);
12514  uint32_t old_length = 0;
12515  uint32_t new_length = 0;
12516  CHECK(old_length_handle->ToArrayIndex(&old_length));
12517  CHECK(new_length_handle->ToArrayIndex(&new_length));
12518 
12519  BeginPerformSplice(Handle<JSArray>::cast(object));
12520  EnqueueChangeRecord(object, "add", name, old_value);
12521  EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
12522  old_length_handle);
12523  EndPerformSplice(Handle<JSArray>::cast(object));
12524  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
12525  EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
12526  new_length - old_length);
12527  } else {
12528  EnqueueChangeRecord(object, "add", name, old_value);
12529  }
12530  } else if (old_value->IsTheHole()) {
12531  EnqueueChangeRecord(object, "reconfigure", name, old_value);
12532  } else {
12533  Handle<Object> new_value =
12534  Object::GetElementNoExceptionThrown(isolate, object, index);
12535  bool value_changed = !old_value->SameValue(*new_value);
12536  if (old_attributes != new_attributes) {
12537  if (!value_changed) old_value = isolate->factory()->the_hole_value();
12538  EnqueueChangeRecord(object, "reconfigure", name, old_value);
12539  } else if (value_changed) {
12540  EnqueueChangeRecord(object, "update", name, old_value);
12541  }
12542  }
12543 
12544  return result;
12545 }
12546 
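// Illustrative sketch (not part of objects.cc): the decision tree SetElement
// uses above to pick an Object.observe change record once a write on an
// observed object has succeeded. The record names ("add", "update",
// "reconfigure", plus a splice when a JSArray grew) match the strings used
// above; the boolean inputs are simplified stand-ins for the handle checks.
#include <string>

namespace sketch {

std::string ClassifyElementWrite(bool was_present,
                                 bool old_value_was_hole,
                                 bool attributes_changed,
                                 bool value_changed,
                                 bool array_length_grew) {
  if (!was_present) {
    // Brand new index: "add", plus a splice record when the write pushed a
    // JSArray's length out.
    return array_length_grew ? "add + splice" : "add";
  }
  if (old_value_was_hole) return "reconfigure";  // e.g. accessor reconfigured
  if (attributes_changed) return "reconfigure";
  if (value_changed) return "update";
  return "";  // nothing observable changed, no record needed
}

}  // namespace sketch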
12547 
12548 Handle<Object> JSObject::SetElementWithoutInterceptor(
12549  Handle<JSObject> object,
12550  uint32_t index,
12551  Handle<Object> value,
12552  PropertyAttributes attributes,
12553  StrictMode strict_mode,
12554  bool check_prototype,
12555  SetPropertyMode set_mode) {
12556  ASSERT(object->HasDictionaryElements() ||
12557  object->HasDictionaryArgumentsElements() ||
12558  (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
12559  Isolate* isolate = object->GetIsolate();
12560  if (FLAG_trace_external_array_abuse &&
12561  IsExternalArrayElementsKind(object->GetElementsKind())) {
12562  CheckArrayAbuse(*object, "external elements write", index);
12563  }
12564  if (FLAG_trace_js_array_abuse &&
12565  !IsExternalArrayElementsKind(object->GetElementsKind())) {
12566  if (object->IsJSArray()) {
12567  CheckArrayAbuse(*object, "elements write", index, true);
12568  }
12569  }
12570  switch (object->GetElementsKind()) {
12571  case FAST_SMI_ELEMENTS:
12572  case FAST_ELEMENTS:
12573  case FAST_HOLEY_SMI_ELEMENTS:
12574  case FAST_HOLEY_ELEMENTS:
12575  return SetFastElement(object, index, value, strict_mode, check_prototype);
12576  case FAST_DOUBLE_ELEMENTS:
12577  case FAST_HOLEY_DOUBLE_ELEMENTS:
12578  return SetFastDoubleElement(object, index, value, strict_mode,
12579  check_prototype);
12580 
12581 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
12582  case EXTERNAL_##TYPE##_ELEMENTS: { \
12583  Handle<External##Type##Array> array( \
12584  External##Type##Array::cast(object->elements())); \
12585  return External##Type##Array::SetValue(array, index, value); \
12586  } \
12587  case TYPE##_ELEMENTS: { \
12588  Handle<Fixed##Type##Array> array( \
12589  Fixed##Type##Array::cast(object->elements())); \
12590  return Fixed##Type##Array::SetValue(array, index, value); \
12591  }
12592 
12593  TYPED_ARRAYS(TYPED_ARRAY_CASE)
12594 
12595 #undef TYPED_ARRAY_CASE
12596 
12597  case DICTIONARY_ELEMENTS:
12598  return SetDictionaryElement(object, index, value, attributes, strict_mode,
12599  check_prototype,
12600  set_mode);
12601  case SLOPPY_ARGUMENTS_ELEMENTS: {
12602  Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
12603  uint32_t length = parameter_map->length();
12604  Handle<Object> probe = index < length - 2 ?
12605  Handle<Object>(parameter_map->get(index + 2), isolate) :
12606  Handle<Object>();
12607  if (!probe.is_null() && !probe->IsTheHole()) {
12608  Handle<Context> context(Context::cast(parameter_map->get(0)));
12609  int context_index = Handle<Smi>::cast(probe)->value();
12610  ASSERT(!context->get(context_index)->IsTheHole());
12611  context->set(context_index, *value);
12612  // Redefining attributes of an aliased element destroys fast aliasing.
12613  if (set_mode == SET_PROPERTY || attributes == NONE) return value;
12614  parameter_map->set_the_hole(index + 2);
12615  // For elements that are still writable we re-establish slow aliasing.
12616  if ((attributes & READ_ONLY) == 0) {
12617  value = Handle<Object>::cast(
12618  isolate->factory()->NewAliasedArgumentsEntry(context_index));
12619  }
12620  }
12621  Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
12622  if (arguments->IsDictionary()) {
12623  return SetDictionaryElement(object, index, value, attributes,
12624  strict_mode,
12625  check_prototype,
12626  set_mode);
12627  } else {
12628  return SetFastElement(object, index, value, strict_mode,
12629  check_prototype);
12630  }
12631  }
12632  }
12633  // All possible cases have been handled above. Add a return to avoid the
12634  // complaints from the compiler.
12635  UNREACHABLE();
12636  return isolate->factory()->null_value();
12637 }
12638 
12639 
12640 const double AllocationSite::kPretenureRatio = 0.85;
12641 
12642 
12643 void AllocationSite::ResetPretenureDecision() {
12644  set_pretenure_decision(kUndecided);
12645  set_memento_found_count(0);
12646  set_memento_create_count(0);
12647 }
12648 
12649 
12650 PretenureFlag AllocationSite::GetPretenureMode() {
12651  PretenureDecision mode = pretenure_decision();
12652  // Zombie objects "decide" to be untenured.
12653  return mode == kTenure ? TENURED : NOT_TENURED;
12654 }
12655 
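// Illustrative sketch (not part of objects.cc): how the memento counters kept
// on an AllocationSite feed the pretenuring decision implied by
// kPretenureRatio above. The actual decision is taken by the GC elsewhere
// (heap code); this only restates the ratio test under that assumption.
#include <cstdio>

namespace sketch {

const double kPretenureRatio = 0.85;

enum PretenureDecision { kUndecided, kDontTenure, kTenure };

// If roughly 85% or more of the mementos created for a site were found
// again by the GC, future allocations from that site go straight to old
// space (TENURED); otherwise they stay in new space.
PretenureDecision Decide(int memento_found_count, int memento_create_count) {
  if (memento_create_count == 0) return kUndecided;
  double ratio = static_cast<double>(memento_found_count) /
                 static_cast<double>(memento_create_count);
  return ratio >= kPretenureRatio ? kTenure : kDontTenure;
}

}  // namespace sketch

int main() {
  std::printf("17/20 -> %d\n", static_cast<int>(sketch::Decide(17, 20)));  // kTenure
  std::printf(" 5/20 -> %d\n", static_cast<int>(sketch::Decide(5, 20)));   // kDontTenure
  return 0;
}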
12656 
12657 bool AllocationSite::IsNestedSite() {
12658  ASSERT(FLAG_trace_track_allocation_sites);
12659  Object* current = GetHeap()->allocation_sites_list();
12660  while (current->IsAllocationSite()) {
12661  AllocationSite* current_site = AllocationSite::cast(current);
12662  if (current_site->nested_site() == this) {
12663  return true;
12664  }
12665  current = current_site->weak_next();
12666  }
12667  return false;
12668 }
12669 
12670 
12671 void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
12672  ElementsKind to_kind) {
12673  Isolate* isolate = site->GetIsolate();
12674 
12675  if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
12676  Handle<JSArray> transition_info =
12677  handle(JSArray::cast(site->transition_info()));
12678  ElementsKind kind = transition_info->GetElementsKind();
12679  // if kind is holey ensure that to_kind is as well.
12680  if (IsHoleyElementsKind(kind)) {
12681  to_kind = GetHoleyElementsKind(to_kind);
12682  }
12683  if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12684  // If the array is huge, it's not likely to be defined in a local
12685  // function, so we shouldn't make new instances of it very often.
12686  uint32_t length = 0;
12687  CHECK(transition_info->length()->ToArrayIndex(&length));
12688  if (length <= kMaximumArrayBytesToPretransition) {
12689  if (FLAG_trace_track_allocation_sites) {
12690  bool is_nested = site->IsNestedSite();
12691  PrintF(
12692  "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
12693  reinterpret_cast<void*>(*site),
12694  is_nested ? "(nested)" : "",
12695  ElementsKindToString(kind),
12696  ElementsKindToString(to_kind));
12697  }
12698  JSObject::TransitionElementsKind(transition_info, to_kind);
12699  site->dependent_code()->DeoptimizeDependentCodeGroup(
12700  isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
12701  }
12702  }
12703  } else {
12704  ElementsKind kind = site->GetElementsKind();
12705  // if kind is holey ensure that to_kind is as well.
12706  if (IsHoleyElementsKind(kind)) {
12707  to_kind = GetHoleyElementsKind(to_kind);
12708  }
12709  if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
12710  if (FLAG_trace_track_allocation_sites) {
12711  PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
12712  reinterpret_cast<void*>(*site),
12713  ElementsKindToString(kind),
12714  ElementsKindToString(to_kind));
12715  }
12716  site->SetElementsKind(to_kind);
12717  site->dependent_code()->DeoptimizeDependentCodeGroup(
12718  isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
12719  }
12720  }
12721 }
12722 
12723 
12724 // static
12725 void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
12726  Reason reason,
12727  CompilationInfo* info) {
12728  DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
12729  Handle<DependentCode> dep(site->dependent_code());
12730  Handle<DependentCode> codes =
12731  DependentCode::Insert(dep, group, info->object_wrapper());
12732  if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
12733  info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
12734 }
12735 
12736 
12737 void JSObject::UpdateAllocationSite(Handle<JSObject> object,
12738  ElementsKind to_kind) {
12739  if (!object->IsJSArray()) return;
12740 
12741  Heap* heap = object->GetHeap();
12742  if (!heap->InNewSpace(*object)) return;
12743 
12744  Handle<AllocationSite> site;
12745  {
12746  DisallowHeapAllocation no_allocation;
12747  // Check if there is potentially a memento behind the object. If
12748  // the last word of the memento is on another page we return
12749  // immediately.
12750  Address object_address = object->address();
12751  Address memento_address = object_address + JSArray::kSize;
12752  Address last_memento_word_address = memento_address + kPointerSize;
12753  if (!NewSpacePage::OnSamePage(object_address,
12754  last_memento_word_address)) {
12755  return;
12756  }
12757 
12758  // Either object is the last object in the new space, or there is another
12759  // object of at least word size (the header map word) following it, so
12760  // it suffices to compare ptr and top here.
12761  Address top = heap->NewSpaceTop();
12762  ASSERT(memento_address == top ||
12763  memento_address + HeapObject::kHeaderSize <= top);
12764  if (memento_address == top) return;
12765 
12766  HeapObject* candidate = HeapObject::FromAddress(memento_address);
12767  if (candidate->map() != heap->allocation_memento_map()) return;
12768 
12769  AllocationMemento* memento = AllocationMemento::cast(candidate);
12770  if (!memento->IsValid()) return;
12771 
12772  // Walk through to the Allocation Site
12773  site = handle(memento->GetAllocationSite());
12774  }
12775  AllocationSite::DigestTransitionFeedback(site, to_kind);
12776 }
12777 
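// Illustrative sketch (not part of objects.cc): the address arithmetic behind
// UpdateAllocationSite above. An AllocationMemento, when present, sits
// immediately behind the JSArray in new space, so the probe address is the
// object's address plus the array's instance size. kPointerSize and
// kArraySize are illustrative stand-ins for the real kPointerSize and
// JSArray::kSize.
#include <cstdint>

namespace sketch {

typedef uintptr_t Address;

const Address kPointerSize = sizeof(void*);
const Address kArraySize = 4 * kPointerSize;  // stand-in for JSArray::kSize

// Compute where a memento would start and the last word that must still be
// readable. The real code additionally requires that the last word lies on
// the same new-space page, that the probe is below the new-space top, and
// that the map word at the probe address is the allocation-memento map.
void ProbeForMemento(Address object_address,
                     Address* memento_address,
                     Address* last_memento_word) {
  *memento_address = object_address + kArraySize;
  *last_memento_word = *memento_address + kPointerSize;
}

}  // namespace sketch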
12778 
12779 void JSObject::TransitionElementsKind(Handle<JSObject> object,
12780  ElementsKind to_kind) {
12781  ElementsKind from_kind = object->map()->elements_kind();
12782 
12783  if (IsFastHoleyElementsKind(from_kind)) {
12784  to_kind = GetHoleyElementsKind(to_kind);
12785  }
12786 
12787  if (from_kind == to_kind) return;
12788  // Don't update the site if to_kind isn't fast
12789  if (IsFastElementsKind(to_kind)) {
12790  UpdateAllocationSite(object, to_kind);
12791  }
12792 
12793  Isolate* isolate = object->GetIsolate();
12794  if (object->elements() == isolate->heap()->empty_fixed_array() ||
12795  (IsFastSmiOrObjectElementsKind(from_kind) &&
12796  IsFastSmiOrObjectElementsKind(to_kind)) ||
12797  (from_kind == FAST_DOUBLE_ELEMENTS &&
12798  to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
12799  ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
12800  // No change is needed to the elements() buffer, the transition
12801  // only requires a map change.
12802  Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
12803  MigrateToMap(object, new_map);
12804  if (FLAG_trace_elements_transitions) {
12805  Handle<FixedArrayBase> elms(object->elements());
12806  PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
12807  }
12808  return;
12809  }
12810 
12811  Handle<FixedArrayBase> elms(object->elements());
12812  uint32_t capacity = static_cast<uint32_t>(elms->length());
12813  uint32_t length = capacity;
12814 
12815  if (object->IsJSArray()) {
12816  Object* raw_length = Handle<JSArray>::cast(object)->length();
12817  if (raw_length->IsUndefined()) {
12818  // If length is undefined, then JSArray is being initialized and has no
12819  // elements, assume a length of zero.
12820  length = 0;
12821  } else {
12822  CHECK(raw_length->ToArrayIndex(&length));
12823  }
12824  }
12825 
12826  if (IsFastSmiElementsKind(from_kind) &&
12827  IsFastDoubleElementsKind(to_kind)) {
12828  SetFastDoubleElementsCapacityAndLength(object, capacity, length);
12829  object->ValidateElements();
12830  return;
12831  }
12832 
12833  if (IsFastDoubleElementsKind(from_kind) &&
12834  IsFastObjectElementsKind(to_kind)) {
12835  SetFastElementsCapacityAndLength(object, capacity, length,
12836  kDontAllowSmiElements);
12837  object->ValidateElements();
12838  return;
12839  }
12840 
12841  // This method should never be called for any other case than the ones
12842  // handled above.
12843  UNREACHABLE();
12844 }
12845 
12846 
12847 // static
12848 bool Map::IsValidElementsTransition(ElementsKind from_kind,
12849  ElementsKind to_kind) {
12850  // Transitions can't go backwards.
12851  if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
12852  return false;
12853  }
12854 
12855  // Transitions from HOLEY -> PACKED are not allowed.
12856  return !IsFastHoleyElementsKind(from_kind) ||
12857  IsFastHoleyElementsKind(to_kind);
12858 }
12859 
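// Illustrative sketch (not part of objects.cc): the two rules checked by
// Map::IsValidElementsTransition above, restated over a simplified fast
// elements-kind lattice. The ordering below mirrors the fast-kind sequence
// (SMI -> DOUBLE -> OBJECT, packed before holey), but the helpers are
// hand-rolled stand-ins for the ones in elements-kind.h.
namespace sketch {

enum Kind {
  FAST_SMI, FAST_HOLEY_SMI,
  FAST_DOUBLE, FAST_HOLEY_DOUBLE,
  FAST_OBJECT, FAST_HOLEY_OBJECT
};

bool IsHoley(Kind k) {
  return k == FAST_HOLEY_SMI || k == FAST_HOLEY_DOUBLE ||
         k == FAST_HOLEY_OBJECT;
}

// A transition is "more general" when it moves forward in the sequence.
bool IsMoreGeneralTransition(Kind from, Kind to) {
  return static_cast<int>(to) > static_cast<int>(from);
}

// Rule 1: transitions never go backwards in generality.
// Rule 2: once holey, always holey (HOLEY -> PACKED is forbidden).
bool IsValidTransition(Kind from, Kind to) {
  if (!IsMoreGeneralTransition(from, to)) return false;
  return !IsHoley(from) || IsHoley(to);
}

}  // namespace sketch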
12860 
12861 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
12862  uint32_t index,
12863  Handle<Object> value) {
12864  CALL_HEAP_FUNCTION_VOID(array->GetIsolate(),
12865  array->JSArrayUpdateLengthFromIndex(index, *value));
12866 }
12867 
12868 
12869 MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
12870  Object* value) {
12871  uint32_t old_len = 0;
12872  CHECK(length()->ToArrayIndex(&old_len));
12873  // Check to see if we need to update the length. For now, we make
12874  // sure that the length stays within 32-bits (unsigned).
12875  if (index >= old_len && index != 0xffffffff) {
12876  Object* len;
12877  { MaybeObject* maybe_len =
12878  GetHeap()->NumberFromDouble(static_cast<double>(index) + 1);
12879  if (!maybe_len->ToObject(&len)) return maybe_len;
12880  }
12881  set_length(len);
12882  }
12883  return value;
12884 }
12885 
12886 
12887 Handle<Object> JSObject::GetElementWithInterceptor(Handle<JSObject> object,
12888  Handle<Object> receiver,
12889  uint32_t index) {
12890  Isolate* isolate = object->GetIsolate();
12891 
12892  // Make sure that the top context does not change when doing
12893  // callbacks or interceptor calls.
12894  AssertNoContextChange ncc(isolate);
12895 
12896  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
12897  if (!interceptor->getter()->IsUndefined()) {
12898  v8::IndexedPropertyGetterCallback getter =
12899  v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
12900  LOG(isolate,
12901  ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
12902  PropertyCallbackArguments
12903  args(isolate, interceptor->data(), *receiver, *object);
12904  v8::Handle<v8::Value> result = args.Call(getter, index);
12905  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
12906  if (!result.IsEmpty()) {
12907  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
12908  result_internal->VerifyApiCallResultType();
12909  // Rebox handle before return.
12910  return Handle<Object>(*result_internal, isolate);
12911  }
12912  }
12913 
12914  ElementsAccessor* handler = object->GetElementsAccessor();
12915  Handle<Object> result = handler->Get(receiver, object, index);
12916  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
12917  if (!result->IsTheHole()) return result;
12918 
12919  Handle<Object> proto(object->GetPrototype(), isolate);
12920  if (proto->IsNull()) return isolate->factory()->undefined_value();
12921  return Object::GetElementWithReceiver(isolate, proto, receiver, index);
12922 }
12923 
12924 
12925 bool JSObject::HasDenseElements() {
12926  int capacity = 0;
12927  int used = 0;
12928  GetElementsCapacityAndUsage(&capacity, &used);
12929  return (capacity == 0) || (used > (capacity / 2));
12930 }
12931 
12932 
12933 void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
12934  *capacity = 0;
12935  *used = 0;
12936 
12937  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
12938  FixedArray* backing_store = NULL;
12939  switch (GetElementsKind()) {
12940  case SLOPPY_ARGUMENTS_ELEMENTS:
12941  backing_store_base =
12942  FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
12943  backing_store = FixedArray::cast(backing_store_base);
12944  if (backing_store->IsDictionary()) {
12945  SeededNumberDictionary* dictionary =
12946  SeededNumberDictionary::cast(backing_store);
12947  *capacity = dictionary->Capacity();
12948  *used = dictionary->NumberOfElements();
12949  break;
12950  }
12951  // Fall through.
12952  case FAST_SMI_ELEMENTS:
12953  case FAST_ELEMENTS:
12954  if (IsJSArray()) {
12955  *capacity = backing_store_base->length();
12956  *used = Smi::cast(JSArray::cast(this)->length())->value();
12957  break;
12958  }
12959  // Fall through if packing is not guaranteed.
12960  case FAST_HOLEY_SMI_ELEMENTS:
12961  case FAST_HOLEY_ELEMENTS:
12962  backing_store = FixedArray::cast(backing_store_base);
12963  *capacity = backing_store->length();
12964  for (int i = 0; i < *capacity; ++i) {
12965  if (!backing_store->get(i)->IsTheHole()) ++(*used);
12966  }
12967  break;
12968  case DICTIONARY_ELEMENTS: {
12969  SeededNumberDictionary* dictionary = element_dictionary();
12970  *capacity = dictionary->Capacity();
12971  *used = dictionary->NumberOfElements();
12972  break;
12973  }
12974  case FAST_DOUBLE_ELEMENTS:
12975  if (IsJSArray()) {
12976  *capacity = backing_store_base->length();
12977  *used = Smi::cast(JSArray::cast(this)->length())->value();
12978  break;
12979  }
12980  // Fall through if packing is not guaranteed.
12981  case FAST_HOLEY_DOUBLE_ELEMENTS: {
12982  *capacity = elements()->length();
12983  if (*capacity == 0) break;
12984  FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
12985  for (int i = 0; i < *capacity; i++) {
12986  if (!elms->is_the_hole(i)) ++(*used);
12987  }
12988  break;
12989  }
12990 
12991 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
12992  case EXTERNAL_##TYPE##_ELEMENTS: \
12993  case TYPE##_ELEMENTS: \
12994 
12995  TYPED_ARRAYS(TYPED_ARRAY_CASE)
12996 #undef TYPED_ARRAY_CASE
12997  {
12998  // External arrays are considered 100% used.
12999  FixedArrayBase* external_array = FixedArrayBase::cast(elements());
13000  *capacity = external_array->length();
13001  *used = external_array->length();
13002  break;
13003  }
13004  }
13005 }
13006 
13007 
13008 bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
13009  uint32_t index;
13010  if (HasFastElements() && key->ToArrayIndex(&index)) {
13011  Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
13012  uint32_t capacity = static_cast<uint32_t>(backing_store->length());
13013  if (index >= capacity) {
13014  if ((index - capacity) >= kMaxGap) return true;
13015  uint32_t new_capacity = NewElementsCapacity(index + 1);
13016  return ShouldConvertToSlowElements(new_capacity);
13017  }
13018  }
13019  return false;
13020 }
13021 
13022 
13023 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
13024  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
13025  kMaxUncheckedFastElementsLength);
13026  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
13027  (new_capacity <= kMaxUncheckedFastElementsLength &&
13028  GetHeap()->InNewSpace(this))) {
13029  return false;
13030  }
13031  // If the fast-case backing storage takes up roughly three times as
13032  // much space (in machine words) as a dictionary backing storage
13033  // would, the object should have slow elements.
13034  int old_capacity = 0;
13035  int used_elements = 0;
13036  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
13037  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
13038  SeededNumberDictionary::kEntrySize;
13039  return 3 * dictionary_size <= new_capacity;
13040 }
13041 
13042 
13043 bool JSObject::ShouldConvertToFastElements() {
13044  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
13045  // If the elements are sparse, we should not go back to fast case.
13046  if (!HasDenseElements()) return false;
13047  // An object requiring access checks is never allowed to have fast
13048  // elements. If it had fast elements we would skip security checks.
13049  if (IsAccessCheckNeeded()) return false;
13050  // Observed objects may not go to fast mode because they rely on map checks,
13051  // and for fast element accesses we sometimes check element kinds only.
13052  if (map()->is_observed()) return false;
13053 
13054  FixedArray* elements = FixedArray::cast(this->elements());
13055  SeededNumberDictionary* dictionary = NULL;
13056  if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
13057  dictionary = SeededNumberDictionary::cast(elements->get(1));
13058  } else {
13059  dictionary = SeededNumberDictionary::cast(elements);
13060  }
13061  // If an element has been added at a very high index in the elements
13062  // dictionary, we cannot go back to fast case.
13063  if (dictionary->requires_slow_elements()) return false;
13064  // If the dictionary backing storage takes up roughly half as much
13065  // space (in machine words) as a fast-case backing storage would,
13066  // the object should have fast elements.
13067  uint32_t array_size = 0;
13068  if (IsJSArray()) {
13069  CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
13070  } else {
13071  array_size = dictionary->max_number_key();
13072  }
13073  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
13074  SeededNumberDictionary::kEntrySize;
13075  return 2 * dictionary_size >= array_size;
13076 }
13077 
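// Illustrative sketch (not part of objects.cc): the two footprint heuristics
// used by ShouldConvertToSlowElements and ShouldConvertToFastElements above,
// restated side by side. kEntrySize stands in for
// SeededNumberDictionary::kEntrySize (assumed to be 3 words per entry: key,
// value, details). The real slow-path check first rounds the element count
// up to a dictionary capacity via ComputeCapacity; that rounding, and the
// new-space / access-check / observation conditions, are omitted here.
namespace sketch {

const int kEntrySize = 3;  // assumed words per dictionary entry

// Go to dictionary mode when the fast backing store would need at least
// three times as many machine words as a dictionary holding the same
// number of elements.
bool ShouldGoDictionary(int new_fast_capacity, int used_elements) {
  int dictionary_words = used_elements * kEntrySize;
  return 3 * dictionary_words <= new_fast_capacity;
}

// Go back to fast elements when the dictionary already uses at least half
// the words a fast backing store covering indices 0..array_size-1 would.
bool ShouldGoFast(int dictionary_capacity, int array_size) {
  int dictionary_words = dictionary_capacity * kEntrySize;
  return 2 * dictionary_words >= array_size;
}

}  // namespace sketch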
13078 
13079 bool JSObject::ShouldConvertToFastDoubleElements(
13080  bool* has_smi_only_elements) {
13081  *has_smi_only_elements = false;
13082  if (HasSloppyArgumentsElements()) return false;
13083  if (FLAG_unbox_double_arrays) {
13084  ASSERT(HasDictionaryElements());
13085  SeededNumberDictionary* dictionary = element_dictionary();
13086  bool found_double = false;
13087  for (int i = 0; i < dictionary->Capacity(); i++) {
13088  Object* key = dictionary->KeyAt(i);
13089  if (key->IsNumber()) {
13090  Object* value = dictionary->ValueAt(i);
13091  if (!value->IsNumber()) return false;
13092  if (!value->IsSmi()) {
13093  found_double = true;
13094  }
13095  }
13096  }
13097  *has_smi_only_elements = !found_double;
13098  return found_double;
13099  } else {
13100  return false;
13101  }
13102 }
13103 
13104 
13105 // Certain compilers request function template instantiation when they
13106 // see the definition of the other template functions in the
13107 // class. This requires us to have the template functions put
13108 // together, so even though this function belongs in objects-debug.cc,
13109 // we keep it here instead to satisfy certain compilers.
13110 #ifdef OBJECT_PRINT
13111 template<typename Shape, typename Key>
13112 void Dictionary<Shape, Key>::Print(FILE* out) {
13113  int capacity = HashTable<Shape, Key>::Capacity();
13114  for (int i = 0; i < capacity; i++) {
13115  Object* k = HashTable<Shape, Key>::KeyAt(i);
13116  if (HashTable<Shape, Key>::IsKey(k)) {
13117  PrintF(out, " ");
13118  if (k->IsString()) {
13119  String::cast(k)->StringPrint(out);
13120  } else {
13121  k->ShortPrint(out);
13122  }
13123  PrintF(out, ": ");
13124  ValueAt(i)->ShortPrint(out);
13125  PrintF(out, "\n");
13126  }
13127  }
13128 }
13129 #endif
13130 
13131 
13132 template<typename Shape, typename Key>
13133 void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) {
13134  int pos = 0;
13135  int capacity = HashTable<Shape, Key>::Capacity();
13136  DisallowHeapAllocation no_gc;
13137  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
13138  for (int i = 0; i < capacity; i++) {
13139  Object* k = Dictionary<Shape, Key>::KeyAt(i);
13140  if (Dictionary<Shape, Key>::IsKey(k)) {
13141  elements->set(pos++, ValueAt(i), mode);
13142  }
13143  }
13144  ASSERT(pos == elements->length());
13145 }
13146 
13147 
13148 InterceptorInfo* JSObject::GetNamedInterceptor() {
13149  ASSERT(map()->has_named_interceptor());
13150  JSFunction* constructor = JSFunction::cast(map()->constructor());
13151  ASSERT(constructor->shared()->IsApiFunction());
13152  Object* result =
13153  constructor->shared()->get_api_func_data()->named_property_handler();
13154  return InterceptorInfo::cast(result);
13155 }
13156 
13157 
13158 InterceptorInfo* JSObject::GetIndexedInterceptor() {
13159  ASSERT(map()->has_indexed_interceptor());
13160  JSFunction* constructor = JSFunction::cast(map()->constructor());
13161  ASSERT(constructor->shared()->IsApiFunction());
13162  Object* result =
13163  constructor->shared()->get_api_func_data()->indexed_property_handler();
13164  return InterceptorInfo::cast(result);
13165 }
13166 
13167 
13168 Handle<Object> JSObject::GetPropertyPostInterceptor(
13169  Handle<JSObject> object,
13170  Handle<Object> receiver,
13171  Handle<Name> name,
13172  PropertyAttributes* attributes) {
13173  // Check local property in holder, ignore interceptor.
13174  Isolate* isolate = object->GetIsolate();
13175  LookupResult lookup(isolate);
13176  object->LocalLookupRealNamedProperty(*name, &lookup);
13177  Handle<Object> result;
13178  if (lookup.IsFound()) {
13179  result = GetProperty(object, receiver, &lookup, name, attributes);
13180  } else {
13181  // Continue searching via the prototype chain.
13182  Handle<Object> prototype(object->GetPrototype(), isolate);
13183  *attributes = ABSENT;
13184  if (prototype->IsNull()) return isolate->factory()->undefined_value();
13185  result = GetPropertyWithReceiver(prototype, receiver, name, attributes);
13186  }
13187  return result;
13188 }
13189 
13190 
13191 MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
13192  Object* receiver,
13193  Name* name,
13194  PropertyAttributes* attributes) {
13195  // Check local property in holder, ignore interceptor.
13196  LookupResult result(GetIsolate());
13197  LocalLookupRealNamedProperty(name, &result);
13198  if (result.IsFound()) {
13199  return GetProperty(receiver, &result, name, attributes);
13200  }
13201  return GetHeap()->undefined_value();
13202 }
13203 
13204 
13205 Handle<Object> JSObject::GetPropertyWithInterceptor(
13206  Handle<JSObject> object,
13207  Handle<Object> receiver,
13208  Handle<Name> name,
13209  PropertyAttributes* attributes) {
13210  Isolate* isolate = object->GetIsolate();
13211 
13212  // TODO(rossberg): Support symbols in the API.
13213  if (name->IsSymbol()) return isolate->factory()->undefined_value();
13214 
13215  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor(), isolate);
13216  Handle<String> name_string = Handle<String>::cast(name);
13217 
13218  if (!interceptor->getter()->IsUndefined()) {
13219  v8::NamedPropertyGetterCallback getter =
13220  v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
13221  LOG(isolate,
13222  ApiNamedPropertyAccess("interceptor-named-get", *object, *name));
13223  PropertyCallbackArguments
13224  args(isolate, interceptor->data(), *receiver, *object);
13225  v8::Handle<v8::Value> result =
13226  args.Call(getter, v8::Utils::ToLocal(name_string));
13227  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
13228  if (!result.IsEmpty()) {
13229  *attributes = NONE;
13230  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
13231  result_internal->VerifyApiCallResultType();
13232  // Rebox handle to escape this scope.
13233  return handle(*result_internal, isolate);
13234  }
13235  }
13236 
13237  return GetPropertyPostInterceptor(object, receiver, name, attributes);
13238 }
13239 
13240 
13241 bool JSObject::HasRealNamedProperty(Handle<JSObject> object,
13242  Handle<Name> key) {
13243  Isolate* isolate = object->GetIsolate();
13244  SealHandleScope shs(isolate);
13245  // Check access rights if needed.
13246  if (object->IsAccessCheckNeeded()) {
13247  if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) {
13248  isolate->ReportFailedAccessCheckWrapper(object, v8::ACCESS_HAS);
13249  return false;
13250  }
13251  }
13252 
13253  LookupResult result(isolate);
13254  object->LocalLookupRealNamedProperty(*key, &result);
13255  return result.IsFound() && !result.IsInterceptor();
13256 }
13257 
13258 
13259 bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) {
13260  Isolate* isolate = object->GetIsolate();
13261  HandleScope scope(isolate);
13262  // Check access rights if needed.
13263  if (object->IsAccessCheckNeeded()) {
13264  if (!isolate->MayIndexedAccessWrapper(object, index, v8::ACCESS_HAS)) {
13266  return false;
13267  }
13268  }
13269 
13270  if (object->IsJSGlobalProxy()) {
13271  HandleScope scope(isolate);
13272  Handle<Object> proto(object->GetPrototype(), isolate);
13273  if (proto->IsNull()) return false;
13274  ASSERT(proto->IsJSGlobalObject());
13275  return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
13276  }
13277 
13278  return GetElementAttributeWithoutInterceptor(
13279  object, object, index, false) != ABSENT;
13280 }
13281 
13282 
13283 bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
13284  Handle<Name> key) {
13285  Isolate* isolate = object->GetIsolate();
13286  SealHandleScope shs(isolate);
13287  // Check access rights if needed.
13288  if (object->IsAccessCheckNeeded()) {
13289  if (!isolate->MayNamedAccessWrapper(object, key, v8::ACCESS_HAS)) {
13291  return false;
13292  }
13293  }
13294 
13295  LookupResult result(isolate);
13296  object->LocalLookupRealNamedProperty(*key, &result);
13297  return result.IsPropertyCallbacks();
13298 }
13299 
13300 
13301 int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
13302  if (HasFastProperties()) {
13303  Map* map = this->map();
13304  if (filter == NONE) return map->NumberOfOwnDescriptors();
13305  if (filter & DONT_ENUM) {
13306  int result = map->EnumLength();
13307  if (result != kInvalidEnumCacheSentinel) return result;
13308  }
13309  return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
13310  }
13311  return property_dictionary()->NumberOfElementsFilterAttributes(filter);
13312 }
13313 
13314 
13315 void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
13316  Object* temp = get(i);
13317  set(i, get(j));
13318  set(j, temp);
13319  if (this != numbers) {
13320  temp = numbers->get(i);
13321  numbers->set(i, Smi::cast(numbers->get(j)));
13322  numbers->set(j, Smi::cast(temp));
13323  }
13324 }
13325 
13326 
13327 static void InsertionSortPairs(FixedArray* content,
13328  FixedArray* numbers,
13329  int len) {
13330  for (int i = 1; i < len; i++) {
13331  int j = i;
13332  while (j > 0 &&
13333  (NumberToUint32(numbers->get(j - 1)) >
13334  NumberToUint32(numbers->get(j)))) {
13335  content->SwapPairs(numbers, j - 1, j);
13336  j--;
13337  }
13338  }
13339 }
13340 
13341 
13342 void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
13343  // In-place heap sort.
13344  ASSERT(content->length() == numbers->length());
13345 
13346  // Bottom-up max-heap construction.
13347  for (int i = 1; i < len; ++i) {
13348  int child_index = i;
13349  while (child_index > 0) {
13350  int parent_index = ((child_index + 1) >> 1) - 1;
13351  uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13352  uint32_t child_value = NumberToUint32(numbers->get(child_index));
13353  if (parent_value < child_value) {
13354  content->SwapPairs(numbers, parent_index, child_index);
13355  } else {
13356  break;
13357  }
13358  child_index = parent_index;
13359  }
13360  }
13361 
13362  // Extract elements and create sorted array.
13363  for (int i = len - 1; i > 0; --i) {
13364  // Put max element at the back of the array.
13365  content->SwapPairs(numbers, 0, i);
13366  // Sift down the new top element.
13367  int parent_index = 0;
13368  while (true) {
13369  int child_index = ((parent_index + 1) << 1) - 1;
13370  if (child_index >= i) break;
13371  uint32_t child1_value = NumberToUint32(numbers->get(child_index));
13372  uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
13373  uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
13374  if (child_index + 1 >= i || child1_value > child2_value) {
13375  if (parent_value > child1_value) break;
13376  content->SwapPairs(numbers, parent_index, child_index);
13377  parent_index = child_index;
13378  } else {
13379  if (parent_value > child2_value) break;
13380  content->SwapPairs(numbers, parent_index, child_index + 1);
13381  parent_index = child_index + 1;
13382  }
13383  }
13384  }
13385 }
13386 
13387 
13388 // Sort this array and the numbers as pairs wrt. the (distinct) numbers.
13389 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
13390  ASSERT(this->length() == numbers->length());
13391  // For small arrays, simply use insertion sort.
13392  if (len <= 10) {
13393  InsertionSortPairs(this, numbers, len);
13394  return;
13395  }
13396  // Check the range of indices.
13397  uint32_t min_index = NumberToUint32(numbers->get(0));
13398  uint32_t max_index = min_index;
13399  uint32_t i;
13400  for (i = 1; i < len; i++) {
13401  if (NumberToUint32(numbers->get(i)) < min_index) {
13402  min_index = NumberToUint32(numbers->get(i));
13403  } else if (NumberToUint32(numbers->get(i)) > max_index) {
13404  max_index = NumberToUint32(numbers->get(i));
13405  }
13406  }
13407  if (max_index - min_index + 1 == len) {
13408  // Indices form a contiguous range, unless there are duplicates.
13409  // Do an in-place linear time sort assuming distinct numbers, but
13410  // avoid hanging in case they are not.
13411  for (i = 0; i < len; i++) {
13412  uint32_t p;
13413  uint32_t j = 0;
13414  // While the current element at i is not at its correct position p,
13415  // swap the elements at these two positions.
13416  while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
13417  j++ < len) {
13418  SwapPairs(numbers, i, p);
13419  }
13420  }
13421  } else {
13422  HeapSortPairs(this, numbers, len);
13423  return;
13424  }
13425 }
13426 
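// Illustrative sketch (not part of objects.cc): the linear-time placement
// pass SortPairs uses above when the keys form a contiguous range. Shown
// here over plain parallel arrays; like the V8 code, the inner loop is
// bounded by a guard so duplicate keys cannot make it spin forever.
#include <cstdint>
#include <utility>

namespace sketch {

// Sort `keys` and `content` as pairs, assuming the keys are (mostly) a
// permutation of min_key .. min_key + len - 1. Each element is swapped
// directly into the slot its key names, so the pass is O(len) overall.
void SortContiguousPairs(uint32_t* keys, int* content, int len,
                         uint32_t min_key) {
  for (int i = 0; i < len; i++) {
    int guard = 0;
    uint32_t p;
    while ((p = keys[i] - min_key) != static_cast<uint32_t>(i) &&
           guard++ < len) {
      std::swap(keys[i], keys[p]);
      std::swap(content[i], content[p]);
    }
  }
}

}  // namespace sketch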
13427 
13428 // Fill in the names of local properties into the supplied storage. The main
13429 // purpose of this function is to provide reflection information for the object
13430 // mirrors.
13431 void JSObject::GetLocalPropertyNames(
13432  FixedArray* storage, int index, PropertyAttributes filter) {
13433  ASSERT(storage->length() >= (NumberOfLocalProperties(filter) - index));
13434  if (HasFastProperties()) {
13435  int real_size = map()->NumberOfOwnDescriptors();
13436  DescriptorArray* descs = map()->instance_descriptors();
13437  for (int i = 0; i < real_size; i++) {
13438  if ((descs->GetDetails(i).attributes() & filter) == 0 &&
13439  !FilterKey(descs->GetKey(i), filter)) {
13440  storage->set(index++, descs->GetKey(i));
13441  }
13442  }
13443  } else {
13444  property_dictionary()->CopyKeysTo(storage,
13445  index,
13446  filter,
13448  }
13449 }
13450 
13451 
13452 int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
13453  return GetLocalElementKeys(NULL, filter);
13454 }
13455 
13456 
13457 int JSObject::NumberOfEnumElements() {
13458  // Fast case for objects with no elements.
13459  if (!IsJSValue() && HasFastObjectElements()) {
13460  uint32_t length = IsJSArray() ?
13461  static_cast<uint32_t>(
13462  Smi::cast(JSArray::cast(this)->length())->value()) :
13463  static_cast<uint32_t>(FixedArray::cast(elements())->length());
13464  if (length == 0) return 0;
13465  }
13466  // Compute the number of enumerable elements.
13467  return NumberOfLocalElements(static_cast<PropertyAttributes>(DONT_ENUM));
13468 }
13469 
13470 
13471 int JSObject::GetLocalElementKeys(FixedArray* storage,
13472  PropertyAttributes filter) {
13473  int counter = 0;
13474  switch (GetElementsKind()) {
13475  case FAST_SMI_ELEMENTS:
13476  case FAST_ELEMENTS:
13477  case FAST_HOLEY_SMI_ELEMENTS:
13478  case FAST_HOLEY_ELEMENTS: {
13479  int length = IsJSArray() ?
13480  Smi::cast(JSArray::cast(this)->length())->value() :
13481  FixedArray::cast(elements())->length();
13482  for (int i = 0; i < length; i++) {
13483  if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
13484  if (storage != NULL) {
13485  storage->set(counter, Smi::FromInt(i));
13486  }
13487  counter++;
13488  }
13489  }
13490  ASSERT(!storage || storage->length() >= counter);
13491  break;
13492  }
13493  case FAST_DOUBLE_ELEMENTS:
13494  case FAST_HOLEY_DOUBLE_ELEMENTS: {
13495  int length = IsJSArray() ?
13496  Smi::cast(JSArray::cast(this)->length())->value() :
13497  FixedDoubleArray::cast(elements())->length();
13498  for (int i = 0; i < length; i++) {
13499  if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
13500  if (storage != NULL) {
13501  storage->set(counter, Smi::FromInt(i));
13502  }
13503  counter++;
13504  }
13505  }
13506  ASSERT(!storage || storage->length() >= counter);
13507  break;
13508  }
13509 
13510 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
13511  case EXTERNAL_##TYPE##_ELEMENTS: \
13512  case TYPE##_ELEMENTS: \
13513 
13514  TYPED_ARRAYS(TYPED_ARRAY_CASE)
13515 #undef TYPED_ARRAY_CASE
13516  {
13517  int length = FixedArrayBase::cast(elements())->length();
13518  while (counter < length) {
13519  if (storage != NULL) {
13520  storage->set(counter, Smi::FromInt(counter));
13521  }
13522  counter++;
13523  }
13524  ASSERT(!storage || storage->length() >= counter);
13525  break;
13526  }
13527 
13528  case DICTIONARY_ELEMENTS: {
13529  if (storage != NULL) {
13530  element_dictionary()->CopyKeysTo(storage,
13531  filter,
13533  }
13534  counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
13535  break;
13536  }
13537  case SLOPPY_ARGUMENTS_ELEMENTS: {
13538  FixedArray* parameter_map = FixedArray::cast(elements());
13539  int mapped_length = parameter_map->length() - 2;
13540  FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
13541  if (arguments->IsDictionary()) {
13542  // Copy the keys from arguments first, because Dictionary::CopyKeysTo
13543  // will insert in storage starting at index 0.
13544  SeededNumberDictionary* dictionary =
13545  SeededNumberDictionary::cast(arguments);
13546  if (storage != NULL) {
13547  dictionary->CopyKeysTo(
13548  storage, filter, SeededNumberDictionary::UNSORTED);
13549  }
13550  counter += dictionary->NumberOfElementsFilterAttributes(filter);
13551  for (int i = 0; i < mapped_length; ++i) {
13552  if (!parameter_map->get(i + 2)->IsTheHole()) {
13553  if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13554  ++counter;
13555  }
13556  }
13557  if (storage != NULL) storage->SortPairs(storage, counter);
13558 
13559  } else {
13560  int backing_length = arguments->length();
13561  int i = 0;
13562  for (; i < mapped_length; ++i) {
13563  if (!parameter_map->get(i + 2)->IsTheHole()) {
13564  if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13565  ++counter;
13566  } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
13567  if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13568  ++counter;
13569  }
13570  }
13571  for (; i < backing_length; ++i) {
13572  if (storage != NULL) storage->set(counter, Smi::FromInt(i));
13573  ++counter;
13574  }
13575  }
13576  break;
13577  }
13578  }
13579 
13580  if (this->IsJSValue()) {
13581  Object* val = JSValue::cast(this)->value();
13582  if (val->IsString()) {
13583  String* str = String::cast(val);
13584  if (storage) {
13585  for (int i = 0; i < str->length(); i++) {
13586  storage->set(counter + i, Smi::FromInt(i));
13587  }
13588  }
13589  counter += str->length();
13590  }
13591  }
13592  ASSERT(!storage || storage->length() == counter);
13593  return counter;
13594 }
13595 
13596 
13597 int JSObject::GetEnumElementKeys(FixedArray* storage) {
13598  return GetLocalElementKeys(storage,
13599  static_cast<PropertyAttributes>(DONT_ENUM));
13600 }
13601 
13602 
13603 // StringKey simply carries a string object as key.
13604 class StringKey : public HashTableKey {
13605  public:
13606  explicit StringKey(String* string) :
13607  string_(string),
13608  hash_(HashForObject(string)) { }
13609 
13610  bool IsMatch(Object* string) {
13611  // We know that all entries in a hash table had their hash keys created.
13612  // Use that knowledge to have fast failure.
13613  if (hash_ != HashForObject(string)) {
13614  return false;
13615  }
13616  return string_->Equals(String::cast(string));
13617  }
13618 
13619  uint32_t Hash() { return hash_; }
13620 
13621  uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }
13622 
13623  Object* AsObject(Heap* heap) { return string_; }
13624 
13625  String* string_;
13626  uint32_t hash_;
13627 };
13628 
13629 
13630 // StringSharedKeys are used as keys in the eval cache.
13631 class StringSharedKey : public HashTableKey {
13632  public:
13633  StringSharedKey(String* source,
13634  SharedFunctionInfo* shared,
13635  StrictMode strict_mode,
13636  int scope_position)
13637  : source_(source),
13638  shared_(shared),
13639  strict_mode_(strict_mode),
13640  scope_position_(scope_position) { }
13641 
13642  bool IsMatch(Object* other) {
13643  if (!other->IsFixedArray()) return false;
13644  FixedArray* other_array = FixedArray::cast(other);
13645  SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13646  if (shared != shared_) return false;
13647  int strict_unchecked = Smi::cast(other_array->get(2))->value();
13648  ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13649  StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13650  if (strict_mode != strict_mode_) return false;
13651  int scope_position = Smi::cast(other_array->get(3))->value();
13652  if (scope_position != scope_position_) return false;
13653  String* source = String::cast(other_array->get(1));
13654  return source->Equals(source_);
13655  }
13656 
13657  static uint32_t StringSharedHashHelper(String* source,
13658  SharedFunctionInfo* shared,
13659  StrictMode strict_mode,
13660  int scope_position) {
13661  uint32_t hash = source->Hash();
13662  if (shared->HasSourceCode()) {
13663  // Instead of using the SharedFunctionInfo pointer in the hash
13664  // code computation, we use a combination of the hash of the
13665  // script source code and the start position of the calling scope.
13666  // We do this to ensure that the cache entries can survive garbage
13667  // collection.
13668  Script* script = Script::cast(shared->script());
13669  hash ^= String::cast(script->source())->Hash();
13670  if (strict_mode == STRICT) hash ^= 0x8000;
13671  hash += scope_position;
13672  }
13673  return hash;
13674  }
13675 
13676  uint32_t Hash() {
13677  return StringSharedHashHelper(
13678  source_, shared_, strict_mode_, scope_position_);
13679  }
13680 
13681  uint32_t HashForObject(Object* obj) {
13682  FixedArray* other_array = FixedArray::cast(obj);
13683  SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
13684  String* source = String::cast(other_array->get(1));
13685  int strict_unchecked = Smi::cast(other_array->get(2))->value();
13686  ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
13687  StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
13688  int scope_position = Smi::cast(other_array->get(3))->value();
13689  return StringSharedHashHelper(
13690  source, shared, strict_mode, scope_position);
13691  }
13692 
13693  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
13694  Object* obj;
13695  { MaybeObject* maybe_obj = heap->AllocateFixedArray(4);
13696  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
13697  }
13698  FixedArray* other_array = FixedArray::cast(obj);
13699  other_array->set(0, shared_);
13700  other_array->set(1, source_);
13701  other_array->set(2, Smi::FromInt(strict_mode_));
13702  other_array->set(3, Smi::FromInt(scope_position_));
13703  return other_array;
13704  }
13705 
13706  private:
13707  String* source_;
13708  SharedFunctionInfo* shared_;
13709  StrictMode strict_mode_;
13710  int scope_position_;
13711 };
13712 
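
// Aside: a standalone sketch of the eval-cache hash composition used by
// StringSharedHashHelper above. std::hash stands in for V8's string hash and
// the names here are illustrative, not V8 API.
//
// #include <cstdint>
// #include <functional>
// #include <iostream>
// #include <string>
//
// enum StrictMode { SLOPPY, STRICT };
//
// uint32_t StringHash(const std::string& s) {
//   return static_cast<uint32_t>(std::hash<std::string>{}(s));
// }
//
// // Combine the eval'd source hash with the enclosing script's source hash,
// // a strict-mode bit and the calling scope position, so the cache entry does
// // not depend on the SharedFunctionInfo pointer and can survive GC.
// uint32_t StringSharedHash(const std::string& source,
//                           const std::string& script_source,
//                           StrictMode strict_mode,
//                           int scope_position) {
//   uint32_t hash = StringHash(source);
//   hash ^= StringHash(script_source);
//   if (strict_mode == STRICT) hash ^= 0x8000;
//   hash += scope_position;
//   return hash;
// }
//
// int main() {
//   std::cout << StringSharedHash("x + 1", "function f() { return eval('x + 1'); }",
//                                 SLOPPY, 17) << "\n";
// }
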
13713 
13714 // RegExpKey carries the source and flags of a regular expression as key.
13715 class RegExpKey : public HashTableKey {
13716  public:
13717  RegExpKey(String* string, JSRegExp::Flags flags)
13718  : string_(string),
13719  flags_(Smi::FromInt(flags.value())) { }
13720 
13721  // Rather than storing the key in the hash table, a pointer to the
13722  // stored value is stored where the key should be. IsMatch then
13723  // compares the search key to the found object, rather than comparing
13724  // a key to a key.
13725  bool IsMatch(Object* obj) {
13726  FixedArray* val = FixedArray::cast(obj);
13727  return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
13728  && (flags_ == val->get(JSRegExp::kFlagsIndex));
13729  }
13730 
13731  uint32_t Hash() { return RegExpHash(string_, flags_); }
13732 
13733  Object* AsObject(Heap* heap) {
13734  // Plain hash maps, which is where regexp keys are used, don't
13735  // use this function.
13736  UNREACHABLE();
13737  return NULL;
13738  }
13739 
13740  uint32_t HashForObject(Object* obj) {
13741  FixedArray* val = FixedArray::cast(obj);
13742  return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
13743  Smi::cast(val->get(JSRegExp::kFlagsIndex)));
13744  }
13745 
13746  static uint32_t RegExpHash(String* string, Smi* flags) {
13747  return string->Hash() + flags->value();
13748  }
13749 
13750  String* string_;
13751  Smi* flags_;
13752 };
13753 
13754 
13755 MaybeObject* OneByteStringKey::AsObject(Heap* heap) {
13756  if (hash_field_ == 0) Hash();
13757  return heap->AllocateOneByteInternalizedString(string_, hash_field_);
13758 }
13759 
13760 
13761 MaybeObject* TwoByteStringKey::AsObject(Heap* heap) {
13762  if (hash_field_ == 0) Hash();
13763  return heap->AllocateTwoByteInternalizedString(string_, hash_field_);
13764 }
13765 
13766 
13767 template<>
13768 const uint8_t* SubStringKey<uint8_t>::GetChars() {
13769  return string_->IsSeqOneByteString()
13770  ? SeqOneByteString::cast(*string_)->GetChars()
13771  : ExternalAsciiString::cast(*string_)->GetChars();
13772 }
13773 
13774 
13775 template<>
13776 const uint16_t* SubStringKey<uint16_t>::GetChars() {
13777  return string_->IsSeqTwoByteString()
13778  ? SeqTwoByteString::cast(*string_)->GetChars()
13779  : ExternalTwoByteString::cast(*string_)->GetChars();
13780 }
13781 
13782 
13783 template<>
13784 MaybeObject* SubStringKey<uint8_t>::AsObject(Heap* heap) {
13785  if (hash_field_ == 0) Hash();
13786  Vector<const uint8_t> chars(GetChars() + from_, length_);
13787  return heap->AllocateOneByteInternalizedString(chars, hash_field_);
13788 }
13789 
13790 
13791 template<>
13792 MaybeObject* SubStringKey<uint16_t>::AsObject(
13793  Heap* heap) {
13794  if (hash_field_ == 0) Hash();
13795  Vector<const uint16_t> chars(GetChars() + from_, length_);
13796  return heap->AllocateTwoByteInternalizedString(chars, hash_field_);
13797 }
13798 
13799 
13800 template<>
13801 bool SubStringKey<uint8_t>::IsMatch(Object* string) {
13802  Vector<const uint8_t> chars(GetChars() + from_, length_);
13803  return String::cast(string)->IsOneByteEqualTo(chars);
13804 }
13805 
13806 
13807 template<>
13808 bool SubStringKey<uint16_t>::IsMatch(Object* string) {
13809  Vector<const uint16_t> chars(GetChars() + from_, length_);
13810  return String::cast(string)->IsTwoByteEqualTo(chars);
13811 }
13812 
13813 
13814 template class SubStringKey<uint8_t>;
13815 template class SubStringKey<uint16_t>;
13816 
13817 
13818 // InternalizedStringKey carries a string/internalized-string object as key.
13819 class InternalizedStringKey : public HashTableKey {
13820  public:
13821  explicit InternalizedStringKey(String* string)
13822  : string_(string) { }
13823 
13824  bool IsMatch(Object* string) {
13825  return String::cast(string)->Equals(string_);
13826  }
13827 
13828  uint32_t Hash() { return string_->Hash(); }
13829 
13830  uint32_t HashForObject(Object* other) {
13831  return String::cast(other)->Hash();
13832  }
13833 
13834  MaybeObject* AsObject(Heap* heap) {
13835  // Attempt to flatten the string, so that internalized strings will most
13836  // often be flat strings.
13837  string_ = string_->TryFlattenGetString();
13838  // Internalize the string if possible.
13839  Map* map = heap->InternalizedStringMapForString(string_);
13840  if (map != NULL) {
13841  string_->set_map_no_write_barrier(map);
13842  ASSERT(string_->IsInternalizedString());
13843  return string_;
13844  }
13845  // Otherwise allocate a new internalized string.
13846  return heap->AllocateInternalizedStringImpl(
13847  string_, string_->length(), string_->hash_field());
13848  }
13849 
13850  static uint32_t StringHash(Object* obj) {
13851  return String::cast(obj)->Hash();
13852  }
13853 
13854  String* string_;
13855 };
13856 
13857 
13858 template<typename Shape, typename Key>
13859 void HashTable<Shape, Key>::IteratePrefix(ObjectVisitor* v) {
13860  IteratePointers(v, 0, kElementsStartOffset);
13861 }
13862 
13863 
13864 template<typename Shape, typename Key>
13865 void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
13866  IteratePointers(v,
13867  kElementsStartOffset,
13868  kHeaderSize + length() * kPointerSize);
13869 }
13870 
13871 
13872 template<typename Shape, typename Key>
13873 MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap,
13874  int at_least_space_for,
13875  MinimumCapacity capacity_option,
13876  PretenureFlag pretenure) {
13877  ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for));
13878  int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
13879  ? at_least_space_for
13880  : ComputeCapacity(at_least_space_for);
13881  if (capacity > HashTable::kMaxCapacity) {
13882  v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
13883  }
13884 
13885  Object* obj;
13886  { MaybeObject* maybe_obj =
13887  heap->AllocateHashTable(EntryToIndex(capacity), pretenure);
13888  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
13889  }
13890  HashTable::cast(obj)->SetNumberOfElements(0);
13891  HashTable::cast(obj)->SetNumberOfDeletedElements(0);
13892  HashTable::cast(obj)->SetCapacity(capacity);
13893  return obj;
13894 }
13895 
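
// Aside: a standalone sketch of the power-of-two capacity choice assumed by
// the allocator above. The exact ComputeCapacity policy lives elsewhere in
// V8 (objects-inl.h); this illustrative version simply doubles the request
// and rounds up to a power of two, so the table stays at most ~50% full.
//
// #include <cstdint>
// #include <iostream>
//
// uint32_t ComputeCapacitySketch(uint32_t at_least_space_for) {
//   uint32_t capacity = 4;                                   // minimum size
//   while (capacity < at_least_space_for * 2) capacity <<= 1;
//   return capacity;
// }
//
// int main() {
//   std::cout << ComputeCapacitySketch(3) << " "             // 8
//             << ComputeCapacitySketch(17) << "\n";          // 64
// }
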
13896 
13897 // Find entry for key otherwise return kNotFound.
13898 int NameDictionary::FindEntry(Name* key) {
13899  if (!key->IsUniqueName()) {
13900  return HashTable<NameDictionaryShape, Name*>::FindEntry(key);
13901  }
13902 
13903  // Optimized for unique names. Knowledge of the key type allows:
13904  // 1. Move the check if the key is unique out of the loop.
13905  // 2. Avoid comparing hash codes in unique-to-unique comparison.
13906  // 3. Detect a case when a dictionary key is not unique but the key is.
13907  // In case of positive result the dictionary key may be replaced by the
13908  // internalized string with minimal performance penalty. It gives a chance
13909  // to perform further lookups in code stubs (and significant performance
13910  // boost for a certain style of code).
13911 
13912  // EnsureCapacity will guarantee the hash table is never full.
13913  uint32_t capacity = Capacity();
13914  uint32_t entry = FirstProbe(key->Hash(), capacity);
13915  uint32_t count = 1;
13916 
13917  while (true) {
13918  int index = EntryToIndex(entry);
13919  Object* element = get(index);
13920  if (element->IsUndefined()) break; // Empty entry.
13921  if (key == element) return entry;
13922  if (!element->IsUniqueName() &&
13923  !element->IsTheHole() &&
13924  Name::cast(element)->Equals(key)) {
13925  // Replace a key that is a non-internalized string by the equivalent
13926  // internalized string for faster further lookups.
13927  set(index, key);
13928  return entry;
13929  }
13930  ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(key));
13931  entry = NextProbe(entry, count++, capacity);
13932  }
13933  return kNotFound;
13934 }
13935 
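
// Aside: a standalone sketch of the probing used by FindEntry above. V8
// probes quadratically via triangular numbers: FirstProbe is
// hash & (capacity - 1) and NextProbe adds the probe count each step.
// Capacity must be a power of two; an empty string stands in for an
// undefined (never used) slot here. Names are illustrative, not V8 API.
//
// #include <cstdint>
// #include <iostream>
// #include <string>
// #include <vector>
//
// int FindEntrySketch(const std::vector<std::string>& table,
//                     const std::string& key, uint32_t hash) {
//   const uint32_t capacity = static_cast<uint32_t>(table.size());
//   uint32_t entry = hash & (capacity - 1);                  // FirstProbe
//   for (uint32_t count = 1;; ++count) {
//     if (table[entry].empty()) return -1;                   // empty: absent
//     if (table[entry] == key) return static_cast<int>(entry);
//     entry = (entry + count) & (capacity - 1);              // NextProbe
//   }
// }
//
// int main() {
//   std::vector<std::string> table(8);
//   table[3] = "x";
//   std::cout << FindEntrySketch(table, "x", 3) << "\n";     // 3
// }
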
13936 
13937 template<typename Shape, typename Key>
13938 MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
13939  ASSERT(NumberOfElements() < new_table->Capacity());
13940 
13941  DisallowHeapAllocation no_gc;
13942  WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
13943 
13944  // Copy prefix to new array.
13945  for (int i = kPrefixStartIndex;
13946  i < kPrefixStartIndex + Shape::kPrefixSize;
13947  i++) {
13948  new_table->set(i, get(i), mode);
13949  }
13950 
13951  // Rehash the elements.
13952  int capacity = Capacity();
13953  for (int i = 0; i < capacity; i++) {
13954  uint32_t from_index = EntryToIndex(i);
13955  Object* k = get(from_index);
13956  if (IsKey(k)) {
13957  uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
13958  uint32_t insertion_index =
13959  EntryToIndex(new_table->FindInsertionEntry(hash));
13960  for (int j = 0; j < Shape::kEntrySize; j++) {
13961  new_table->set(insertion_index + j, get(from_index + j), mode);
13962  }
13963  }
13964  }
13965  new_table->SetNumberOfElements(NumberOfElements());
13966  new_table->SetNumberOfDeletedElements(0);
13967  return new_table;
13968 }
13969 
13970 
13971 template<typename Shape, typename Key>
13972 uint32_t HashTable<Shape, Key>::EntryForProbe(Key key,
13973  Object* k,
13974  int probe,
13975  uint32_t expected) {
13976  uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
13977  uint32_t capacity = Capacity();
13978  uint32_t entry = FirstProbe(hash, capacity);
13979  for (int i = 1; i < probe; i++) {
13980  if (entry == expected) return expected;
13981  entry = NextProbe(entry, i, capacity);
13982  }
13983  return entry;
13984 }
13985 
13986 
13987 template<typename Shape, typename Key>
13988 void HashTable<Shape, Key>::Swap(uint32_t entry1,
13989  uint32_t entry2,
13990  WriteBarrierMode mode) {
13991  int index1 = EntryToIndex(entry1);
13992  int index2 = EntryToIndex(entry2);
13993  Object* temp[Shape::kEntrySize];
13994  for (int j = 0; j < Shape::kEntrySize; j++) {
13995  temp[j] = get(index1 + j);
13996  }
13997  for (int j = 0; j < Shape::kEntrySize; j++) {
13998  set(index1 + j, get(index2 + j), mode);
13999  }
14000  for (int j = 0; j < Shape::kEntrySize; j++) {
14001  set(index2 + j, temp[j], mode);
14002  }
14003 }
14004 
14005 
14006 template<typename Shape, typename Key>
14007 void HashTable<Shape, Key>::Rehash(Key key) {
14008  DisallowHeapAllocation no_gc;
14009  WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
14010  uint32_t capacity = Capacity();
14011  bool done = false;
14012  for (int probe = 1; !done; probe++) {
14013  // All elements at entries given by one of the first _probe_ probes
14014  // are placed correctly. Other elements might need to be moved.
14015  done = true;
14016  for (uint32_t current = 0; current < capacity; current++) {
14017  Object* current_key = get(EntryToIndex(current));
14018  if (IsKey(current_key)) {
14019  uint32_t target = EntryForProbe(key, current_key, probe, current);
14020  if (current == target) continue;
14021  Object* target_key = get(EntryToIndex(target));
14022  if (!IsKey(target_key) ||
14023  EntryForProbe(key, target_key, probe, target) != target) {
14024  // Put the current element into the correct position.
14025  Swap(current, target, mode);
14026  // The other element will be processed on the next iteration.
14027  current--;
14028  } else {
14029  // The place for the current element is occupied. Leave the element
14030  // for the next probe.
14031  done = false;
14032  }
14033  }
14034  }
14035  }
14036 }
14037 
14038 
14039 template<typename Shape, typename Key>
14040 MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n,
14041  Key key,
14042  PretenureFlag pretenure) {
14043  int capacity = Capacity();
14044  int nof = NumberOfElements() + n;
14045  int nod = NumberOfDeletedElements();
14046  // Return if:
14047  // 50% is still free after adding n elements and
14048  // at most 50% of the free elements are deleted elements.
14049  if (nod <= (capacity - nof) >> 1) {
14050  int needed_free = nof >> 1;
14051  if (nof + needed_free <= capacity) return this;
14052  }
14053 
14054  const int kMinCapacityForPretenure = 256;
14055  bool should_pretenure = pretenure == TENURED ||
14056  ((capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this));
14057  Object* obj;
14058  { MaybeObject* maybe_obj =
14059  Allocate(GetHeap(),
14060  nof * 2,
14061  USE_DEFAULT_MINIMUM_CAPACITY,
14062  should_pretenure ? TENURED : NOT_TENURED);
14063  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14064  }
14065 
14066  return Rehash(HashTable::cast(obj), key);
14067 }
14068 
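
// Aside: the growth test performed by EnsureCapacity above, reduced to
// standalone arithmetic. The current table is kept if at least half of it is
// still free after adding n elements and deleted (tombstone) entries occupy
// at most half of that free space. Names are illustrative.
//
// #include <iostream>
//
// bool NeedsToGrow(int capacity, int elements, int deleted, int n) {
//   int nof = elements + n;                  // elements after the insertions
//   if (deleted <= (capacity - nof) >> 1) {  // few enough tombstones
//     int needed_free = nof >> 1;            // want 50% headroom
//     if (nof + needed_free <= capacity) return false;
//   }
//   return true;                             // rehash into a larger table
// }
//
// int main() {
//   std::cout << NeedsToGrow(16, 8, 0, 1) << " "    // 0: 9 live + 4 headroom fits
//             << NeedsToGrow(16, 11, 0, 1) << "\n"; // 1: 12 live + 6 headroom > 16
// }
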
14069 
14070 template<typename Shape, typename Key>
14071 MaybeObject* HashTable<Shape, Key>::Shrink(Key key) {
14072  int capacity = Capacity();
14073  int nof = NumberOfElements();
14074 
14075  // Shrink to fit the number of elements if only a quarter of the
14076  // capacity is filled with elements.
14077  if (nof > (capacity >> 2)) return this;
14078  // Allocate a new dictionary with room for at least the current
14079  // number of elements. The allocation method will make sure that
14080  // there is extra room in the dictionary for additions. Don't go
14081  // lower than room for 16 elements.
14082  int at_least_room_for = nof;
14083  if (at_least_room_for < 16) return this;
14084 
14085  const int kMinCapacityForPretenure = 256;
14086  bool pretenure =
14087  (at_least_room_for > kMinCapacityForPretenure) &&
14088  !GetHeap()->InNewSpace(this);
14089  Object* obj;
14090  { MaybeObject* maybe_obj =
14091  Allocate(GetHeap(),
14092  at_least_room_for,
14093  USE_DEFAULT_MINIMUM_CAPACITY,
14094  pretenure ? TENURED : NOT_TENURED);
14095  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14096  }
14097 
14098  return Rehash(HashTable::cast(obj), key);
14099 }
14100 
14101 
14102 template<typename Shape, typename Key>
14103 uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
14104  uint32_t capacity = Capacity();
14105  uint32_t entry = FirstProbe(hash, capacity);
14106  uint32_t count = 1;
14107  // EnsureCapacity will guarantee the hash table is never full.
14108  while (true) {
14109  Object* element = KeyAt(entry);
14110  if (element->IsUndefined() || element->IsTheHole()) break;
14111  entry = NextProbe(entry, count++, capacity);
14112  }
14113  return entry;
14114 }
14115 
14116 
14117 // Force instantiation of template instances class.
14118 // Please note this list is compiler dependent.
14119 
14121 
14123 
14125 
14126 template class HashTable<ObjectHashTableShape<1>, Object*>;
14127 
14128 template class HashTable<ObjectHashTableShape<2>, Object*>;
14129 
14130 template class HashTable<WeakHashTableShape<2>, Object*>;
14131 
14133 
14135 
14137 
14139  Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);
14140 
14142  Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);
14143 
14144 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
14145  Allocate(Heap* heap, int n, PretenureFlag pretenure);
14146 
14148  uint32_t, Object*);
14149 
14151  AtPut(uint32_t, Object*);
14152 
14154  SlowReverseLookup(Object* value);
14155 
14157  SlowReverseLookup(Object* value);
14158 
14160  Object*);
14161 
14163  FixedArray*,
14166 
14168  int, JSObject::DeleteMode);
14169 
14172 
14173 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Shrink(Name* n);
14174 
14176  uint32_t);
14177 
14179  FixedArray*,
14180  int,
14183 
14184 template int
14187 
14188 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Add(
14189  Name*, Object*, PropertyDetails);
14190 
14191 template MaybeObject*
14193 
14194 template int
14197 
14199  uint32_t, Object*, PropertyDetails);
14200 
14202  uint32_t, Object*, PropertyDetails);
14203 
14205  EnsureCapacity(int, uint32_t);
14206 
14208  EnsureCapacity(int, uint32_t);
14209 
14210 template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
14211  EnsureCapacity(int, Name*);
14212 
14214  AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
14215 
14217  AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
14218 
14220  Name*, Object*, PropertyDetails, uint32_t);
14221 
14222 template
14224 
14225 template
14227 
14228 template
14230 
14231 
14232 Handle<Object> JSObject::PrepareSlowElementsForSort(
14233  Handle<JSObject> object, uint32_t limit) {
14234  CALL_HEAP_FUNCTION(object->GetIsolate(),
14235  object->PrepareSlowElementsForSort(limit),
14236  Object);
14237 }
14238 
14239 
14240 // Collates undefined and unexisting elements below limit from position
14241 // zero of the elements. The object stays in Dictionary mode.
14242 MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
14243  ASSERT(HasDictionaryElements());
14244  // Must stay in dictionary mode, either because of requires_slow_elements,
14245  // or because we are not going to sort (and therefore compact) all of the
14246  // elements.
14247  SeededNumberDictionary* dict = element_dictionary();
14248  HeapNumber* result_double = NULL;
14249  if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
14250  // Allocate space for result before we start mutating the object.
14251  Object* new_double;
14252  { MaybeObject* maybe_new_double = GetHeap()->AllocateHeapNumber(0.0);
14253  if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
14254  }
14255  result_double = HeapNumber::cast(new_double);
14256  }
14257 
14258  Object* obj;
14259  { MaybeObject* maybe_obj =
14260  SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements());
14261  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14262  }
14263  SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj);
14264 
14265  DisallowHeapAllocation no_alloc;
14266 
14267  uint32_t pos = 0;
14268  uint32_t undefs = 0;
14269  int capacity = dict->Capacity();
14270  for (int i = 0; i < capacity; i++) {
14271  Object* k = dict->KeyAt(i);
14272  if (dict->IsKey(k)) {
14273  ASSERT(k->IsNumber());
14274  ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
14275  ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
14276  ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
14277  Object* value = dict->ValueAt(i);
14278  PropertyDetails details = dict->DetailsAt(i);
14279  if (details.type() == CALLBACKS || details.IsReadOnly()) {
14280  // Bail out and do the sorting of undefineds and array holes in JS.
14281  // Also bail out if the element is not supposed to be moved.
14282  return Smi::FromInt(-1);
14283  }
14284  uint32_t key = NumberToUint32(k);
14285  // In the following we assert that adding the entry to the new dictionary
14286  // does not cause GC. This is the case because we made sure to allocate
14287  // the dictionary big enough above, so it need not grow.
14288  if (key < limit) {
14289  if (value->IsUndefined()) {
14290  undefs++;
14291  } else {
14292  if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14293  // Adding an entry with the key beyond smi-range requires
14294  // allocation. Bailout.
14295  return Smi::FromInt(-1);
14296  }
14297  new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked();
14298  pos++;
14299  }
14300  } else {
14301  if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
14302  // Adding an entry with the key beyond smi-range requires
14303  // allocation. Bailout.
14304  return Smi::FromInt(-1);
14305  }
14306  new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked();
14307  }
14308  }
14309  }
14310 
14311  uint32_t result = pos;
14312  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
14313  Heap* heap = GetHeap();
14314  while (undefs > 0) {
14315  if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
14316  // Adding an entry with the key beyond smi-range requires
14317  // allocation. Bailout.
14318  return Smi::FromInt(-1);
14319  }
14320  new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)->
14321  ToObjectUnchecked();
14322  pos++;
14323  undefs--;
14324  }
14325 
14326  set_elements(new_dict);
14327 
14328  if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
14329  return Smi::FromInt(static_cast<int>(result));
14330  }
14331 
14332  ASSERT_NE(NULL, result_double);
14333  result_double->set_value(static_cast<double>(result));
14334  return result_double;
14335 }
14336 
14337 
14338 // Collects all defined (non-hole) and non-undefined (array) elements at
14339 // the start of the elements array.
14340 // If the object is in dictionary mode, it is converted to fast elements
14341 // mode.
14342 Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
14343  uint32_t limit) {
14344  Isolate* isolate = object->GetIsolate();
14345  if (object->HasSloppyArgumentsElements() ||
14346  object->map()->is_observed()) {
14347  return handle(Smi::FromInt(-1), isolate);
14348  }
14349 
14350  if (object->HasDictionaryElements()) {
14351  // Convert to fast elements containing only the existing properties.
14352  // Ordering is irrelevant, since we are going to sort anyway.
14353  Handle<SeededNumberDictionary> dict(object->element_dictionary());
14354  if (object->IsJSArray() || dict->requires_slow_elements() ||
14355  dict->max_number_key() >= limit) {
14356  return JSObject::PrepareSlowElementsForSort(object, limit);
14357  }
14358  // Convert to fast elements.
14359 
14360  Handle<Map> new_map =
14361  JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);
14362 
14363  PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
14364  NOT_TENURED: TENURED;
14365  Handle<FixedArray> fast_elements =
14366  isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
14367  dict->CopyValuesTo(*fast_elements);
14368  object->ValidateElements();
14369 
14370  object->set_map_and_elements(*new_map, *fast_elements);
14371  } else if (object->HasExternalArrayElements() ||
14372  object->HasFixedTypedArrayElements()) {
14373  // Typed arrays cannot have holes or undefined elements.
14374  return handle(Smi::FromInt(
14375  FixedArrayBase::cast(object->elements())->length()), isolate);
14376  } else if (!object->HasFastDoubleElements()) {
14377  EnsureWritableFastElements(object);
14378  }
14379  ASSERT(object->HasFastSmiOrObjectElements() ||
14380  object->HasFastDoubleElements());
14381 
14382  // Collect holes at the end, undefined before that and the rest at the
14383  // start, and return the number of non-hole, non-undefined values.
14384 
14385  Handle<FixedArrayBase> elements_base(object->elements());
14386  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
14387  if (limit > elements_length) {
14388  limit = elements_length;
14389  }
14390  if (limit == 0) {
14391  return handle(Smi::FromInt(0), isolate);
14392  }
14393 
14394  uint32_t result = 0;
14395  if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
14396  FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
14397  // Split elements into defined and the_hole, in that order.
14398  unsigned int holes = limit;
14399  // Assume most arrays contain no holes and undefined values, so minimize the
14400  // number of stores of non-undefined, non-the-hole values.
14401  for (unsigned int i = 0; i < holes; i++) {
14402  if (elements->is_the_hole(i)) {
14403  holes--;
14404  } else {
14405  continue;
14406  }
14407  // Position i needs to be filled.
14408  while (holes > i) {
14409  if (elements->is_the_hole(holes)) {
14410  holes--;
14411  } else {
14412  elements->set(i, elements->get_scalar(holes));
14413  break;
14414  }
14415  }
14416  }
14417  result = holes;
14418  while (holes < limit) {
14419  elements->set_the_hole(holes);
14420  holes++;
14421  }
14422  } else {
14423  FixedArray* elements = FixedArray::cast(*elements_base);
14424  DisallowHeapAllocation no_gc;
14425 
14426  // Split elements into defined, undefined and the_hole, in that order. Only
14427  // count locations for undefined and the hole, and fill them afterwards.
14428  WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
14429  unsigned int undefs = limit;
14430  unsigned int holes = limit;
14431  // Assume most arrays contain no holes and undefined values, so minimize the
14432  // number of stores of non-undefined, non-the-hole values.
14433  for (unsigned int i = 0; i < undefs; i++) {
14434  Object* current = elements->get(i);
14435  if (current->IsTheHole()) {
14436  holes--;
14437  undefs--;
14438  } else if (current->IsUndefined()) {
14439  undefs--;
14440  } else {
14441  continue;
14442  }
14443  // Position i needs to be filled.
14444  while (undefs > i) {
14445  current = elements->get(undefs);
14446  if (current->IsTheHole()) {
14447  holes--;
14448  undefs--;
14449  } else if (current->IsUndefined()) {
14450  undefs--;
14451  } else {
14452  elements->set(i, current, write_barrier);
14453  break;
14454  }
14455  }
14456  }
14457  result = undefs;
14458  while (undefs < holes) {
14459  elements->set_undefined(undefs);
14460  undefs++;
14461  }
14462  while (holes < limit) {
14463  elements->set_the_hole(holes);
14464  holes++;
14465  }
14466  }
14467 
14468  return isolate->factory()->NewNumberFromUint(result);
14469 }
14470 
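
// Aside: a standalone sketch of the compaction performed by
// PrepareElementsForSort above: defined values are moved to the front, then
// undefineds, then holes, and the number of defined values is returned. Here
// -1 stands in for "undefined" and -2 for "the hole"; V8 operates on its
// FixedArray representation instead.
//
// #include <iostream>
// #include <vector>
//
// unsigned Compact(std::vector<int>& elems, unsigned limit) {
//   unsigned undefs = limit, holes = limit;
//   for (unsigned i = 0; i < undefs; ++i) {
//     int current = elems[i];
//     if (current == -2) { --holes; --undefs; }
//     else if (current == -1) { --undefs; }
//     else continue;
//     // Position i needs to be filled from the back of the range.
//     while (undefs > i) {
//       current = elems[undefs];
//       if (current == -2) { --holes; --undefs; }
//       else if (current == -1) { --undefs; }
//       else { elems[i] = current; break; }
//     }
//   }
//   unsigned result = undefs;
//   while (undefs < holes) elems[undefs++] = -1;   // fill undefineds
//   while (holes < limit) elems[holes++] = -2;     // fill holes
//   return result;
// }
//
// int main() {
//   std::vector<int> v = {5, -2, -1, 7, -2, 3};
//   std::cout << Compact(v, v.size()) << "\n";     // 3 defined values
//   for (int x : v) std::cout << x << " ";         // 5 3 7 -1 -2 -2
// }
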
14471 
14472 ExternalArrayType JSTypedArray::type() {
14473  switch (elements()->map()->instance_type()) {
14474 #define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size) \
14475  case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14476  case FIXED_##TYPE##_ARRAY_TYPE: \
14477  return kExternal##Type##Array;
14478 
14479  TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
14480 #undef INSTANCE_TYPE_TO_ARRAY_TYPE
14481 
14482  default:
14483  UNREACHABLE();
14484  return static_cast<ExternalArrayType>(-1);
14485  }
14486 }
14487 
14488 
14489 size_t JSTypedArray::element_size() {
14490  switch (elements()->map()->instance_type()) {
14491 #define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size) \
14492  case EXTERNAL_##TYPE##_ARRAY_TYPE: \
14493  return size;
14494 
14495  TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
14496 #undef INSTANCE_TYPE_TO_ELEMENT_SIZE
14497 
14498  default:
14499  UNREACHABLE();
14500  return 0;
14501  }
14502 }
14503 
14504 
14505 MaybeObject* ExternalUint8ClampedArray::SetValue(uint32_t index, Object* value) {
14506  uint8_t clamped_value = 0;
14507  if (index < static_cast<uint32_t>(length())) {
14508  if (value->IsSmi()) {
14509  int int_value = Smi::cast(value)->value();
14510  if (int_value < 0) {
14511  clamped_value = 0;
14512  } else if (int_value > 255) {
14513  clamped_value = 255;
14514  } else {
14515  clamped_value = static_cast<uint8_t>(int_value);
14516  }
14517  } else if (value->IsHeapNumber()) {
14518  double double_value = HeapNumber::cast(value)->value();
14519  if (!(double_value > 0)) {
14520  // NaN and less than zero clamp to zero.
14521  clamped_value = 0;
14522  } else if (double_value > 255) {
14523  // Greater than 255 clamp to 255.
14524  clamped_value = 255;
14525  } else {
14526  // Other doubles are rounded to the nearest integer.
14527  clamped_value = static_cast<uint8_t>(lrint(double_value));
14528  }
14529  } else {
14530  // Clamp undefined to zero (default). All other types have been
14531  // converted to a number type further up in the call chain.
14532  ASSERT(value->IsUndefined());
14533  }
14534  set(index, clamped_value);
14535  }
14536  return Smi::FromInt(clamped_value);
14537 }
14538 
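
// Aside: the clamping rule applied by the Uint8Clamped store above, as a
// standalone helper: NaN and non-positive values clamp to 0, values above 255
// clamp to 255, and everything else rounds to the nearest integer via lrint.
//
// #include <cmath>
// #include <cstdint>
// #include <iostream>
//
// uint8_t ClampToUint8(double value) {
//   if (!(value > 0)) return 0;      // NaN and <= 0
//   if (value > 255) return 255;
//   return static_cast<uint8_t>(std::lrint(value));
// }
//
// int main() {
//   std::cout << int(ClampToUint8(-3.5)) << " "     // 0
//             << int(ClampToUint8(200.6)) << " "    // 201
//             << int(ClampToUint8(300.0)) << "\n";  // 255
// }
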
14539 
14540 Handle<Object> ExternalUint8ClampedArray::SetValue(
14541  Handle<ExternalUint8ClampedArray> array,
14542  uint32_t index,
14543  Handle<Object> value) {
14544  return Handle<Object>(array->SetValue(index, *value), array->GetIsolate());
14545 }
14546 
14547 
14548 template<typename ExternalArrayClass, typename ValueType>
14549 static MaybeObject* ExternalArrayIntSetter(Heap* heap,
14550  ExternalArrayClass* receiver,
14551  uint32_t index,
14552  Object* value) {
14553  ValueType cast_value = 0;
14554  if (index < static_cast<uint32_t>(receiver->length())) {
14555  if (value->IsSmi()) {
14556  int int_value = Smi::cast(value)->value();
14557  cast_value = static_cast<ValueType>(int_value);
14558  } else if (value->IsHeapNumber()) {
14559  double double_value = HeapNumber::cast(value)->value();
14560  cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
14561  } else {
14562  // Clamp undefined to zero (default). All other types have been
14563  // converted to a number type further up in the call chain.
14564  ASSERT(value->IsUndefined());
14565  }
14566  receiver->set(index, cast_value);
14567  }
14568  return heap->NumberFromInt32(cast_value);
14569 }
14570 
14571 
14572 Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
14573  uint32_t index,
14574  Handle<Object> value) {
14575  CALL_HEAP_FUNCTION(array->GetIsolate(),
14576  array->SetValue(index, *value),
14577  Object);
14578 }
14579 
14580 
14581 MaybeObject* ExternalInt8Array::SetValue(uint32_t index, Object* value) {
14582  return ExternalArrayIntSetter<ExternalInt8Array, int8_t>
14583  (GetHeap(), this, index, value);
14584 }
14585 
14586 
14587 Handle<Object> ExternalUint8Array::SetValue(
14588  Handle<ExternalUint8Array> array,
14589  uint32_t index,
14590  Handle<Object> value) {
14591  CALL_HEAP_FUNCTION(array->GetIsolate(),
14592  array->SetValue(index, *value),
14593  Object);
14594 }
14595 
14596 
14597 MaybeObject* ExternalUint8Array::SetValue(uint32_t index,
14598  Object* value) {
14599  return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>
14600  (GetHeap(), this, index, value);
14601 }
14602 
14603 
14604 Handle<Object> ExternalInt16Array::SetValue(
14605  Handle<ExternalInt16Array> array,
14606  uint32_t index,
14607  Handle<Object> value) {
14608  CALL_HEAP_FUNCTION(array->GetIsolate(),
14609  array->SetValue(index, *value),
14610  Object);
14611 }
14612 
14613 
14614 MaybeObject* ExternalInt16Array::SetValue(uint32_t index,
14615  Object* value) {
14616  return ExternalArrayIntSetter<ExternalInt16Array, int16_t>
14617  (GetHeap(), this, index, value);
14618 }
14619 
14620 
14621 Handle<Object> ExternalUint16Array::SetValue(
14622  Handle<ExternalUint16Array> array,
14623  uint32_t index,
14624  Handle<Object> value) {
14625  CALL_HEAP_FUNCTION(array->GetIsolate(),
14626  array->SetValue(index, *value),
14627  Object);
14628 }
14629 
14630 
14631 MaybeObject* ExternalUint16Array::SetValue(uint32_t index,
14632  Object* value) {
14633  return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>
14634  (GetHeap(), this, index, value);
14635 }
14636 
14637 
14638 Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
14639  uint32_t index,
14640  Handle<Object> value) {
14641  CALL_HEAP_FUNCTION(array->GetIsolate(),
14642  array->SetValue(index, *value),
14643  Object);
14644 }
14645 
14646 
14647 MaybeObject* ExternalInt32Array::SetValue(uint32_t index, Object* value) {
14648  return ExternalArrayIntSetter<ExternalInt32Array, int32_t>
14649  (GetHeap(), this, index, value);
14650 }
14651 
14652 
14653 Handle<Object> ExternalUint32Array::SetValue(
14654  Handle<ExternalUint32Array> array,
14655  uint32_t index,
14656  Handle<Object> value) {
14657  CALL_HEAP_FUNCTION(array->GetIsolate(),
14658  array->SetValue(index, *value),
14659  Object);
14660 }
14661 
14662 
14663 MaybeObject* ExternalUint32Array::SetValue(uint32_t index, Object* value) {
14664  uint32_t cast_value = 0;
14665  Heap* heap = GetHeap();
14666  if (index < static_cast<uint32_t>(length())) {
14667  if (value->IsSmi()) {
14668  int int_value = Smi::cast(value)->value();
14669  cast_value = static_cast<uint32_t>(int_value);
14670  } else if (value->IsHeapNumber()) {
14671  double double_value = HeapNumber::cast(value)->value();
14672  cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
14673  } else {
14674  // Clamp undefined to zero (default). All other types have been
14675  // converted to a number type further up in the call chain.
14676  ASSERT(value->IsUndefined());
14677  }
14678  set(index, cast_value);
14679  }
14680  return heap->NumberFromUint32(cast_value);
14681 }
14682 
14683 
14684 Handle<Object> ExternalFloat32Array::SetValue(
14685  Handle<ExternalFloat32Array> array,
14686  uint32_t index,
14687  Handle<Object> value) {
14688  CALL_HEAP_FUNCTION(array->GetIsolate(),
14689  array->SetValue(index, *value),
14690  Object);
14691 }
14692 
14693 
14694 MaybeObject* ExternalFloat32Array::SetValue(uint32_t index, Object* value) {
14695  float cast_value = static_cast<float>(OS::nan_value());
14696  Heap* heap = GetHeap();
14697  if (index < static_cast<uint32_t>(length())) {
14698  if (value->IsSmi()) {
14699  int int_value = Smi::cast(value)->value();
14700  cast_value = static_cast<float>(int_value);
14701  } else if (value->IsHeapNumber()) {
14702  double double_value = HeapNumber::cast(value)->value();
14703  cast_value = static_cast<float>(double_value);
14704  } else {
14705  // Clamp undefined to NaN (default). All other types have been
14706  // converted to a number type further up in the call chain.
14707  ASSERT(value->IsUndefined());
14708  }
14709  set(index, cast_value);
14710  }
14711  return heap->AllocateHeapNumber(cast_value);
14712 }
14713 
14714 
14715 Handle<Object> ExternalFloat64Array::SetValue(
14716  Handle<ExternalFloat64Array> array,
14717  uint32_t index,
14718  Handle<Object> value) {
14719  CALL_HEAP_FUNCTION(array->GetIsolate(),
14720  array->SetValue(index, *value),
14721  Object);
14722 }
14723 
14724 
14725 MaybeObject* ExternalFloat64Array::SetValue(uint32_t index, Object* value) {
14726  double double_value = OS::nan_value();
14727  Heap* heap = GetHeap();
14728  if (index < static_cast<uint32_t>(length())) {
14729  if (value->IsSmi()) {
14730  int int_value = Smi::cast(value)->value();
14731  double_value = static_cast<double>(int_value);
14732  } else if (value->IsHeapNumber()) {
14733  double_value = HeapNumber::cast(value)->value();
14734  } else {
14735  // Clamp undefined to NaN (default). All other types have been
14736  // converted to a number type further up in the call chain.
14737  ASSERT(value->IsUndefined());
14738  }
14739  set(index, double_value);
14740  }
14741  return heap->AllocateHeapNumber(double_value);
14742 }
14743 
14744 
14745 PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
14746  ASSERT(!HasFastProperties());
14747  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
14748  return PropertyCell::cast(value);
14749 }
14750 
14751 
14752 Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
14753  Handle<JSGlobalObject> global,
14754  Handle<Name> name) {
14755  ASSERT(!global->HasFastProperties());
14756  int entry = global->property_dictionary()->FindEntry(*name);
14757  if (entry == NameDictionary::kNotFound) {
14758  Isolate* isolate = global->GetIsolate();
14759  Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
14760  isolate->factory()->the_hole_value());
14761  PropertyDetails details(NONE, NORMAL, 0);
14762  details = details.AsDeleted();
14763  Handle<NameDictionary> dictionary = NameDictionaryAdd(
14764  handle(global->property_dictionary()), name, cell, details);
14765  global->set_properties(*dictionary);
14766  return cell;
14767  } else {
14768  Object* value = global->property_dictionary()->ValueAt(entry);
14769  ASSERT(value->IsPropertyCell());
14770  return handle(PropertyCell::cast(value));
14771  }
14772 }
14773 
14774 
14775 MaybeObject* StringTable::LookupString(String* string, Object** s) {
14776  InternalizedStringKey key(string);
14777  return LookupKey(&key, s);
14778 }
14779 
14780 
14781 // This class is used for looking up two character strings in the string table.
14782 // If we don't have a hit we don't want to waste much time so we unroll the
14783 // string hash calculation loop here for speed. Doesn't work if the two
14784 // characters form a decimal integer, since such strings have a different hash
14785 // algorithm.
14786 class TwoCharHashTableKey : public HashTableKey {
14787  public:
14788  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
14789  : c1_(c1), c2_(c2) {
14790  // Char 1.
14791  uint32_t hash = seed;
14792  hash += c1;
14793  hash += hash << 10;
14794  hash ^= hash >> 6;
14795  // Char 2.
14796  hash += c2;
14797  hash += hash << 10;
14798  hash ^= hash >> 6;
14799  // GetHash.
14800  hash += hash << 3;
14801  hash ^= hash >> 11;
14802  hash += hash << 15;
14803  if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
14804  hash_ = hash;
14805 #ifdef DEBUG
14806  // If this assert fails then we failed to reproduce the two-character
14807  // version of the string hashing algorithm above. One reason could be
14808  // that we were passed two digits as characters, since the hash
14809  // algorithm is different in that case.
14810  uint16_t chars[2] = {c1, c2};
14811  uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
14812  hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
14813  ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
14814 #endif
14815  }
14816 
14817  bool IsMatch(Object* o) {
14818  if (!o->IsString()) return false;
14819  String* other = String::cast(o);
14820  if (other->length() != 2) return false;
14821  if (other->Get(0) != c1_) return false;
14822  return other->Get(1) == c2_;
14823  }
14824 
14825  uint32_t Hash() { return hash_; }
14826  uint32_t HashForObject(Object* key) {
14827  if (!key->IsString()) return 0;
14828  return String::cast(key)->Hash();
14829  }
14830 
14831  Object* AsObject(Heap* heap) {
14832  // The TwoCharHashTableKey is only used for looking in the string
14833  // table, not for adding to it.
14834  UNREACHABLE();
14835  return NULL;
14836  }
14837 
14838  private:
14839  uint16_t c1_;
14840  uint16_t c2_;
14841  uint32_t hash_;
14842 };
14843 
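
// Aside: the two-character hash above is an unrolled form of V8's seeded
// one-at-a-time string hash. For comparison, a generic loop over n
// characters (the kZeroHash substitution is omitted; names are illustrative):
//
// #include <cstdint>
// #include <iostream>
//
// uint32_t SequentialHash(const uint16_t* chars, int length, uint32_t seed) {
//   uint32_t hash = seed;
//   for (int i = 0; i < length; ++i) {
//     hash += chars[i];
//     hash += hash << 10;
//     hash ^= hash >> 6;
//   }
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   return hash;
// }
//
// int main() {
//   uint16_t chars[2] = {'o', 'k'};
//   std::cout << SequentialHash(chars, 2, 0x5eed) << "\n";
// }
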
14844 
14845 bool StringTable::LookupStringIfExists(String* string, String** result) {
14846  InternalizedStringKey key(string);
14847  int entry = FindEntry(&key);
14848  if (entry == kNotFound) {
14849  return false;
14850  } else {
14851  *result = String::cast(KeyAt(entry));
14852  ASSERT(StringShape(*result).IsInternalized());
14853  return true;
14854  }
14855 }
14856 
14857 
14858 bool StringTable::LookupTwoCharsStringIfExists(uint16_t c1,
14859  uint16_t c2,
14860  String** result) {
14861  TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed());
14862  int entry = FindEntry(&key);
14863  if (entry == kNotFound) {
14864  return false;
14865  } else {
14866  *result = String::cast(KeyAt(entry));
14867  ASSERT(StringShape(*result).IsInternalized());
14868  return true;
14869  }
14870 }
14871 
14872 
14873 MaybeObject* StringTable::LookupKey(HashTableKey* key, Object** s) {
14874  int entry = FindEntry(key);
14875 
14876  // String already in table.
14877  if (entry != kNotFound) {
14878  *s = KeyAt(entry);
14879  return this;
14880  }
14881 
14882  // Adding new string. Grow table if needed.
14883  Object* obj;
14884  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
14885  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14886  }
14887 
14888  // Create string object.
14889  Object* string;
14890  { MaybeObject* maybe_string = key->AsObject(GetHeap());
14891  if (!maybe_string->ToObject(&string)) return maybe_string;
14892  }
14893 
14894  // If the string table grew as part of EnsureCapacity, obj is not
14895  // the current string table and therefore we cannot use
14896  // StringTable::cast here.
14897  StringTable* table = reinterpret_cast<StringTable*>(obj);
14898 
14899  // Add the new string and return it along with the string table.
14900  entry = table->FindInsertionEntry(key->Hash());
14901  table->set(EntryToIndex(entry), string);
14902  table->ElementAdded();
14903  *s = string;
14904  return table;
14905 }
14906 
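
// Aside: LookupKey above implements interning: a string is looked up and, if
// absent, inserted so that later lookups return the same canonical object.
// A std::unordered_set gives the same behaviour in standard C++ (node-based
// containers keep element addresses stable across insertions):
//
// #include <iostream>
// #include <string>
// #include <unordered_set>
//
// const std::string* Intern(std::unordered_set<std::string>& table,
//                           const std::string& s) {
//   auto result = table.insert(s);        // no-op if an equal string exists
//   return &*result.first;                // canonical copy shared by callers
// }
//
// int main() {
//   std::unordered_set<std::string> table;
//   const std::string* a = Intern(table, "foo");
//   const std::string* b = Intern(table, "foo");
//   std::cout << (a == b) << "\n";        // 1: both name the interned string
// }
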
14907 
14908 Object* CompilationCacheTable::Lookup(String* src, Context* context) {
14909  SharedFunctionInfo* shared = context->closure()->shared();
14910  StringSharedKey key(src,
14911  shared,
14912  FLAG_use_strict ? STRICT : SLOPPY,
14913  RelocInfo::kNoPosition);
14914  int entry = FindEntry(&key);
14915  if (entry == kNotFound) return GetHeap()->undefined_value();
14916  return get(EntryToIndex(entry) + 1);
14917 }
14918 
14919 
14920 Object* CompilationCacheTable::LookupEval(String* src,
14921  Context* context,
14922  StrictMode strict_mode,
14923  int scope_position) {
14924  StringSharedKey key(src,
14925  context->closure()->shared(),
14926  strict_mode,
14927  scope_position);
14928  int entry = FindEntry(&key);
14929  if (entry == kNotFound) return GetHeap()->undefined_value();
14930  return get(EntryToIndex(entry) + 1);
14931 }
14932 
14933 
14934 Object* CompilationCacheTable::LookupRegExp(String* src,
14935  JSRegExp::Flags flags) {
14936  RegExpKey key(src, flags);
14937  int entry = FindEntry(&key);
14938  if (entry == kNotFound) return GetHeap()->undefined_value();
14939  return get(EntryToIndex(entry) + 1);
14940 }
14941 
14942 
14943 MaybeObject* CompilationCacheTable::Put(String* src,
14944  Context* context,
14945  Object* value) {
14946  SharedFunctionInfo* shared = context->closure()->shared();
14947  StringSharedKey key(src,
14948  shared,
14949  FLAG_use_strict ? STRICT : SLOPPY,
14950  RelocInfo::kNoPosition);
14951  CompilationCacheTable* cache;
14952  MaybeObject* maybe_cache = EnsureCapacity(1, &key);
14953  if (!maybe_cache->To(&cache)) return maybe_cache;
14954 
14955  Object* k;
14956  MaybeObject* maybe_k = key.AsObject(GetHeap());
14957  if (!maybe_k->To(&k)) return maybe_k;
14958 
14959  int entry = cache->FindInsertionEntry(key.Hash());
14960  cache->set(EntryToIndex(entry), k);
14961  cache->set(EntryToIndex(entry) + 1, value);
14962  cache->ElementAdded();
14963  return cache;
14964 }
14965 
14966 
14967 MaybeObject* CompilationCacheTable::PutEval(String* src,
14968  Context* context,
14969  SharedFunctionInfo* value,
14970  int scope_position) {
14971  StringSharedKey key(src,
14972  context->closure()->shared(),
14973  value->strict_mode(),
14974  scope_position);
14975  Object* obj;
14976  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
14977  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
14978  }
14979 
14980  CompilationCacheTable* cache =
14981  reinterpret_cast<CompilationCacheTable*>(obj);
14982  int entry = cache->FindInsertionEntry(key.Hash());
14983 
14984  Object* k;
14985  { MaybeObject* maybe_k = key.AsObject(GetHeap());
14986  if (!maybe_k->ToObject(&k)) return maybe_k;
14987  }
14988 
14989  cache->set(EntryToIndex(entry), k);
14990  cache->set(EntryToIndex(entry) + 1, value);
14991  cache->ElementAdded();
14992  return cache;
14993 }
14994 
14995 
14996 MaybeObject* CompilationCacheTable::PutRegExp(String* src,
14997  JSRegExp::Flags flags,
14998  FixedArray* value) {
14999  RegExpKey key(src, flags);
15000  Object* obj;
15001  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
15002  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15003  }
15004 
15005  CompilationCacheTable* cache =
15006  reinterpret_cast<CompilationCacheTable*>(obj);
15007  int entry = cache->FindInsertionEntry(key.Hash());
15008  // We store the value in the key slot, and compare the search key
15009  // to the stored value with a custom IsMatch function during lookups.
15010  cache->set(EntryToIndex(entry), value);
15011  cache->set(EntryToIndex(entry) + 1, value);
15012  cache->ElementAdded();
15013  return cache;
15014 }
15015 
15016 
15017 void CompilationCacheTable::Remove(Object* value) {
15018  Object* the_hole_value = GetHeap()->the_hole_value();
15019  for (int entry = 0, size = Capacity(); entry < size; entry++) {
15020  int entry_index = EntryToIndex(entry);
15021  int value_index = entry_index + 1;
15022  if (get(value_index) == value) {
15023  NoWriteBarrierSet(this, entry_index, the_hole_value);
15024  NoWriteBarrierSet(this, value_index, the_hole_value);
15025  ElementRemoved();
15026  }
15027  }
15028  return;
15029 }
15030 
15031 
15032 // StringsKey used for HashTable where key is array of internalized strings.
15033 class StringsKey : public HashTableKey {
15034  public:
15035  explicit StringsKey(FixedArray* strings) : strings_(strings) { }
15036 
15037  bool IsMatch(Object* strings) {
15038  FixedArray* o = FixedArray::cast(strings);
15039  int len = strings_->length();
15040  if (o->length() != len) return false;
15041  for (int i = 0; i < len; i++) {
15042  if (o->get(i) != strings_->get(i)) return false;
15043  }
15044  return true;
15045  }
15046 
15047  uint32_t Hash() { return HashForObject(strings_); }
15048 
15049  uint32_t HashForObject(Object* obj) {
15050  FixedArray* strings = FixedArray::cast(obj);
15051  int len = strings->length();
15052  uint32_t hash = 0;
15053  for (int i = 0; i < len; i++) {
15054  hash ^= String::cast(strings->get(i))->Hash();
15055  }
15056  return hash;
15057  }
15058 
15059  Object* AsObject(Heap* heap) { return strings_; }
15060 
15061  private:
15062  FixedArray* strings_;
15063 };
15064 
15065 
15066 Object* MapCache::Lookup(FixedArray* array) {
15067  StringsKey key(array);
15068  int entry = FindEntry(&key);
15069  if (entry == kNotFound) return GetHeap()->undefined_value();
15070  return get(EntryToIndex(entry) + 1);
15071 }
15072 
15073 
15074 MaybeObject* MapCache::Put(FixedArray* array, Map* value) {
15075  StringsKey key(array);
15076  Object* obj;
15077  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
15078  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15079  }
15080 
15081  MapCache* cache = reinterpret_cast<MapCache*>(obj);
15082  int entry = cache->FindInsertionEntry(key.Hash());
15083  cache->set(EntryToIndex(entry), array);
15084  cache->set(EntryToIndex(entry) + 1, value);
15085  cache->ElementAdded();
15086  return cache;
15087 }
15088 
15089 
15090 template<typename Shape, typename Key>
15091 MaybeObject* Dictionary<Shape, Key>::Allocate(Heap* heap,
15092  int at_least_space_for,
15093  PretenureFlag pretenure) {
15094  Object* obj;
15095  { MaybeObject* maybe_obj =
15096  HashTable<Shape, Key>::Allocate(
15097  heap,
15098  at_least_space_for,
15099  USE_DEFAULT_MINIMUM_CAPACITY,
15100  pretenure);
15101  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15102  }
15103  // Initialize the next enumeration index.
15104  Dictionary<Shape, Key>::cast(obj)->
15105  SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
15106  return obj;
15107 }
15108 
15109 
15110 void NameDictionary::DoGenerateNewEnumerationIndices(
15111  Handle<NameDictionary> dictionary) {
15112  CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
15113  dictionary->GenerateNewEnumerationIndices());
15114 }
15115 
15116 template<typename Shape, typename Key>
15117 MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
15118  Heap* heap = Dictionary<Shape, Key>::GetHeap();
15119  int length = HashTable<Shape, Key>::NumberOfElements();
15120 
15121  // Allocate and initialize iteration order array.
15122  Object* obj;
15123  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
15124  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15125  }
15126  FixedArray* iteration_order = FixedArray::cast(obj);
15127  for (int i = 0; i < length; i++) {
15128  iteration_order->set(i, Smi::FromInt(i));
15129  }
15130 
15131  // Allocate array with enumeration order.
15132  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
15133  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15134  }
15135  FixedArray* enumeration_order = FixedArray::cast(obj);
15136 
15137  // Fill the enumeration order array with property details.
15138  int capacity = HashTable<Shape, Key>::Capacity();
15139  int pos = 0;
15140  for (int i = 0; i < capacity; i++) {
15141  if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
15142  int index = DetailsAt(i).dictionary_index();
15143  enumeration_order->set(pos++, Smi::FromInt(index));
15144  }
15145  }
15146 
15147  // Sort the arrays wrt. enumeration order.
15148  iteration_order->SortPairs(enumeration_order, enumeration_order->length());
15149 
15150  // Overwrite the enumeration_order with the enumeration indices.
15151  for (int i = 0; i < length; i++) {
15152  int index = Smi::cast(iteration_order->get(i))->value();
15153  int enum_index = PropertyDetails::kInitialIndex + i;
15154  enumeration_order->set(index, Smi::FromInt(enum_index));
15155  }
15156 
15157  // Update the dictionary with new indices.
15158  capacity = HashTable<Shape, Key>::Capacity();
15159  pos = 0;
15160  for (int i = 0; i < capacity; i++) {
15161  if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
15162  int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
15163  PropertyDetails details = DetailsAt(i);
15164  PropertyDetails new_details = PropertyDetails(
15165  details.attributes(), details.type(), enum_index);
15166  DetailsAtPut(i, new_details);
15167  }
15168  }
15169 
15170  // Set the next enumeration index.
15171  SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
15172  return this;
15173 }
15174 
15175 template<typename Shape, typename Key>
15176 MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) {
15177  // Check whether there are enough enumeration indices to add n elements.
15178  if (Shape::kIsEnumerable &&
15179  !PropertyDetails::IsValidIndex(NextEnumerationIndex() + n)) {
15180  // If not, we generate new indices for the properties.
15181  Object* result;
15182  { MaybeObject* maybe_result = GenerateNewEnumerationIndices();
15183  if (!maybe_result->ToObject(&result)) return maybe_result;
15184  }
15185  }
15186  return HashTable<Shape, Key>::EnsureCapacity(n, key);
15187 }
15188 
15189 
15190 template<typename Shape, typename Key>
15191 Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
15192  JSReceiver::DeleteMode mode) {
15193  Heap* heap = Dictionary<Shape, Key>::GetHeap();
15194  PropertyDetails details = DetailsAt(entry);
15195  // Ignore attributes if forcing a deletion.
15196  if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
15197  return heap->false_value();
15198  }
15199  SetEntry(entry, heap->the_hole_value(), heap->the_hole_value());
15200  HashTable<Shape, Key>::ElementRemoved();
15201  return heap->true_value();
15202 }
15203 
15204 
15205 template<typename Shape, typename Key>
15206 MaybeObject* Dictionary<Shape, Key>::Shrink(Key key) {
15207  return HashTable<Shape, Key>::Shrink(key);
15208 }
15209 
15210 
15211 template<typename Shape, typename Key>
15212 MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
15213  int entry = this->FindEntry(key);
15214 
15215  // If the entry is present set the value;
15216  if (entry != Dictionary<Shape, Key>::kNotFound) {
15217  ValueAtPut(entry, value);
15218  return this;
15219  }
15220 
15221  // Check whether the dictionary should be extended.
15222  Object* obj;
15223  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
15224  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15225  }
15226 
15227  Object* k;
15228  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
15229  if (!maybe_k->ToObject(&k)) return maybe_k;
15230  }
15231  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
15232 
15233  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
15234  Dictionary<Shape, Key>::Hash(key));
15235 }
15236 
15237 
15238 template<typename Shape, typename Key>
15239 MaybeObject* Dictionary<Shape, Key>::Add(Key key,
15240  Object* value,
15241  PropertyDetails details) {
15242  // Validate that the key is absent.
15243  SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
15244  // Check whether the dictionary should be extended.
15245  Object* obj;
15246  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
15247  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15248  }
15249 
15250  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
15251  Dictionary<Shape, Key>::Hash(key));
15252 }
15253 
15254 
15255 // Add a key, value pair to the dictionary.
15256 template<typename Shape, typename Key>
15257 MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
15258  Object* value,
15259  PropertyDetails details,
15260  uint32_t hash) {
15261  // Compute the key object.
15262  Object* k;
15263  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
15264  if (!maybe_k->ToObject(&k)) return maybe_k;
15265  }
15266 
15267  uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash);
15268  // Insert element at empty or deleted entry
15269  if (!details.IsDeleted() &&
15270  details.dictionary_index() == 0 &&
15271  Shape::kIsEnumerable) {
15272  // Assign an enumeration index to the property and update
15273  // SetNextEnumerationIndex.
15274  int index = NextEnumerationIndex();
15275  details = PropertyDetails(details.attributes(), details.type(), index);
15276  SetNextEnumerationIndex(index + 1);
15277  }
15278  SetEntry(entry, k, value, details);
15279  ASSERT((Dictionary<Shape, Key>::KeyAt(entry)->IsNumber() ||
15280  Dictionary<Shape, Key>::KeyAt(entry)->IsName()));
15281  HashTable<Shape, Key>::ElementAdded();
15282  return this;
15283 }
15284 
15285 
15286 void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
15287  // If the dictionary requires slow elements an element has already
15288  // been added at a high index.
15289  if (requires_slow_elements()) return;
15290  // Check if this index is high enough that we should require slow
15291  // elements.
15292  if (key > kRequiresSlowElementsLimit) {
15293  set_requires_slow_elements();
15294  return;
15295  }
15296  // Update max key value.
15297  Object* max_index_object = get(kMaxNumberKeyIndex);
15298  if (!max_index_object->IsSmi() || max_number_key() < key) {
15299  FixedArray::set(kMaxNumberKeyIndex,
15300  Smi::FromInt(key << kRequiresSlowElementsTagSize));
15301  }
15302 }
15303 
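
// Aside: a standalone sketch of the bookkeeping in UpdateMaxNumberKey above.
// The maximum numeric key is stored shifted left by one tag bit so the low
// bit can mean "requires slow elements"; keys above a limit set that flag.
// The constants here are illustrative, not the exact V8 values.
//
// #include <cstdint>
// #include <iostream>
//
// struct ElementBookkeeping {
//   static const uint32_t kTagSize = 1;
//   static const uint32_t kSlowLimit = (1u << 29) - 1;
//   uint32_t packed = 0;                        // max_key << kTagSize | slow_bit
//
//   bool requires_slow_elements() const { return packed & 1; }
//   uint32_t max_number_key() const { return packed >> kTagSize; }
//
//   void Update(uint32_t key) {
//     if (requires_slow_elements()) return;
//     if (key > kSlowLimit) { packed |= 1; return; }
//     if (key > max_number_key()) packed = key << kTagSize;
//   }
// };
//
// int main() {
//   ElementBookkeeping b;
//   b.Update(7);
//   b.Update(3);
//   std::cout << b.max_number_key() << " "
//             << b.requires_slow_elements() << "\n";  // 7 0
// }
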
15304 Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
15305  Handle<SeededNumberDictionary> dictionary,
15306  uint32_t key,
15307  Handle<Object> value,
15308  PropertyDetails details) {
15309  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15310  dictionary->AddNumberEntry(key, *value, details),
15311  SeededNumberDictionary);
15312 }
15313 
15314 MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
15315  Object* value,
15316  PropertyDetails details) {
15317  UpdateMaxNumberKey(key);
15318  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
15319  return Add(key, value, details);
15320 }
15321 
15322 
15323 MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
15324  Object* value) {
15325  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
15326  return Add(key, value, PropertyDetails(NONE, NORMAL, 0));
15327 }
15328 
15329 
15330 MaybeObject* SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) {
15331  UpdateMaxNumberKey(key);
15332  return AtPut(key, value);
15333 }
15334 
15335 
15336 MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key,
15337  Object* value) {
15338  return AtPut(key, value);
15339 }
15340 
15341 
15342 Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
15343  Handle<SeededNumberDictionary> dictionary,
15344  uint32_t index,
15345  Handle<Object> value,
15346  PropertyDetails details) {
15347  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15348  dictionary->Set(index, *value, details),
15349  SeededNumberDictionary);
15350 }
15351 
15352 
15353 Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
15354  Handle<UnseededNumberDictionary> dictionary,
15355  uint32_t index,
15356  Handle<Object> value) {
15357  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
15358  dictionary->Set(index, *value),
15359  UnseededNumberDictionary);
15360 }
15361 
15362 
15363 MaybeObject* SeededNumberDictionary::Set(uint32_t key,
15364  Object* value,
15365  PropertyDetails details) {
15366  int entry = FindEntry(key);
15367  if (entry == kNotFound) return AddNumberEntry(key, value, details);
15368  // Preserve enumeration index.
15369  details = PropertyDetails(details.attributes(),
15370  details.type(),
15371  DetailsAt(entry).dictionary_index());
15372  MaybeObject* maybe_object_key =
15373  SeededNumberDictionaryShape::AsObject(GetHeap(), key);
15374  Object* object_key;
15375  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
15376  SetEntry(entry, object_key, value, details);
15377  return this;
15378 }
15379 
15380 
15381 MaybeObject* UnseededNumberDictionary::Set(uint32_t key,
15382  Object* value) {
15383  int entry = FindEntry(key);
15384  if (entry == kNotFound) return AddNumberEntry(key, value);
15385  MaybeObject* maybe_object_key =
15386  UnseededNumberDictionaryShape::AsObject(GetHeap(), key);
15387  Object* object_key;
15388  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
15389  SetEntry(entry, object_key, value);
15390  return this;
15391 }
15392 
15393 
15394 
15395 template<typename Shape, typename Key>
15396 int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes(
15397  PropertyAttributes filter) {
15398  int capacity = HashTable<Shape, Key>::Capacity();
15399  int result = 0;
15400  for (int i = 0; i < capacity; i++) {
15401  Object* k = HashTable<Shape, Key>::KeyAt(i);
15402  if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15403  PropertyDetails details = DetailsAt(i);
15404  if (details.IsDeleted()) continue;
15405  PropertyAttributes attr = details.attributes();
15406  if ((attr & filter) == 0) result++;
15407  }
15408  }
15409  return result;
15410 }
15411 
15412 
15413 template<typename Shape, typename Key>
15414 int Dictionary<Shape, Key>::NumberOfEnumElements() {
15415  return NumberOfElementsFilterAttributes(
15416  static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
15417 }
15418 
15419 
15420 template<typename Shape, typename Key>
15421 void Dictionary<Shape, Key>::CopyKeysTo(
15422  FixedArray* storage,
15423  PropertyAttributes filter,
15424  typename Dictionary<Shape, Key>::SortMode sort_mode) {
15425  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15426  int capacity = HashTable<Shape, Key>::Capacity();
15427  int index = 0;
15428  for (int i = 0; i < capacity; i++) {
15429  Object* k = HashTable<Shape, Key>::KeyAt(i);
15430  if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15431  PropertyDetails details = DetailsAt(i);
15432  if (details.IsDeleted()) continue;
15433  PropertyAttributes attr = details.attributes();
15434  if ((attr & filter) == 0) storage->set(index++, k);
15435  }
15436  }
15437  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15438  storage->SortPairs(storage, index);
15439  }
15440  ASSERT(storage->length() >= index);
15441 }
15442 
15443 
15444 struct EnumIndexComparator {
15445  explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
15446  bool operator() (Smi* a, Smi* b) {
15447  PropertyDetails da(dict->DetailsAt(a->value()));
15448  PropertyDetails db(dict->DetailsAt(b->value()));
15449  return da.dictionary_index() < db.dictionary_index();
15450  }
15451  NameDictionary* dict;
15452 };
15453 
15454 
15455 void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
15456  int length = storage->length();
15457  int capacity = Capacity();
15458  int properties = 0;
15459  for (int i = 0; i < capacity; i++) {
15460  Object* k = KeyAt(i);
15461  if (IsKey(k) && !k->IsSymbol()) {
15462  PropertyDetails details = DetailsAt(i);
15463  if (details.IsDeleted() || details.IsDontEnum()) continue;
15464  storage->set(properties, Smi::FromInt(i));
15465  properties++;
15466  if (properties == length) break;
15467  }
15468  }
15469  EnumIndexComparator cmp(this);
15470  Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
15471  std::sort(start, start + length, cmp);
15472  for (int i = 0; i < length; i++) {
15473  int index = Smi::cast(storage->get(i))->value();
15474  storage->set(i, KeyAt(index));
15475  }
15476 }
15477 
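// Illustrative sketch (not part of objects.cc): CopyEnumKeysTo above works in
// two passes -- it first stores the dictionary entry indices as Smis, sorts
// them by each entry's enumeration index (EnumIndexComparator), and only then
// rewrites the storage with the actual keys. A standalone analogue of that
// ordering step, using invented data and nothing from V8:
//
//   std::vector<int> entries = {0, 1, 2};        // entry indices
//   std::vector<int> enum_index = {30, 10, 20};  // per-entry insertion order
//   std::sort(entries.begin(), entries.end(),
//             [&](int a, int b) { return enum_index[a] < enum_index[b]; });
//   // entries is now {1, 2, 0}: keys come out in property-insertion order.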
15478 
15479 template<typename Shape, typename Key>
15480 void Dictionary<Shape, Key>::CopyKeysTo(
15481  FixedArray* storage,
15482  int index,
15483  PropertyAttributes filter,
15484  typename Dictionary<Shape, Key>::SortMode sort_mode) {
15485  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
15486  int capacity = HashTable<Shape, Key>::Capacity();
15487  for (int i = 0; i < capacity; i++) {
15488  Object* k = HashTable<Shape, Key>::KeyAt(i);
15489  if (HashTable<Shape, Key>::IsKey(k) && !FilterKey(k, filter)) {
15490  PropertyDetails details = DetailsAt(i);
15491  if (details.IsDeleted()) continue;
15492  PropertyAttributes attr = details.attributes();
15493  if ((attr & filter) == 0) storage->set(index++, k);
15494  }
15495  }
15496  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
15497  storage->SortPairs(storage, index);
15498  }
15499  ASSERT(storage->length() >= index);
15500 }
15501 
15502 
15503 // Backwards lookup (slow).
15504 template<typename Shape, typename Key>
15505 Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
15506  int capacity = HashTable<Shape, Key>::Capacity();
15507  for (int i = 0; i < capacity; i++) {
15508  Object* k = HashTable<Shape, Key>::KeyAt(i);
15509  if (Dictionary<Shape, Key>::IsKey(k)) {
15510  Object* e = ValueAt(i);
15511  if (e->IsPropertyCell()) {
15512  e = PropertyCell::cast(e)->value();
15513  }
15514  if (e == value) return k;
15515  }
15516  }
15517  Heap* heap = Dictionary<Shape, Key>::GetHeap();
15518  return heap->undefined_value();
15519 }
15520 
15521 
15522 MaybeObject* NameDictionary::TransformPropertiesToFastFor(
15523  JSObject* obj, int unused_property_fields) {
15524  // Make sure we preserve dictionary representation if there are too many
15525  // descriptors.
15526  int number_of_elements = NumberOfElements();
15527  if (number_of_elements > kMaxNumberOfDescriptors) return obj;
15528 
15529  if (number_of_elements != NextEnumerationIndex()) {
15530  MaybeObject* maybe_result = GenerateNewEnumerationIndices();
15531  if (maybe_result->IsFailure()) return maybe_result;
15532  }
15533 
15534  int instance_descriptor_length = 0;
15535  int number_of_fields = 0;
15536 
15537  Heap* heap = GetHeap();
15538 
15539  // Compute the length of the instance descriptor.
15540  int capacity = Capacity();
15541  for (int i = 0; i < capacity; i++) {
15542  Object* k = KeyAt(i);
15543  if (IsKey(k)) {
15544  Object* value = ValueAt(i);
15545  PropertyType type = DetailsAt(i).type();
15546  ASSERT(type != FIELD);
15547  instance_descriptor_length++;
15548  if (type == NORMAL && !value->IsJSFunction()) {
15549  number_of_fields += 1;
15550  }
15551  }
15552  }
15553 
15554  int inobject_props = obj->map()->inobject_properties();
15555 
15556  // Allocate new map.
15557  Map* new_map;
15558  MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
15559  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
15560  new_map->set_dictionary_map(false);
15561 
15562  if (instance_descriptor_length == 0) {
15563  ASSERT_LE(unused_property_fields, inobject_props);
15564  // Transform the object.
15565  new_map->set_unused_property_fields(inobject_props);
15566  obj->set_map(new_map);
15567  obj->set_properties(heap->empty_fixed_array());
15568  // Check that it really works.
15569  ASSERT(obj->HasFastProperties());
15570  return obj;
15571  }
15572 
15573  // Allocate the instance descriptor.
15574  DescriptorArray* descriptors;
15575  MaybeObject* maybe_descriptors =
15576  DescriptorArray::Allocate(GetIsolate(), instance_descriptor_length);
15577  if (!maybe_descriptors->To(&descriptors)) {
15578  return maybe_descriptors;
15579  }
15580 
15581  DescriptorArray::WhitenessWitness witness(descriptors);
15582 
15583  int number_of_allocated_fields =
15584  number_of_fields + unused_property_fields - inobject_props;
15585  if (number_of_allocated_fields < 0) {
15586  // There is enough inobject space for all fields (including unused).
15587  number_of_allocated_fields = 0;
15588  unused_property_fields = inobject_props - number_of_fields;
15589  }
15590 
15591  // Allocate the fixed array for the fields.
15592  FixedArray* fields;
15593  MaybeObject* maybe_fields =
15594  heap->AllocateFixedArray(number_of_allocated_fields);
15595  if (!maybe_fields->To(&fields)) return maybe_fields;
15596 
15597  // Fill in the instance descriptor and the fields.
15598  int current_offset = 0;
15599  for (int i = 0; i < capacity; i++) {
15600  Object* k = KeyAt(i);
15601  if (IsKey(k)) {
15602  Object* value = ValueAt(i);
15603  Name* key;
15604  if (k->IsSymbol()) {
15605  key = Symbol::cast(k);
15606  } else {
15607  // Ensure the key is a unique name before writing into the
15608  // instance descriptor.
15609  MaybeObject* maybe_key = heap->InternalizeString(String::cast(k));
15610  if (!maybe_key->To(&key)) return maybe_key;
15611  }
15612 
15613  PropertyDetails details = DetailsAt(i);
15614  int enumeration_index = details.dictionary_index();
15615  PropertyType type = details.type();
15616 
15617  if (value->IsJSFunction()) {
15618  ConstantDescriptor d(key, value, details.attributes());
15619  descriptors->Set(enumeration_index - 1, &d, witness);
15620  } else if (type == NORMAL) {
15621  if (current_offset < inobject_props) {
15622  obj->InObjectPropertyAtPut(current_offset,
15623  value,
15624  UPDATE_WRITE_BARRIER);
15625  } else {
15626  int offset = current_offset - inobject_props;
15627  fields->set(offset, value);
15628  }
15629  FieldDescriptor d(key,
15630  current_offset++,
15631  details.attributes(),
15632  // TODO(verwaest): value->OptimalRepresentation();
15633  Representation::Tagged());
15634  descriptors->Set(enumeration_index - 1, &d, witness);
15635  } else if (type == CALLBACKS) {
15636  CallbacksDescriptor d(key,
15637  value,
15638  details.attributes());
15639  descriptors->Set(enumeration_index - 1, &d, witness);
15640  } else {
15641  UNREACHABLE();
15642  }
15643  }
15644  }
15645  ASSERT(current_offset == number_of_fields);
15646 
15647  descriptors->Sort();
15648 
15649  new_map->InitializeDescriptors(descriptors);
15650  new_map->set_unused_property_fields(unused_property_fields);
15651 
15652  // Transform the object.
15653  obj->set_map(new_map);
15654 
15655  obj->set_properties(fields);
15656  ASSERT(obj->IsJSObject());
15657 
15658  // Check that it really works.
15659  ASSERT(obj->HasFastProperties());
15660 
15661  return obj;
15662 }
15663 
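// Worked example (illustrative, not part of objects.cc) of the field
// accounting in TransformPropertiesToFastFor above, using made-up numbers:
// with number_of_fields = 3, unused_property_fields = 2 and inobject_props =
// 4, number_of_allocated_fields = 3 + 2 - 4 = 1, so one out-of-object slot is
// allocated in `fields`. With inobject_props = 8 instead, the intermediate
// result is negative (3 + 2 - 8 = -3), so it is clamped to 0 and
// unused_property_fields becomes 8 - 3 = 5: every field fits in-object and
// the out-of-object properties array stays empty.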
15664 
15665 Handle<ObjectHashSet> ObjectHashSet::EnsureCapacity(
15666  Handle<ObjectHashSet> table,
15667  int n,
15668  Handle<Object> key,
15669  PretenureFlag pretenure) {
15670  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
15671  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15672  table_base->EnsureCapacity(n, *key, pretenure),
15673  ObjectHashSet);
15674 }
15675 
15676 
15677 Handle<ObjectHashSet> ObjectHashSet::Shrink(Handle<ObjectHashSet> table,
15678  Handle<Object> key) {
15679  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
15680  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15681  table_base->Shrink(*key),
15682  ObjectHashSet);
15683 }
15684 
15685 
15686 bool ObjectHashSet::Contains(Object* key) {
15687  ASSERT(IsKey(key));
15688 
15689  // If the object does not have an identity hash, it was never used as a key.
15690  Object* hash = key->GetHash();
15691  if (hash->IsUndefined()) return false;
15692 
15693  return (FindEntry(key) != kNotFound);
15694 }
15695 
15696 
15697 Handle<ObjectHashSet> ObjectHashSet::Add(Handle<ObjectHashSet> table,
15698  Handle<Object> key) {
15699  ASSERT(table->IsKey(*key));
15700 
15701  // Make sure the key object has an identity hash code.
15702  Handle<Object> object_hash = Object::GetOrCreateHash(key,
15703  table->GetIsolate());
15704 
15705  int entry = table->FindEntry(*key);
15706 
15707  // Check whether key is already present.
15708  if (entry != kNotFound) return table;
15709 
15710  // Check whether the hash set should be extended and add entry.
15711  Handle<ObjectHashSet> new_table =
15712  ObjectHashSet::EnsureCapacity(table, 1, key);
15713  entry = new_table->FindInsertionEntry(Smi::cast(*object_hash)->value());
15714  new_table->set(EntryToIndex(entry), *key);
15715  new_table->ElementAdded();
15716  return new_table;
15717 }
15718 
15719 
15720 Handle<ObjectHashSet> ObjectHashSet::Remove(Handle<ObjectHashSet> table,
15721  Handle<Object> key) {
15722  ASSERT(table->IsKey(*key));
15723 
15724  // If the object does not have an identity hash, it was never used as a key.
15725  if (key->GetHash()->IsUndefined()) return table;
15726 
15727  int entry = table->FindEntry(*key);
15728 
15729  // Check whether key is actually present.
15730  if (entry == kNotFound) return table;
15731 
15732  // Remove entry and try to shrink this hash set.
15733  table->set_the_hole(EntryToIndex(entry));
15734  table->ElementRemoved();
15735 
15736  return ObjectHashSet::Shrink(table, key);
15737 }
15738 
15739 
15740 Handle<ObjectHashTable> ObjectHashTable::EnsureCapacity(
15741  Handle<ObjectHashTable> table,
15742  int n,
15743  Handle<Object> key,
15744  PretenureFlag pretenure) {
15745  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
15746  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15747  table_base->EnsureCapacity(n, *key, pretenure),
15748  ObjectHashTable);
15749 }
15750 
15751 
15752 Handle<ObjectHashTable> ObjectHashTable::Shrink(Handle<ObjectHashTable> table,
15753  Handle<Object> key) {
15754  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
15755  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
15756  table_base->Shrink(*key),
15757  ObjectHashTable);
15758 }
15759 
15760 
15761 Object* ObjectHashTable::Lookup(Object* key) {
15762  ASSERT(IsKey(key));
15763 
15764  // If the object does not have an identity hash, it was never used as a key.
15765  Object* hash = key->GetHash();
15766  if (hash->IsUndefined()) {
15767  return GetHeap()->the_hole_value();
15768  }
15769  int entry = FindEntry(key);
15770  if (entry == kNotFound) return GetHeap()->the_hole_value();
15771  return get(EntryToIndex(entry) + 1);
15772 }
15773 
15774 
15775 Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
15776  Handle<Object> key,
15777  Handle<Object> value) {
15778  ASSERT(table->IsKey(*key));
15779 
15780  Isolate* isolate = table->GetIsolate();
15781 
15782  // Make sure the key object has an identity hash code.
15783  Handle<Object> hash = Object::GetOrCreateHash(key, isolate);
15784 
15785  int entry = table->FindEntry(*key);
15786 
15787  // Check whether to perform removal operation.
15788  if (value->IsTheHole()) {
15789  if (entry == kNotFound) return table;
15790  table->RemoveEntry(entry);
15791  return Shrink(table, key);
15792  }
15793 
15794  // Key is already in table, just overwrite value.
15795  if (entry != kNotFound) {
15796  table->set(EntryToIndex(entry) + 1, *value);
15797  return table;
15798  }
15799 
15800  // Check whether the hash table should be extended.
15801  table = EnsureCapacity(table, 1, key);
15802  table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()),
15803  *key,
15804  *value);
15805  return table;
15806 }
15807 
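// Illustrative sketch (not part of objects.cc): Put/Lookup/RemoveEntry above
// give ObjectHashTable map-like semantics keyed on object identity, with the
// hole value doubling as the "absent" sentinel and as the deletion marker.
// Roughly (handle scope and setup omitted, local names assumed):
//
//   Handle<ObjectHashTable> table = ...;
//   Handle<Object> key = ..., value = ...;
//   table = ObjectHashTable::Put(table, key, value);  // insert or overwrite
//   Object* found = table->Lookup(*key);              // == *value
//   table = ObjectHashTable::Put(
//       table, key, isolate->factory()->the_hole_value());  // removes entry
//   // table->Lookup(*key) now yields the hole sentinel again.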
15808 
15809 void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
15810  set(EntryToIndex(entry), key);
15811  set(EntryToIndex(entry) + 1, value);
15812  ElementAdded();
15813 }
15814 
15815 
15816 void ObjectHashTable::RemoveEntry(int entry) {
15817  set_the_hole(EntryToIndex(entry));
15818  set_the_hole(EntryToIndex(entry) + 1);
15819  ElementRemoved();
15820 }
15821 
15822 
15823 Object* WeakHashTable::Lookup(Object* key) {
15824  ASSERT(IsKey(key));
15825  int entry = FindEntry(key);
15826  if (entry == kNotFound) return GetHeap()->the_hole_value();
15827  return get(EntryToValueIndex(entry));
15828 }
15829 
15830 
15831 MaybeObject* WeakHashTable::Put(Object* key, Object* value) {
15832  ASSERT(IsKey(key));
15833  int entry = FindEntry(key);
15834  // Key is already in table, just overwrite value.
15835  if (entry != kNotFound) {
15836  set(EntryToValueIndex(entry), value);
15837  return this;
15838  }
15839 
15840  // Check whether the hash table should be extended.
15841  Object* obj;
15842  { MaybeObject* maybe_obj = EnsureCapacity(1, key, TENURED);
15843  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
15844  }
15845  WeakHashTable* table = WeakHashTable::cast(obj);
15846  table->AddEntry(table->FindInsertionEntry(Hash(key)), key, value);
15847  return table;
15848 }
15849 
15850 
15851 void WeakHashTable::AddEntry(int entry, Object* key, Object* value) {
15852  set(EntryToIndex(entry), key);
15853  set(EntryToValueIndex(entry), value);
15854  ElementAdded();
15855 }
15856 
15857 
15858 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
15859  DeclaredAccessorDescriptor* descriptor)
15860  : array_(descriptor->serialized_data()->GetDataStartAddress()),
15861  length_(descriptor->serialized_data()->length()),
15862  offset_(0) {
15863 }
15864 
15865 
15866 const DeclaredAccessorDescriptorData*
15867  DeclaredAccessorDescriptorIterator::Next() {
15868  ASSERT(offset_ < length_);
15869  uint8_t* ptr = &array_[offset_];
15870  ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
15871  const DeclaredAccessorDescriptorData* data =
15872  reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
15873  offset_ += sizeof(*data);
15874  ASSERT(offset_ <= length_);
15875  return data;
15876 }
15877 
15878 
15879 Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
15880  Isolate* isolate,
15881  const DeclaredAccessorDescriptorData& descriptor,
15882  Handle<DeclaredAccessorDescriptor> previous) {
15883  int previous_length =
15884  previous.is_null() ? 0 : previous->serialized_data()->length();
15885  int length = sizeof(descriptor) + previous_length;
15886  Handle<ByteArray> serialized_descriptor =
15887  isolate->factory()->NewByteArray(length);
15888  Handle<DeclaredAccessorDescriptor> value =
15889  isolate->factory()->NewDeclaredAccessorDescriptor();
15890  value->set_serialized_data(*serialized_descriptor);
15891  // Copy in the data.
15892  {
15893  DisallowHeapAllocation no_allocation;
15894  uint8_t* array = serialized_descriptor->GetDataStartAddress();
15895  if (previous_length != 0) {
15896  uint8_t* previous_array =
15897  previous->serialized_data()->GetDataStartAddress();
15898  OS::MemCopy(array, previous_array, previous_length);
15899  array += previous_length;
15900  }
15901  ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
15902  DeclaredAccessorDescriptorData* data =
15903  reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
15904  *data = descriptor;
15905  }
15906  return value;
15907 }
15908 
15909 
15910 #ifdef ENABLE_DEBUGGER_SUPPORT
15911 // Check if there is a break point at this code position.
15912 bool DebugInfo::HasBreakPoint(int code_position) {
15913  // Get the break point info object for this code position.
15914  Object* break_point_info = GetBreakPointInfo(code_position);
15915 
15916  // If there is no break point info object or no break points in the break
15917  // point info object there is no break point at this code position.
15918  if (break_point_info->IsUndefined()) return false;
15919  return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
15920 }
15921 
15922 
15923 // Get the break point info object for this code position.
15924 Object* DebugInfo::GetBreakPointInfo(int code_position) {
15925  // Find the index of the break point info object for this code position.
15926  int index = GetBreakPointInfoIndex(code_position);
15927 
15928  // Return the break point info object if any.
15929  if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
15930  return BreakPointInfo::cast(break_points()->get(index));
15931 }
15932 
15933 
15934 // Clear a break point at the specified code position.
15935 void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
15936  int code_position,
15937  Handle<Object> break_point_object) {
15938  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
15939  debug_info->GetIsolate());
15940  if (break_point_info->IsUndefined()) return;
15941  BreakPointInfo::ClearBreakPoint(
15942  Handle<BreakPointInfo>::cast(break_point_info),
15943  break_point_object);
15944 }
15945 
15946 
15947 void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
15948  int code_position,
15949  int source_position,
15950  int statement_position,
15951  Handle<Object> break_point_object) {
15952  Isolate* isolate = debug_info->GetIsolate();
15953  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
15954  isolate);
15955  if (!break_point_info->IsUndefined()) {
15956  BreakPointInfo::SetBreakPoint(
15957  Handle<BreakPointInfo>::cast(break_point_info),
15958  break_point_object);
15959  return;
15960  }
15961 
15962  // Adding a new break point for a code position which did not have any
15963  // break points before. Try to find a free slot.
15964  int index = kNoBreakPointInfo;
15965  for (int i = 0; i < debug_info->break_points()->length(); i++) {
15966  if (debug_info->break_points()->get(i)->IsUndefined()) {
15967  index = i;
15968  break;
15969  }
15970  }
15971  if (index == kNoBreakPointInfo) {
15972  // No free slot - extend break point info array.
15973  Handle<FixedArray> old_break_points =
15974  Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
15975  Handle<FixedArray> new_break_points =
15976  isolate->factory()->NewFixedArray(
15977  old_break_points->length() +
15978  Debug::kEstimatedNofBreakPointsInFunction);
15979 
15980  debug_info->set_break_points(*new_break_points);
15981  for (int i = 0; i < old_break_points->length(); i++) {
15982  new_break_points->set(i, old_break_points->get(i));
15983  }
15984  index = old_break_points->length();
15985  }
15986  ASSERT(index != kNoBreakPointInfo);
15987 
15988  // Allocate new BreakPointInfo object and set the break point.
15989  Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
15990  isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
15991  new_break_point_info->set_code_position(Smi::FromInt(code_position));
15992  new_break_point_info->set_source_position(Smi::FromInt(source_position));
15993  new_break_point_info->
15994  set_statement_position(Smi::FromInt(statement_position));
15995  new_break_point_info->set_break_point_objects(
15996  isolate->heap()->undefined_value());
15997  BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
15998  debug_info->break_points()->set(index, *new_break_point_info);
15999 }
16000 
16001 
16002 // Get the break point objects for a code position.
16003 Object* DebugInfo::GetBreakPointObjects(int code_position) {
16004  Object* break_point_info = GetBreakPointInfo(code_position);
16005  if (break_point_info->IsUndefined()) {
16006  return GetHeap()->undefined_value();
16007  }
16008  return BreakPointInfo::cast(break_point_info)->break_point_objects();
16009 }
16010 
16011 
16012 // Get the total number of break points.
16013 int DebugInfo::GetBreakPointCount() {
16014  if (break_points()->IsUndefined()) return 0;
16015  int count = 0;
16016  for (int i = 0; i < break_points()->length(); i++) {
16017  if (!break_points()->get(i)->IsUndefined()) {
16018  BreakPointInfo* break_point_info =
16019  BreakPointInfo::cast(break_points()->get(i));
16020  count += break_point_info->GetBreakPointCount();
16021  }
16022  }
16023  return count;
16024 }
16025 
16026 
16027 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
16028  Handle<Object> break_point_object) {
16029  Heap* heap = debug_info->GetHeap();
16030  if (debug_info->break_points()->IsUndefined()) return heap->undefined_value();
16031  for (int i = 0; i < debug_info->break_points()->length(); i++) {
16032  if (!debug_info->break_points()->get(i)->IsUndefined()) {
16033  Handle<BreakPointInfo> break_point_info =
16034  Handle<BreakPointInfo>(BreakPointInfo::cast(
16035  debug_info->break_points()->get(i)));
16036  if (BreakPointInfo::HasBreakPointObject(break_point_info,
16037  break_point_object)) {
16038  return *break_point_info;
16039  }
16040  }
16041  }
16042  return heap->undefined_value();
16043 }
16044 
16045 
16046 // Find the index of the break point info object for the specified code
16047 // position.
16048 int DebugInfo::GetBreakPointInfoIndex(int code_position) {
16049  if (break_points()->IsUndefined()) return kNoBreakPointInfo;
16050  for (int i = 0; i < break_points()->length(); i++) {
16051  if (!break_points()->get(i)->IsUndefined()) {
16052  BreakPointInfo* break_point_info =
16053  BreakPointInfo::cast(break_points()->get(i));
16054  if (break_point_info->code_position()->value() == code_position) {
16055  return i;
16056  }
16057  }
16058  }
16059  return kNoBreakPointInfo;
16060 }
16061 
16062 
16063 // Remove the specified break point object.
16064 void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
16065  Handle<Object> break_point_object) {
16066  Isolate* isolate = break_point_info->GetIsolate();
16067  // If there are no break points just ignore.
16068  if (break_point_info->break_point_objects()->IsUndefined()) return;
16069  // If there is a single break point clear it if it is the same.
16070  if (!break_point_info->break_point_objects()->IsFixedArray()) {
16071  if (break_point_info->break_point_objects() == *break_point_object) {
16072  break_point_info->set_break_point_objects(
16073  isolate->heap()->undefined_value());
16074  }
16075  return;
16076  }
16077  // If there are multiple break points, shrink the array.
16078  ASSERT(break_point_info->break_point_objects()->IsFixedArray());
16079  Handle<FixedArray> old_array =
16080  Handle<FixedArray>(
16081  FixedArray::cast(break_point_info->break_point_objects()));
16082  Handle<FixedArray> new_array =
16083  isolate->factory()->NewFixedArray(old_array->length() - 1);
16084  int found_count = 0;
16085  for (int i = 0; i < old_array->length(); i++) {
16086  if (old_array->get(i) == *break_point_object) {
16087  ASSERT(found_count == 0);
16088  found_count++;
16089  } else {
16090  new_array->set(i - found_count, old_array->get(i));
16091  }
16092  }
16093  // If the break point was found in the list change it.
16094  if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
16095 }
16096 
16097 
16098 // Add the specified break point object.
16099 void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
16100  Handle<Object> break_point_object) {
16101  Isolate* isolate = break_point_info->GetIsolate();
16102 
16103  // If there were no break point objects before, just set it.
16104  if (break_point_info->break_point_objects()->IsUndefined()) {
16105  break_point_info->set_break_point_objects(*break_point_object);
16106  return;
16107  }
16108  // If the break point object is the same as before just ignore.
16109  if (break_point_info->break_point_objects() == *break_point_object) return;
16110  // If there was one break point object before replace with array.
16111  if (!break_point_info->break_point_objects()->IsFixedArray()) {
16112  Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
16113  array->set(0, break_point_info->break_point_objects());
16114  array->set(1, *break_point_object);
16115  break_point_info->set_break_point_objects(*array);
16116  return;
16117  }
16118  // If there was more than one break point before extend array.
16119  Handle<FixedArray> old_array =
16120  Handle<FixedArray>(
16121  FixedArray::cast(break_point_info->break_point_objects()));
16122  Handle<FixedArray> new_array =
16123  isolate->factory()->NewFixedArray(old_array->length() + 1);
16124  for (int i = 0; i < old_array->length(); i++) {
16125  // If the break point was there before just ignore.
16126  if (old_array->get(i) == *break_point_object) return;
16127  new_array->set(i, old_array->get(i));
16128  }
16129  // Add the new break point.
16130  new_array->set(old_array->length(), *break_point_object);
16131  break_point_info->set_break_point_objects(*new_array);
16132 }
16133 
16134 
16135 bool BreakPointInfo::HasBreakPointObject(
16136  Handle<BreakPointInfo> break_point_info,
16137  Handle<Object> break_point_object) {
16138  // No break point.
16139  if (break_point_info->break_point_objects()->IsUndefined()) return false;
16140  // Single break point.
16141  if (!break_point_info->break_point_objects()->IsFixedArray()) {
16142  return break_point_info->break_point_objects() == *break_point_object;
16143  }
16144  // Multiple break points.
16145  FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
16146  for (int i = 0; i < array->length(); i++) {
16147  if (array->get(i) == *break_point_object) {
16148  return true;
16149  }
16150  }
16151  return false;
16152 }
16153 
16154 
16155 // Get the number of break points.
16156 int BreakPointInfo::GetBreakPointCount() {
16157  // No break point.
16158  if (break_point_objects()->IsUndefined()) return 0;
16159  // Single break point.
16160  if (!break_point_objects()->IsFixedArray()) return 1;
16161  // Multiple break points.
16162  return FixedArray::cast(break_point_objects())->length();
16163 }
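// Illustrative summary (not part of objects.cc): break_point_objects() is a
// three-state field, and the BreakPointInfo functions above all branch on the
// same shape:
//
//   undefined             -> no break points      (GetBreakPointCount() == 0)
//   single object         -> one break point      (count == 1)
//   FixedArray of objects -> several break points (count == array length)
//
// SetBreakPoint promotes undefined -> single -> FixedArray as needed, while
// ClearBreakPoint resets the single case to undefined or copies the array
// minus the removed entry.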
16164 #endif // ENABLE_DEBUGGER_SUPPORT
16165 
16166 
16167 Object* JSDate::GetField(Object* object, Smi* index) {
16168  return JSDate::cast(object)->DoGetField(
16169  static_cast<FieldIndex>(index->value()));
16170 }
16171 
16172 
16173 Object* JSDate::DoGetField(FieldIndex index) {
16174  ASSERT(index != kDateValue);
16175 
16176  DateCache* date_cache = GetIsolate()->date_cache();
16177 
16178  if (index < kFirstUncachedField) {
16179  Object* stamp = cache_stamp();
16180  if (stamp != date_cache->stamp() && stamp->IsSmi()) {
16181  // Since the stamp is not NaN, the value is also not NaN.
16182  int64_t local_time_ms =
16183  date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
16184  SetLocalFields(local_time_ms, date_cache);
16185  }
16186  switch (index) {
16187  case kYear: return year();
16188  case kMonth: return month();
16189  case kDay: return day();
16190  case kWeekday: return weekday();
16191  case kHour: return hour();
16192  case kMinute: return min();
16193  case kSecond: return sec();
16194  default: UNREACHABLE();
16195  }
16196  }
16197 
16198  if (index >= kFirstUTCField) {
16199  return GetUTCField(index, value()->Number(), date_cache);
16200  }
16201 
16202  double time = value()->Number();
16203  if (std::isnan(time)) return GetIsolate()->heap()->nan_value();
16204 
16205  int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
16206  int days = DateCache::DaysFromTime(local_time_ms);
16207 
16208  if (index == kDays) return Smi::FromInt(days);
16209 
16210  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16211  if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
16212  ASSERT(index == kTimeInDay);
16213  return Smi::FromInt(time_in_day_ms);
16214 }
16215 
16216 
16217 Object* JSDate::GetUTCField(FieldIndex index,
16218  double value,
16219  DateCache* date_cache) {
16220  ASSERT(index >= kFirstUTCField);
16221 
16222  if (std::isnan(value)) return GetIsolate()->heap()->nan_value();
16223 
16224  int64_t time_ms = static_cast<int64_t>(value);
16225 
16226  if (index == kTimezoneOffset) {
16227  return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
16228  }
16229 
16230  int days = DateCache::DaysFromTime(time_ms);
16231 
16232  if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
16233 
16234  if (index <= kDayUTC) {
16235  int year, month, day;
16236  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16237  if (index == kYearUTC) return Smi::FromInt(year);
16238  if (index == kMonthUTC) return Smi::FromInt(month);
16239  ASSERT(index == kDayUTC);
16240  return Smi::FromInt(day);
16241  }
16242 
16243  int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
16244  switch (index) {
16245  case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
16246  case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
16247  case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
16248  case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
16249  case kDaysUTC: return Smi::FromInt(days);
16250  case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
16251  default: UNREACHABLE();
16252  }
16253 
16254  UNREACHABLE();
16255  return NULL;
16256 }
16257 
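// Worked example (illustrative, not part of objects.cc) of the time-in-day
// decomposition used by GetUTCField above, for time_in_day_ms = 45296789:
//
//   hour        = 45296789 / (60 * 60 * 1000)     = 12
//   minute      = (45296789 / (60 * 1000)) % 60   = 34
//   second      = (45296789 / 1000) % 60          = 56
//   millisecond = 45296789 % 1000                 = 789
//
// i.e. 12:34:56.789 within the day identified by DateCache::DaysFromTime().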
16258 
16259 void JSDate::SetValue(Object* value, bool is_value_nan) {
16260  set_value(value);
16261  if (is_value_nan) {
16262  HeapNumber* nan = GetIsolate()->heap()->nan_value();
16263  set_cache_stamp(nan, SKIP_WRITE_BARRIER);
16264  set_year(nan, SKIP_WRITE_BARRIER);
16265  set_month(nan, SKIP_WRITE_BARRIER);
16266  set_day(nan, SKIP_WRITE_BARRIER);
16267  set_hour(nan, SKIP_WRITE_BARRIER);
16268  set_min(nan, SKIP_WRITE_BARRIER);
16269  set_sec(nan, SKIP_WRITE_BARRIER);
16270  set_weekday(nan, SKIP_WRITE_BARRIER);
16271  } else {
16272  set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
16273  }
16274 }
16275 
16276 
16277 void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
16278  int days = DateCache::DaysFromTime(local_time_ms);
16279  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
16280  int year, month, day;
16281  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
16282  int weekday = date_cache->Weekday(days);
16283  int hour = time_in_day_ms / (60 * 60 * 1000);
16284  int min = (time_in_day_ms / (60 * 1000)) % 60;
16285  int sec = (time_in_day_ms / 1000) % 60;
16286  set_cache_stamp(date_cache->stamp());
16287  set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
16288  set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
16289  set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
16290  set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
16291  set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
16292  set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
16293  set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
16294 }
16295 
16296 
16297 void JSArrayBuffer::Neuter() {
16298  ASSERT(is_external());
16299  set_backing_store(NULL);
16300  set_byte_length(Smi::FromInt(0));
16301 }
16302 
16303 
16304 void JSArrayBufferView::NeuterView() {
16305  set_byte_offset(Smi::FromInt(0));
16306  set_byte_length(Smi::FromInt(0));
16307 }
16308 
16309 
16310 void JSDataView::Neuter() {
16311  NeuterView();
16312 }
16313 
16314 
16315 void JSTypedArray::Neuter() {
16316  NeuterView();
16317  set_length(Smi::FromInt(0));
16318  set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
16319 }
16320 
16321 
16322 static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
16323  switch (elements_kind) {
16324 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
16325  case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;
16326 
16327  TYPED_ARRAYS(TYPED_ARRAY_CASE)
16328 #undef TYPED_ARRAY_CASE
16329 
16330  default:
16331  UNREACHABLE();
16332  return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
16333  }
16334 }
16335 
16336 
16337 Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
16338  Handle<JSTypedArray> typed_array) {
16339 
16340  Handle<Map> map(typed_array->map());
16341  Isolate* isolate = typed_array->GetIsolate();
16342 
16343  ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind()));
16344 
16345  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
16346  Handle<FixedTypedArrayBase> fixed_typed_array(
16347  FixedTypedArrayBase::cast(typed_array->elements()));
16348  Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
16349  fixed_typed_array->DataSize(), false);
16350  memcpy(buffer->backing_store(),
16351  fixed_typed_array->DataPtr(),
16352  fixed_typed_array->DataSize());
16353  Handle<ExternalArray> new_elements =
16354  isolate->factory()->NewExternalArray(
16355  fixed_typed_array->length(), typed_array->type(),
16356  static_cast<uint8_t*>(buffer->backing_store()));
16357  Handle<Map> new_map = JSObject::GetElementsTransitionMap(
16358  typed_array,
16359  FixedToExternalElementsKind(map->elements_kind()));
16360 
16361  buffer->set_weak_first_view(*typed_array);
16362  ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value());
16363  typed_array->set_buffer(*buffer);
16364  typed_array->set_map_and_elements(*new_map, *new_elements);
16365 
16366  return buffer;
16367 }
16368 
16369 
16370 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
16371  Handle<Object> result(buffer(), GetIsolate());
16372  if (*result != Smi::FromInt(0)) {
16373  ASSERT(IsExternalArrayElementsKind(map()->elements_kind()));
16374  return Handle<JSArrayBuffer>::cast(result);
16375  }
16376  Handle<JSTypedArray> self(this);
16377  return MaterializeArrayBuffer(self);
16378 }
16379 
16380 
16381 HeapType* PropertyCell::type() {
16382  return static_cast<HeapType*>(type_raw());
16383 }
16384 
16385 
16386 void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
16387  ASSERT(IsPropertyCell());
16388  set_type_raw(type, ignored);
16389 }
16390 
16391 
16392 Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
16393  Handle<Object> value) {
16394  Isolate* isolate = cell->GetIsolate();
16395  Handle<HeapType> old_type(cell->type(), isolate);
16396  // TODO(2803): Do not track ConsString as constant because they cannot be
16397  // embedded into code.
16398  Handle<HeapType> new_type = value->IsConsString() || value->IsTheHole()
16399  ? HeapType::Any(isolate) : HeapType::Constant(value, isolate);
16400 
16401  if (new_type->Is(old_type)) {
16402  return old_type;
16403  }
16404 
16405  cell->dependent_code()->DeoptimizeDependentCodeGroup(
16406  isolate, DependentCode::kPropertyCellChangedGroup);
16407 
16408  if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
16409  return new_type;
16410  }
16411 
16412  return HeapType::Any(isolate);
16413 }
16414 
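// Illustrative sketch (not part of objects.cc): UpdatedType above implements a
// small lattice for global property cell types. Read as a sequence of stores
// through SetValueInferType (values v1 != v2 invented for illustration):
//
//   // fresh cell: type is None/Undefined
//   PropertyCell::SetValueInferType(cell, v1);  // type becomes Constant(v1)
//   PropertyCell::SetValueInferType(cell, v1);  // Constant(v1) is kept
//   PropertyCell::SetValueInferType(cell, v2);  // widens to Any; code that
//                                               // depended on the cell's old
//                                               // type is deoptimized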
16415 
16416 void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
16417  Handle<Object> value) {
16418  cell->set_value(*value);
16419  if (!HeapType::Any()->Is(cell->type())) {
16420  Handle<HeapType> new_type = UpdatedType(cell, value);
16421  cell->set_type(*new_type);
16422  }
16423 }
16424 
16425 
16426 void PropertyCell::AddDependentCompilationInfo(CompilationInfo* info) {
16427  Handle<DependentCode> dep(dependent_code());
16428  Handle<DependentCode> codes =
16429  DependentCode::Insert(dep, DependentCode::kPropertyCellChangedGroup,
16430  info->object_wrapper());
16431  if (*codes != dependent_code()) set_dependent_code(*codes);
16432  info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
16433  Handle<HeapObject>(this), info->zone());
16434 }
16435 
16436 
16437 const char* GetBailoutReason(BailoutReason reason) {
16438  ASSERT(reason < kLastErrorMessage);
16439 #define ERROR_MESSAGES_TEXTS(C, T) T,
16440  static const char* error_messages_[] = {
16441  ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
16442  };
16443 #undef ERROR_MESSAGES_TEXTS
16444  return error_messages_[reason];
16445 }
16446 
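// Illustrative sketch (not part of objects.cc): GetBailoutReason above relies
// on the usual X-macro trick -- the same list macro is expanded once for the
// BailoutReason enum and once (here) for the message strings, keeping the two
// in sync. A self-contained analogue with a hypothetical list macro:
//
//   #define DEMO_MESSAGES_LIST(V)           \
//     V(kDemoFirst, "first demo message")   \
//     V(kDemoSecond, "second demo message")
//   #define DEMO_TEXT(C, T) T,
//   static const char* demo_messages[] = { DEMO_MESSAGES_LIST(DEMO_TEXT) };
//   #undef DEMO_TEXT
//   // demo_messages[1] == "second demo message"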
16447 
16448 } } // namespace v8::internal
bool IsExternalArrayElementsKind(ElementsKind kind)
bool IsHoleyElementsKind(ElementsKind kind)
static const int kCodeEntryOffset
Definition: objects.h:7518
static DependentCode * ForObject(Handle< HeapObject > object, DependencyGroup group)
Definition: objects.cc:11527
Code * FindFirstHandler()
Definition: objects.cc:10518
bool HasElementsTransition()
Definition: objects-inl.h:4802
static void Insert(Name *key, AccessorInfo *entry, int valid_descriptors, Handle< DescriptorArray > array)
Definition: objects.cc:3143
static void EnsureHasInitialMap(Handle< JSFunction > function)
Definition: objects.cc:9792
static Handle< Object > SetValue(Handle< ExternalFloat32Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14684
Object * Lookup(Name *name, Code::Flags flags)
Definition: objects.cc:7484
static PropertyAttributes GetPropertyAttributeWithInterceptor(Handle< JSObject > object, Handle< JSObject > receiver, Handle< Name > name, bool continue_search)
Definition: objects.cc:4284
MUST_USE_RESULT MaybeObject * AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
Definition: heap-inl.h:138
void InvalidateRelocation()
Definition: objects.cc:10298
uint32_t HashForObject(Object *obj)
Definition: objects.cc:13740
void Shrink(int length)
Definition: objects.cc:7834
void set_has_deoptimization_support(bool value)
Definition: objects-inl.h:4404
#define PROFILE(IsolateGetter, Call)
Definition: cpu-profiler.h:194
virtual uint32_t Hash()=0
void RemoveFromCodeCache(Name *name, Code *code, int index)
Definition: objects.cc:7192
static Handle< Map > CurrentMapForDeprecated(Handle< Map > map)
Definition: objects.cc:2784
static Handle< Object > SetPropertyWithCallback(Handle< JSObject > object, Handle< Object > structure, Handle< Name > name, Handle< Object > value, Handle< JSObject > holder, StrictMode strict_mode)
Definition: objects.cc:2874
Object * LookupRegExp(String *source, JSRegExp::Flags flags)
Definition: objects.cc:14934
MUST_USE_RESULT MaybeObject * CopyDropDescriptors()
Definition: objects.cc:6717
static bool compare(const Chars1 *a, const Chars2 *b, int len)
Definition: objects.cc:8766
#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)
Definition: isolate.h:120
void set(int index, Object *value)
Definition: objects-inl.h:2147
bool is_hidden_prototype()
Definition: objects.h:5889
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
Definition: flags.cc:208
static bool compare(const uint8_t *a, const uint8_t *b, int len)
Definition: objects.cc:8790
static Handle< FixedArray > SetFastElementsCapacityAndLength(Handle< JSObject > object, int capacity, int length, SetFastElementsCapacitySmiMode smi_mode)
Definition: objects.cc:11143
PropertyCell * GetPropertyCell(LookupResult *result)
Definition: objects.cc:14745
Handle< String > InternalizeString(Handle< String > str)
Definition: factory.cc:225
uint32_t bit_field3()
Definition: objects-inl.h:4762
void PrintF(const char *format,...)
Definition: v8utils.cc:40
bool IsOneByteEqualTo(Vector< const uint8_t > str)
Definition: objects.cc:9002
static void DecodeMinorKey(int minor_key, CompareIC::State *left_state, CompareIC::State *right_state, CompareIC::State *handler_state, Token::Value *op)
Definition: code-stubs.cc:350
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
virtual MaybeObject * AsObject(Heap *heap)
bool InNewSpace(Object *object)
Definition: heap-inl.h:307
Handle< PropertyCell > NewPropertyCell(Handle< Object > value)
Definition: factory.cc:793
static ExecutableAccessorInfo * cast(Object *obj)
unsigned stack_slots()
Definition: objects-inl.h:4468
bool EquivalentToForNormalization(Map *other, PropertyNormalizationMode mode)
Definition: objects.cc:9414
static TypeFeedbackInfo * cast(Object *obj)
static int Decode(Isolate *isolate, FILE *f, byte *begin, byte *end)
static Handle< Object > ToNumber(Isolate *isolate, Handle< Object > obj, bool *exc)
Definition: execution.cc:713
static String * cast(Object *obj)
MUST_USE_RESULT MaybeObject * Add(Key key, Object *value, PropertyDetails details)
Definition: objects.cc:15239
void SetValue(Object *value, bool is_value_nan)
Definition: objects.cc:16259
void DetailsAtPut(int entry, PropertyDetails value)
Definition: objects.h:3947
void copy(int from, int to)
Definition: objects-inl.h:4282
static int AppendUnique(Handle< Object > descriptors, Handle< FixedArray > array, int valid_descriptors)
Definition: objects.cc:3184
static void SetPrototype(Handle< JSFunction > function, Handle< Object > value)
Definition: objects.cc:9745
bool Equals(Name *other)
Definition: objects-inl.h:2954
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2978
const uint32_t kTwoByteStringTag
Definition: objects.h:610
static void DeleteHiddenProperty(Handle< JSObject > object, Handle< Name > key)
Definition: objects.cc:4903
uint32_t NumberToUint32(Object *number)
Definition: v8conversions.h:61
void ClearTypeFeedbackInfo(Heap *heap)
Definition: objects.cc:10590
static void SetObserved(Handle< JSObject > object)
Definition: objects.cc:5611
bool function_with_prototype()
Definition: objects-inl.h:4071
void set_opt_count(int opt_count)
Definition: objects-inl.h:5449
Handle< DescriptorArray > NewDescriptorArray(int number_of_descriptors, int slack=0)
Definition: factory.cc:169
static Failure * InternalError()
Definition: objects-inl.h:1239
BailoutId ast_id(uint32_t index)
Definition: full-codegen.h:892
virtual MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13755
int NumberOfLocalElements(PropertyAttributes filter)
Definition: objects.cc:13452
uint32_t HashForObject(Object *other)
Definition: objects.cc:13830
bool ToUint32(uint32_t *value)
Definition: objects.cc:200
String * constructor_name()
Definition: objects.cc:1936
Isolate * isolate()
Definition: heap-inl.h:624
static Handle< ObjectHashTable > Shrink(Handle< ObjectHashTable > table, Handle< Object > key)
Definition: objects.cc:15752
static Handle< Map > CopyInstallDescriptors(Handle< Map > map, int new_descriptor, Handle< DescriptorArray > descriptors)
Definition: objects.cc:6849
void ReportFailedAccessCheckWrapper(Handle< JSObject > receiver, v8::AccessType type)
Definition: isolate.h:752
int unused_property_fields()
Definition: objects-inl.h:4022
void SourceCodePrint(StringStream *accumulator, int max_length)
Definition: objects.cc:9957
AllocationSite * FindFirstAllocationSite()
Definition: objects.cc:10458
static Handle< Object > SetPropertyViaPrototypesWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, Handle< Name > name, Handle< Object > value, PropertyAttributes attributes, StrictMode strict_mode, bool *done)
Definition: objects.cc:3565
MUST_USE_RESULT MaybeObject * Put(MapHandleList *maps, int code_kind, Code *code)
Definition: objects.cc:7813
static const char * Name(Value tok)
Definition: token.h:198
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
uint32_t HashForObject(Object *obj)
Definition: objects.cc:15049
static uint32_t StringHash(Object *obj)
Definition: objects.cc:13850
bool IsFastObjectElementsKind(ElementsKind kind)
MUST_USE_RESULT MaybeObject * GetLocalPropertyPostInterceptor(Object *receiver, Name *name, PropertyAttributes *attributes)
Definition: objects.cc:13191
void IteratePointer(ObjectVisitor *v, int offset)
Definition: objects-inl.h:1391
void set_number_of_entries(DependencyGroup group, int value)
Definition: objects-inl.h:4242
NormalizedMapSharingMode
Definition: objects.h:260
StaticResource< ConsStringIteratorOp > * objects_string_iterator()
Definition: isolate.h:942
#define LOG(isolate, Call)
Definition: log.h:86
RegExpKey(String *string, JSRegExp::Flags flags)
Definition: objects.cc:13717
Object * GetNormalizedProperty(const LookupResult *result)
Definition: objects.cc:680
AccessorDescriptor * FindAccessor(Name *name)
Definition: objects.cc:5944
Map * elements_transition_map()
Definition: objects-inl.h:4813
void CopyKeysTo(FixedArray *storage, PropertyAttributes filter, SortMode sort_mode)
static const int kContextOffset
Definition: objects.h:6743
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:4673
static const int kEnumCacheBridgeIndicesCacheIndex
Definition: objects.h:3495
MUST_USE_RESULT MaybeObject * CopyJSObject(JSObject *source, AllocationSite *site=NULL)
Definition: heap.cc:4712
MUST_USE_RESULT MaybeObject * GetElementWithHandler(Object *receiver, uint32_t index)
Definition: objects.cc:501
static Handle< Map > GeneralizeRepresentation(Handle< Map > map, int modify_index, Representation new_representation, StoreMode store_mode)
Definition: objects.cc:2666
Object * GetHash()
Definition: objects.cc:1070
uint64_t double_to_uint64(double d)
Definition: double.h:37
void set_top(Address top)
Definition: spaces.h:2498
static void SetIdentityHash(Handle< JSObject > object, Handle< Smi > hash)
Definition: objects.cc:4781
int NumberOfOwnDescriptors()
Definition: objects.h:6174
static const int kDependentCodeOffset
Definition: objects.h:6438
bool SameValue(Handle< Value > that) const
Definition: api.cc:3009
void CopyTo(int pos, FixedArray *dest, int dest_pos, int len)
Definition: objects.cc:7902
MUST_USE_RESULT MaybeObject * GetPropertyWithDefinedGetter(Object *receiver, JSReceiver *getter)
Definition: objects.cc:529
virtual MUST_USE_RESULT Handle< Object > Get(Handle< Object > receiver, Handle< JSObject > holder, uint32_t key, Handle< FixedArrayBase > backing_store=Handle< FixedArrayBase >::null())=0
static const int kArrayIndexHashMask
Definition: objects.h:8663
static void SetInstancePrototype(Handle< JSFunction > function, Handle< Object > value)
Definition: objects.cc:9702
static HeapObject * cast(Object *obj)
static bool HasLocalElement(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6366
MaybeObject * AllocateNewStorageFor(Heap *heap, Representation representation)
Definition: objects-inl.h:279
#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size)
static Handle< T > cast(Handle< S > that)
Definition: handles.h:75
bool IsMatch(Object *strings)
Definition: objects.cc:15037
bool MayNamedAccess(JSObject *receiver, Object *key, v8::AccessType type)
Definition: isolate.cc:775
static const int kMaxHashCalcLength
Definition: objects.h:8926
MUST_USE_RESULT MaybeObject * AllocateCodeCache()
Definition: heap.cc:2677
void set_pre_allocated_property_fields(int value)
Definition: objects-inl.h:4004
static CodeCacheHashTable * cast(Object *obj)
static const int kProtoTransitionElementsPerEntry
Definition: objects.h:6133
T Max(T a, T b)
Definition: utils.h:227
Object * AsObject(Heap *heap)
Definition: objects.cc:13623
static AccessorPair * cast(Object *obj)
MUST_USE_RESULT MaybeObject * AddKeysFromJSArray(JSArray *array)
Definition: objects.cc:7843
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
void ZapPrototypeTransitions()
Definition: objects.cc:11487
PropertyAttributes property_attributes()
Definition: objects-inl.h:6417
static uint32_t StringSharedHashHelper(String *source, SharedFunctionInfo *shared, StrictMode strict_mode, int scope_position)
Definition: objects.cc:13657
HeapObject * UncheckedPrototypeTransitions()
static ExternalTwoByteString * cast(Object *obj)
bool IsMatch(Object *obj)
Definition: objects.cc:13725
TraversableMap * GetAndResetParent()
Definition: objects.cc:7320
int GetEnumElementKeys(FixedArray *storage)
Definition: objects.cc:13597
void ZapTransitions()
Definition: objects.cc:11474
static bool HasRealElementProperty(Handle< JSObject > object, uint32_t index)
Definition: objects.cc:13259
SeededNumberDictionary * element_dictionary()
Definition: objects-inl.h:6148
static const int kEnumCacheBridgeLength
Definition: objects.h:3493
static Map * cast(Object *obj)
static uint16_t TrailSurrogate(uint32_t char_code)
Definition: unicode.h:134
static MUST_USE_RESULT MaybeObject * Allocate(Isolate *isolate, int deopt_entry_count, PretenureFlag pretenure)
Definition: objects.cc:8168
void FindAllTypes(TypeHandleList *types)
Definition: objects.cc:10503
EnumIndexComparator(NameDictionary *dict)
Definition: objects.cc:15445
MUST_USE_RESULT MaybeObject * Put(String *src, Context *context, Object *value)
Definition: objects.cc:14943
Handle< DeclaredAccessorDescriptor > NewDeclaredAccessorDescriptor()
Definition: factory.cc:675
kSerializedDataOffset Object
Definition: objects-inl.h:5016
static Handle< Object > SetPrototype(Handle< JSObject > object, Handle< Object > value, bool skip_hidden_prototypes=false)
Definition: objects.cc:11703
bool ToInt32(int32_t *value)
Definition: objects.cc:184
static ByteArray * cast(Object *obj)
T & at(int i) const
Definition: list.h:90
void set_type(HeapType *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects.cc:16386
static bool HasRealNamedProperty(Handle< JSObject > object, Handle< Name > key)
Definition: objects.cc:13241
Vector< T > SubVector(int from, int to)
Definition: utils.h:412
DEFINE_string(gc_fake_mmap (concatenated flag help text elided)
bool LookupStringIfExists(String *str, String **result)
Definition: objects.cc:14845
Builtins * builtins()
Definition: isolate.h:948
int int32_t
Definition: unicode.cc:47
static AllocationSite * cast(Object *obj)
static Object * RawUninitializedSentinel(Heap *heap)
Definition: objects-inl.h:6691
#define CASE(name)
static FreeSpace * cast(Object *obj)
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8673
#define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size)
void ClearInstanceofCache()
Definition: heap-inl.h:754
static bool HasRealNamedCallbackProperty(Handle< JSObject > object, Handle< Name > key)
Definition: objects.cc:13283
Bootstrapper * bootstrapper()
Definition: isolate.h:858
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2695
bool VerifyBailoutId(BailoutId id)
Definition: objects.cc:10055
MUST_USE_RESULT MaybeObject * Uint32ToString(uint32_t value, bool check_number_string_cache=true)
Definition: heap.cc:3673
bool SameValue(Object *other)
Definition: objects.cc:1102
void Relocate(intptr_t delta)
Definition: objects.cc:10319
static SeqOneByteString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * ToObject(Isolate *isolate)
Definition: objects.cc:91
void set_object_at(int i, Object *object)
Definition: objects-inl.h:4262
WriteBarrierMode GetWriteBarrierMode(const DisallowHeapAllocation &promise)
Definition: objects-inl.h:2350
static Failure * Exception()
Definition: objects-inl.h:1244
MUST_USE_RESULT MaybeObject * PutRegExp(String *src, JSRegExp::Flags flags, FixedArray *value)
Definition: objects.cc:14996
ExternalArrayType type()
Definition: objects.cc:14472
bool MayIndexedAccessWrapper(Handle< JSObject > receiver, uint32_t index, v8::AccessType type)
Definition: isolate.h:747
#define REGISTER(N, C)
const int kMaxInt
Definition: globals.h:248
Object * FindInCodeCache(Name *name, Code::Flags flags)
Definition: objects.cc:7173
static void Clear(Isolate *isolate, Address address, ConstantPoolArray *constant_pool)
Definition: ic.cc:429
static bool enabled()
Definition: serialize.h:485
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
Definition: objects.cc:3322
int length()
Definition: api.cc:775
bool marked_for_deoptimization()
Definition: objects-inl.h:4553
void set_map(Map *value)
Definition: objects-inl.h:1341
bool MakeExternal(v8::String::ExternalStringResource *resource)
Definition: objects.cc:1250
int GetLocalElementKeys(FixedArray *storage, PropertyAttributes filter)
Definition: objects.cc:13471
static Handle< Object > GetElementNoExceptionThrown(Isolate *isolate, Handle< Object > object, uint32_t index)
Definition: objects-inl.h:1071
#define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size)
void DeoptimizeDependentCodeGroup(Isolate *isolate, DependentCode::DependencyGroup group)
Definition: objects.cc:11692
int SourceStatementPosition(Address pc)
Definition: objects.cc:10415
Map * LookupElementsTransitionMap(ElementsKind elements_kind)
Definition: objects.cc:3265
static PropertyAttributes GetPropertyAttributeWithFailedAccessCheck(Handle< JSObject > object, LookupResult *result, Handle< Name > name, bool continue_search)
Definition: objects.cc:616
uint32_t Flags
Definition: objects.h:5184
static const int kSize
Definition: objects.h:9245
bool IsTwoByteEqualTo(Vector< const uc16 > str)
Definition: objects.cc:9018
void CheckArrayAbuse(JSObject *obj, const char *op, uint32_t key, bool allow_appending)
Definition: elements.cc:520
uint16_t SlicedStringGet(int index)
Definition: objects.cc:8629
static Handle< Object > GetPropertyWithCallback(Handle< JSObject > object, Handle< Object > receiver, Handle< Object > structure, Handle< Name > name)
Definition: objects.cc:392
bool Contains(Object *key)
Definition: objects.cc:15686
Object * SetValue(uint32_t index, Object *value)
Definition: objects.cc:14505
RandomNumberGenerator * random_number_generator()
Definition: isolate-inl.h:75
void SetEnumLength(int length)
Definition: objects.h:6189
Handle< Object > Lookup(MapHandleList *maps, Code::Flags flags)
Definition: objects.cc:7700
static Handle< Object > PrepareSlowElementsForSort(Handle< JSObject > object, uint32_t limit)
Definition: objects.cc:14232
static Handle< Object > SetHiddenProperty(Handle< JSObject > object, Handle< Name > key, Handle< Object > value)
Definition: objects.cc:4861
bool IsCacheable(Isolate *isolate)
Definition: objects.cc:8218
#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)
Definition: heap-inl.h:671
static const int kDescriptorLengthIndex
Definition: objects.h:3488
List< Handle< Map > > MapHandleList
Definition: list.h:218
void(* IndexedPropertyDeleterCallback)(uint32_t index, const PropertyCallbackInfo< Boolean > &info)
Definition: v8.h:3379
#define ASSERT(condition)
Definition: checks.h:329
void EvictOptimizedCodeMap(SharedFunctionInfo *code_map_holder)
bool CanTransitionToMoreGeneralFastElementsKind(ElementsKind elements_kind, bool allow_only_packed)
static void AppendCallbackDescriptors(Handle< Map > map, Handle< Object > descriptors)
Definition: objects.cc:3173
bool MarkCodeForDeoptimization(Isolate *isolate, DependentCode::DependencyGroup group)
Definition: objects.cc:11654
static Handle< Object > SetLocalPropertyIgnoreAttributes(Handle< JSObject > object, Handle< Name > key, Handle< Object > value, PropertyAttributes attributes, ValueType value_type=OPTIMAL_REPRESENTATION, StoreMode mode=ALLOW_AS_CONSTANT, ExtensibilityCheck extensibility_check=PERFORM_EXTENSIBILITY_CHECK)
Definition: objects.cc:4141
MUST_USE_RESULT MaybeObject * UnionOfKeys(FixedArray *other)
Definition: objects.cc:7861
static Handle< JSObject > DeepCopy(Handle< JSObject > object, AllocationSiteUsageContext *site_context, DeepCopyHints hints=kNoHints)
Definition: objects.cc:5861
void LocalLookup(Name *name, LookupResult *result, bool search_hidden_prototypes=false)
Definition: objects.cc:5956
static Handle< Object > GetPropertyWithReceiver(Handle< Object > object, Handle< Object > receiver, Handle< Name > name, PropertyAttributes *attributes)
Definition: objects.cc:159
static Script * cast(Object *obj)
MUST_USE_RESULT MaybeObject * PutEval(String *src, Context *context, SharedFunctionInfo *value, int scope_position)
Definition: objects.cc:14967
ExternalArrayType
Definition: v8.h:2113
unsigned short uint16_t
Definition: unicode.cc:46
void(* AccessorSetterCallback)(Local< String > property, Local< Value > value, const PropertyCallbackInfo< void > &info)
Definition: v8.h:2146
static Handle< Map > RawCopy(Handle< Map > map, int instance_size)
Definition: objects.cc:6652
int SourcePosition(Address pc)
Definition: objects.cc:10383
#define VISITOR_SYNCHRONIZATION_TAGS_LIST(V)
Definition: objects.h:10696
static Dictionary< Shape, Key > * cast(Object *obj)
Definition: objects.h:3925
#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)
Definition: heap-inl.h:686
MUST_USE_RESULT MaybeObject * EnsureCapacity(int n, Key key)
Definition: objects.cc:15176
static Context * cast(Object *context)
Definition: contexts.h:244
Object * SlowReverseLookup(Object *value)
Definition: objects.cc:6624
const uc16 * GetTwoByteData(unsigned start)
Definition: objects.cc:8330
static const int kSourceIndex
Definition: objects.h:7926
static SharedFunctionInfo * cast(Object *obj)
static Handle< Object > SetAccessor(Handle< JSObject > object, Handle< AccessorInfo > info)
Definition: objects.cc:6482
const uint32_t kStringRepresentationMask
Definition: objects.h:615
int SizeFromMap(Map *map)
Definition: objects-inl.h:3946
TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
Definition: objects.cc:14788
MUST_USE_RESULT MaybeObject * AllocateTwoByteInternalizedString(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:171
#define ERROR_MESSAGES_LIST(V)
Definition: objects.h:1093
static MUST_USE_RESULT MaybeObject * Allocate(Isolate *isolate, int number_of_descriptors, int slack=0)
Definition: objects.cc:7922
static bool HasHiddenProperties(Handle< JSObject > object)
Definition: objects.cc:4925
#define CHECK(condition)
Definition: checks.h:75
static Handle< Map > CopyForObserved(Handle< Map > map)
Definition: objects.cc:6933
static void MarkCodeAsExecuted(byte *sequence, Isolate *isolate)
Definition: objects.cc:10636
static const int kShortSize
Definition: objects.h:9243
Name * GetSortedKey(int descriptor_number)
Definition: objects-inl.h:2612
Name * GetKey(int transition_number)
static PropertyAttributes GetPropertyAttributeWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, Handle< Name > name)
Definition: objects.cc:3702
MUST_USE_RESULT MaybeObject * GetPropertyWithHandler(Object *receiver, Name *name)
Definition: objects.cc:471
void(* IndexedPropertySetterCallback)(uint32_t index, Local< Value > value, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:3359
static Handle< Map > CopyDropDescriptors(Handle< Map > map)
Definition: objects.cc:6712
Handle< Object > GetProperty(Handle< JSReceiver > obj, const char *name)
Definition: handles.cc:196
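Illustrative usage (an assumed sketch, not part of the generated index): the const char* overload of GetProperty above returns the property value as a handle, assuming an active HandleScope and a Handle< JSReceiver > named receiver in scope.

    // Hypothetical fragment; "receiver" and the surrounding scope are assumptions.
    Handle<Object> length = GetProperty(receiver, "length");
    if (length->IsSmi()) {
      int elements = Smi::cast(*length)->value();  // read the Smi payload
    }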
const bool FLAG_enable_slow_asserts
Definition: checks.h:307
SmartArrayPointer< uc16 > ToWideCString(RobustnessFlag robustness_flag=FAST_STRING_TRAVERSAL)
Definition: objects.cc:8351
int number_of_entries(DependencyGroup group)
Definition: objects-inl.h:4236
const int kFastElementsKindCount
Definition: elements-kind.h:90
#define RETURN_IF_EMPTY_HANDLE(isolate, call)
Definition: isolate.h:151
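Illustrative usage (assumed, not from the listing): RETURN_IF_EMPTY_HANDLE guards a handle-returning call inside MaybeObject*-returning code; if the handle came back empty, the pending exception is propagated to the caller.

    // Hypothetical fragment inside a MaybeObject*-returning function.
    // LookupSomething is a placeholder name, not a real V8 helper.
    Handle<Object> result = LookupSomething(isolate, receiver, name);
    RETURN_IF_EMPTY_HANDLE(isolate, result);  // bail out with the pending exception
    return *result;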
void LookupCallbackProperty(Name *name, LookupResult *result)
Definition: objects.cc:6016
static FixedTypedArrayBase * cast(Object *obj)
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects.cc:6990
MUST_USE_RESULT MaybeObject * AddToOptimizedCodeMap(Context *native_context, Code *code, FixedArray *literals, BailoutId osr_ast_id)
Definition: objects.cc:9503
Map * FindLastMatchMap(int verbatim, int length, DescriptorArray *descriptors)
Definition: objects.cc:2616
bool ReferencesObject(Object *obj)
Definition: objects.cc:5307
Factory * factory()
Definition: isolate.h:995
bool IsFastElementsKind(ElementsKind kind)
static ExternalAsciiString * cast(Object *obj)
void Lookup(Name *name, LookupResult *result)
Definition: objects.cc:6002
void InvalidateEmbeddedObjects()
Definition: objects.cc:10303
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
Definition: objects-inl.h:6124
PropertyAttributes
void set_the_hole(int index)
Definition: objects-inl.h:2413
static Handle< Object > Freeze(Handle< JSObject > object)
Definition: objects.cc:5498
MUST_USE_RESULT MaybeObject * Put(Name *name, Code *code)
Definition: objects.cc:7623
static const int kInvalidStamp
Definition: date.h:63
Object * AsObject(Heap *heap)
Definition: objects.cc:15059
#define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL)
Definition: heap-inl.h:690
void ConstantPoolIterateBody(ObjectVisitor *v)
Definition: objects.cc:9422
static Code * cast(Object *obj)
bool IsMatch(Object *string)
Definition: objects.cc:13610
static const unsigned kMaxEncodedSize
Definition: unicode.h:163
virtual const uint16_t * data() const =0
void AddDependentCode(DependentCode::DependencyGroup group, Handle< Code > code)
Definition: objects.cc:11505
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
Definition: heap.cc:4034
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
static Symbol * cast(Object *obj)
#define HANDLE_CODE_AGE(AGE)
Handle< JSObject > VisitElementOrProperty(Handle< JSObject > object, Handle< JSObject > value)
Definition: objects.cc:5655
static MUST_USE_RESULT Handle< String > Truncate(Handle< SeqString > string, int new_length)
Definition: objects.cc:9144
Object * GetValue(int descriptor_number)
Definition: objects-inl.h:2646
int ToInt() const
Definition: utils.h:1162
static Handle< Object > GetPropertyPostInterceptor(Handle< JSObject > object, Handle< Object > receiver, Handle< Name > name, PropertyAttributes *attributes)
Definition: objects.cc:13168
void PrintDeoptLocation(FILE *out, int bailout_id)
Definition: objects.cc:10767
AccessorPair * GetLocalElementAccessorPair(uint32_t index)
Definition: objects.cc:11826
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:1199
TransitionArray * unchecked_transition_array()
Definition: objects-inl.h:4945
#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value)
Definition: isolate.h:137
bool MayNamedAccessWrapper(Handle< JSObject > receiver, Handle< Object > key, v8::AccessType type)
Definition: isolate.h:742
static Smi * cast(Object *object)
static const char * AllocationIndexToString(int index)
void AddDependentCompilationInfo(CompilationInfo *info)
Definition: objects.cc:16426
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
Definition: objects-inl.h:6843
void PrintOn(FILE *out)
Definition: objects.cc:9263
void FindAllMaps(MapHandleList *maps)
Definition: objects.cc:10491
void ClearCodeCache(Heap *heap)
Definition: objects-inl.h:6600
static const int kOsrAstIdOffset
Definition: objects.h:6746
static Handle< Object > SetValue(Handle< ExternalFloat64Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14715
static const int kZeroHash
Definition: objects.h:8520
bool Equals(String *other)
Definition: objects-inl.h:2969
static const int kHeaderSize
Definition: objects.h:1891
static Handle< Map > CopyGeneralizeAllRepresentations(Handle< Map > map, int modify_index, StoreMode store_mode, PropertyAttributes attributes, const char *reason)
Definition: objects.cc:2489
static Handle< Map > CopyReplaceDescriptors(Handle< Map > map, Handle< DescriptorArray > descriptors, TransitionFlag flag, Handle< Name > name)
Definition: objects.cc:6811
Handle< Object > CallTrap(const char *name, Handle< Object > derived_trap, int argc, Handle< Object > args[])
Definition: objects.cc:3798
Map * GetMarkerMap(Isolate *isolate)
Definition: objects.cc:1064
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
static Handle< ObjectHashSet > Add(Handle< ObjectHashSet > table, Handle< Object > key)
Definition: objects.cc:15697
bool IsMatch(Object *string)
Definition: objects.cc:13824
int isnan(double x)
Object * GetInternalField(int index)
Definition: objects-inl.h:1924
MUST_USE_RESULT MaybeObject * AddNumberEntry(uint32_t key, Object *value)
Definition: objects.cc:15323
void set_dictionary_map(bool value)
Definition: objects-inl.h:4125
void Add(Vector< const char > format, Vector< FmtElm > elms)
static void TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:12779
static const int kSize
Definition: objects.h:10077
void IterateBody(InstanceType type, int object_size, ObjectVisitor *v)
Definition: objects.cc:1760
#define ASSERT_OBJECT_ALIGNED(address)
Definition: spaces.h:102
bool has_instance_call_handler()
Definition: objects-inl.h:4157
static Handle< Object > TryMigrateInstance(Handle< JSObject > instance)
Definition: objects.cc:3866
uint8_t byte
Definition: globals.h:185
Code * GetCodeFromOptimizedCodeMap(int index)
Definition: objects.cc:9569
void DeprecateTransitionTree()
Definition: objects.cc:2526
static bool IsOneByte(const uc16 *chars, int length)
Definition: objects.h:8985
SmartArrayPointer< char > ToCString(AllowNullsFlag allow_nulls, RobustnessFlag robustness_flag, int offset, int length, int *length_output=0)
Definition: objects.cc:8272
uint32_t TranslateAstIdToPcOffset(BailoutId ast_id)
Definition: objects.cc:10619
Object * ValueAt(int entry)
Definition: objects.h:3930
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2002
static void MakeCodeAgeSequenceYoung(byte *sequence, Isolate *isolate)
Definition: objects.cc:10631
static void GeneralizeFieldRepresentation(Handle< JSObject > object, int modify_index, Representation new_representation, StoreMode store_mode)
Definition: objects.cc:2468
MUST_USE_RESULT MaybeObject * AllocateHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2969
void(* AccessorGetterCallback)(Local< String > property, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:2141
const uint16_t * SeqTwoByteStringGetData(unsigned start)
Definition: objects.cc:8373
void CopyFrom(int dst_index, DescriptorArray *src, int src_index, const WhitenessWitness &)
Definition: objects.cc:7960
static const int kMaxCachedPrototypeTransitions
Definition: objects.h:6415
void(* IndexedPropertyGetterCallback)(uint32_t index, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:3350
void set_null(int index)
Definition: objects-inl.h:2404
static Handle< DependentCode > Insert(Handle< DependentCode > entries, DependencyGroup group, Handle< Object > object)
Definition: objects.cc:11540
static PropertyAttributes GetLocalElementAttribute(Handle< JSReceiver > object, uint32_t index)
Definition: objects-inl.h:6376
UnicodeCache * unicode_cache()
Definition: isolate.h:908
void RemoveByIndex(Object *name, Code *code, int index)
Definition: objects.cc:7539
static void DefineAccessor(Handle< JSObject > object, Handle< Name > name, Handle< Object > getter, Handle< Object > setter, PropertyAttributes attributes, v8::AccessControl access_control=v8::DEFAULT)
Definition: objects.cc:6288
static JSFunctionProxy * cast(Object *obj)
static uchar Length(uchar chr, int previous)
Definition: unicode-inl.h:163
V8_INLINE bool IsNull() const
Definition: v8.h:6247
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:2384
bool IsMatch(Object *other)
Definition: objects.cc:13642
static const int kTransitionSize
Definition: transitions.h:168
int GetIndex(Name *name, Code::Flags flags)
Definition: objects.cc:7646
void CopyEnumCacheFrom(DescriptorArray *array)
Definition: objects.h:3357
static bool compare(const uint16_t *a, const uint16_t *b, int len)
Definition: objects.cc:8781
static Handle< DeclaredAccessorDescriptor > Create(Isolate *isolate, const DeclaredAccessorDescriptorData &data, Handle< DeclaredAccessorDescriptor > previous)
Definition: objects.cc:15879
static void DeoptimizeGlobalObject(JSObject *object)
Definition: deoptimizer.cc:483
static const int kLiteralNativeContextIndex
Definition: objects.h:7531
virtual size_t length() const =0
void FailurePrint(FILE *out=stdout)
Definition: objects.cc:1157
void Iterate(ObjectVisitor *v)
Definition: objects.cc:1751
#define UNREACHABLE()
Definition: checks.h:52
Object * Lookup(Name *name, Code::Flags flags)
Definition: objects.cc:7615
void RemoveCompilationInfo(DependentCode::DependencyGroup group, CompilationInfo *info)
Definition: objects.cc:11605
bool CanDeoptAt(Address pc)
Definition: objects.cc:10789
DEFINE_string(expose_natives_as (concatenated flag help text elided)
Handle< JSArrayBuffer > GetBuffer()
Definition: objects.cc:16370
virtual MUST_USE_RESULT MaybeObject * AddElementsToFixedArray(Object *receiver, JSObject *holder, FixedArray *to, FixedArrayBase *from=NULL)=0
static void Update(Handle< PolymorphicCodeCache > cache, MapHandleList *maps, Code::Flags flags, Handle< Code > code)
Definition: objects.cc:7662
size (concatenated flag help text elided)
Definition: flags.cc:211
T * start() const
Definition: utils.h:426
void(* NamedPropertyQueryCallback)(Local< String > property, const PropertyCallbackInfo< Integer > &info)
Definition: v8.h:3323
PolymorphicCodeCacheHashTableKey(MapHandleList *maps, int code_flags)
Definition: objects.cc:7718
static uint32_t ComputeUtf8Hash(Vector< const char > chars, uint32_t seed, int *utf16_length_out)
Definition: objects.cc:9214
v8::internal::Object * get(int index)
Definition: api.cc:780
bool WouldConvertToSlowElements(Handle< Object > key)
Definition: objects.cc:13008
bool IsUtf8EqualTo(Vector< const char > str, bool allow_prefix_match=false)
Definition: objects.cc:8972
static SeededNumberDictionary * cast(Object *obj)
Definition: objects.h:4104
void Append(Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2723
void(* NamedPropertySetterCallback)(Local< String > property, Local< Value > value, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:3312
virtual const char * NameOfCPURegister(int reg) const
bool MarkAsUndetectable()
Definition: objects.cc:8955
int NumberOfElementsFilterAttributes(PropertyAttributes filter)
Definition: objects.cc:15396
String * hidden_string()
Definition: heap.h:1349
bool ComputeArrayIndex(uint32_t *index)
Definition: objects.cc:9099
IntrusiveMapTransitionIterator(TransitionArray *transition_array)
Definition: objects.cc:7204
const char * Mnemonic() const
Definition: objects.cc:2251
bool IsTransitionElementsKind(ElementsKind kind)
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static void PrintElementsTransition(FILE *file, Handle< JSObject > object, ElementsKind from_kind, Handle< FixedArrayBase > from_elements, ElementsKind to_kind, Handle< FixedArrayBase > to_elements)
Definition: objects.cc:1534
bool Contains(Address a)
Definition: spaces.h:2427
static Cell * cast(Object *obj)
Handle< String > NewSubString(Handle< String > str, int begin, int end)
Definition: factory.h:183
static Handle< Map > PutPrototypeTransition(Handle< Map > map, Handle< Object > prototype, Handle< Map > target_map)
Definition: objects.cc:11434
#define MUST_USE_RESULT
Definition: globals.h:381
flags (concatenated flag help text elided)
Definition: flags.cc:665
bool IsFastPackedElementsKind(ElementsKind kind)
friend class RelocIterator
Definition: objects.h:5688
bool IsSymbol() const
Definition: api.cc:2369
bool Contains(Address a)
Definition: spaces-inl.h:179
void Recompute(DependentCode *entries)
Definition: objects.cc:11518
MUST_USE_RESULT MaybeObject * Put(FixedArray *key, Map *value)
Definition: objects.cc:15074
static MUST_USE_RESULT MaybeObject * GetPropertyOrFail(Handle< Object > object, Handle< Object > receiver, LookupResult *result, Handle< Name > key, PropertyAttributes *attributes)
Definition: objects.cc:842
bool PassesFilter(const char *raw_filter)
Definition: objects.cc:9857
void set_undefined(int index)
Definition: objects-inl.h:2394
static SlicedString * cast(Object *obj)
static void MemCopy(void *dest, const void *src, size_t size)
Definition: platform.h:399
static const int kProtoTransitionMapOffset
Definition: objects.h:6135
int pre_allocated_property_fields()
Definition: objects-inl.h:3933
void set_expected_nof_properties(int value)
void SetNumberOfElements(int nof)
Definition: objects.h:3759
static uint32_t update(uint32_t previous, bool value)
Definition: utils.h:296
MUST_USE_RESULT MaybeObject * AsElementsKind(ElementsKind kind)
Definition: objects.cc:3360
Context * native_context()
Definition: contexts.cc:67
Local< Value > GetPrototype()
Definition: api.cc:3192
static uint32_t MapsHashHelper(MapHandleList *maps, int code_flags)
Definition: objects.cc:7749
void IteratePointers(ObjectVisitor *v, int start, int end)
Definition: objects-inl.h:1385
Object * GetConstant(int descriptor_number)
Definition: objects-inl.h:2670
void MarkForConcurrentOptimization()
Definition: objects.cc:9455
MUST_USE_RESULT MaybeObject * AddEntry(Key key, Object *value, PropertyDetails details, uint32_t hash)
Definition: objects.cc:15257
static bool IsValidElementsTransition(ElementsKind from_kind, ElementsKind to_kind)
Definition: objects.cc:12848
static const uchar kMaxNonSurrogateCharCode
Definition: unicode.h:121
MUST_USE_RESULT MaybeObject * LookupKey(HashTableKey *key, Object **s)
Definition: objects.cc:14873
static const int kNotFound
Definition: transitions.h:145
static Failure * cast(MaybeObject *object)
Definition: objects-inl.h:667
void ClearAllICsByKind(Code::Kind kind)
Definition: heap.cc:483
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)
Definition: heap-inl.h:679
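Illustrative usage (assumed sketch): CALL_HEAP_FUNCTION is the bridge from raw MaybeObject*-returning allocation code to the handle-based API; it retries the call after requesting GC on allocation failure and returns a Handle of the requested type.

    // Hypothetical wrapper showing the usual pattern; the function name is made up.
    Handle<FixedArray> NewRetriedFixedArray(Isolate* isolate, int length) {
      CALL_HEAP_FUNCTION(isolate,
                         isolate->heap()->AllocateFixedArray(length),
                         FixedArray);
    }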
void FindAndReplace(const FindAndReplacePattern &pattern)
Definition: objects.cc:10470
void ResetForNewContext(int new_ic_age)
Definition: objects.cc:10133
static Handle< Object > SetValue(Handle< ExternalInt8Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14572
#define ERROR_MESSAGES_TEXTS(C, T)
bool ToArrayIndex(uint32_t *index)
Definition: objects-inl.h:2072
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
ElementsKind GetElementsKind()
Definition: objects-inl.h:5999
byte * instruction_start()
Definition: objects-inl.h:5857
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:3313
static void PrintTop(Isolate *isolate, FILE *file, bool print_args, bool print_line_number)
Definition: frames.cc:804
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5302
const int kPointerSize
Definition: globals.h:268
#define MAKE_STRUCT_CASE(NAME, Name, name)
void check(i::Vector< const uint8_t > string)
static const int kMaxCachedArrayIndexLength
Definition: objects.h:8649
MUST_USE_RESULT MaybeObject * ShareDescriptor(DescriptorArray *descriptors, Descriptor *descriptor)
Definition: objects.cc:6734
static PropertyAttributes GetPropertyAttributePostInterceptor(Handle< JSObject > object, Handle< JSObject > receiver, Handle< Name > name, bool continue_search)
Definition: objects.cc:4261
static const char *const kTags[kNumberOfSyncTags]
Definition: objects.h:10723
MUST_USE_RESULT MaybeObject * CopyUpTo(int enumeration_index)
Definition: objects.h:3454
Handle< Object > NewTypeError(const char *message, Vector< Handle< Object > > args)
Definition: factory.cc:1039
Object * LookupEval(String *src, Context *context, StrictMode strict_mode, int scope_position)
Definition: objects.cc:14920
uint32_t HashForObject(Key key, Object *object)
Definition: objects.h:3646
static Oddball * cast(Object *obj)
static PolymorphicCodeCacheHashTable * cast(Object *obj)
static Address & Address_at(Address addr)
Definition: v8memory.h:79
static uint16_t LeadSurrogate(uint32_t char_code)
Definition: unicode.h:131
unsigned int FastD2UI(double x)
Handle< Object > ToBoolean(bool value)
Definition: factory.cc:2022
static Handle< ObjectHashSet > EnsureCapacity(Handle< ObjectHashSet > table, int n, Handle< Object > key, PretenureFlag pretenure=NOT_TENURED)
Definition: objects.cc:15665
FlatStringReader(Isolate *isolate, Handle< String > str)
Definition: objects.cc:8430
static const int kMaxInstanceSize
Definition: objects.h:2748
static Handle< Object > GetPropertyWithInterceptor(Handle< JSObject > object, Handle< Object > receiver, Handle< Name > name, PropertyAttributes *attributes)
Definition: objects.cc:13205
Object ** RawFieldOfElementAt(int index)
Definition: objects.h:3264
int GetFieldIndex(int descriptor_number)
Definition: objects-inl.h:2664
MUST_USE_RESULT MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:7768
static Handle< JSObject > Copy(Handle< JSObject > object)
Definition: objects.cc:5636
const int kHeapObjectTag
Definition: v8.h:5473
bool Equals(const Representation &other) const
Object * GetPrototype(Isolate *isolate)
Definition: objects.cc:1032
static void UpdateMapCodeCache(Handle< HeapObject > object, Handle< Name > name, Handle< Code > code)
Definition: objects.cc:4552
bool IsJSObjectMap()
Definition: objects.h:6363
static PropertyAttributes GetElementAttributeWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, uint32_t index)
Definition: objects.cc:3766
MUST_USE_RESULT MaybeObject * GenerateNewEnumerationIndices()
Definition: objects.cc:15117
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition: globals.h:370
void set_inobject_properties(int value)
Definition: objects-inl.h:3998
Object * RawFastPropertyAt(int index)
Definition: objects-inl.h:1964
Handle< Object > GetSourceCode()
Definition: objects.cc:9916
static Handle< ObjectHashSet > Remove(Handle< ObjectHashSet > table, Handle< Object > key)
Definition: objects.cc:15720
static int GetDeoptimizationId(Isolate *isolate, Address addr, BailoutType type)
Definition: deoptimizer.cc:701
int32_t Int32Value() const
Definition: api.cc:2929
static bool Contains(Name *key, AccessorInfo *entry, int valid_descriptors, Handle< DescriptorArray > array)
Definition: objects.cc:3137
MUST_USE_RESULT MaybeObject * CopyAddDescriptor(Descriptor *descriptor, TransitionFlag flag)
Definition: objects.cc:7002
bool IsBoolean() const
Definition: api.cc:2421
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset)
Definition: objects.cc:10608
static const int kProtoTransitionNumberOfEntriesOffset
Definition: objects.h:6132
static void TransformToFastProperties(Handle< JSObject > object, int unused_property_fields)
Definition: objects.cc:4663
STATIC_ASSERT(NUMBER_OF_KINDS<=16)
Handle< HeapNumber > NewHeapNumber(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:1022
DEFINE_bool(code_comments (concatenated flag help text elided)
static int TimeInDay(int64_t time_ms, int days)
Definition: date.h:88
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
Definition: objects.cc:15091
MUST_USE_RESULT MaybeObject * InternalizeString(String *str)
Definition: heap.cc:6009
uint32_t EntryForProbe(Key key, Object *k, int probe, uint32_t expected)
Definition: objects.cc:13972
static Handle< Object > GetOrCreateIdentityHash(Handle< JSReceiver > object)
Definition: objects-inl.h:6342
static const char * Kind2String(Kind kind)
Definition: objects.cc:10803
void SetCapacity(int capacity)
Definition: objects.h:3769
static Handle< PropertyCell > EnsurePropertyCell(Handle< JSGlobalObject > global, Handle< Name > name)
Definition: objects.cc:14752
bool ShouldConvertToFastDoubleElements(bool *has_smi_only_elements)
Definition: objects.cc:13079
Handle< ByteArray > NewByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:738
const Register pc
static FunctionTemplateInfo * cast(Object *obj)
static const int kIsNotArrayIndexMask
Definition: objects.h:8638
Vector< const uc16 > ToUC16Vector()
Definition: objects.h:8757
uint32_t FindInsertionEntry(uint32_t hash)
Definition: objects.cc:14103
Handle< FixedArray > NewFixedArray(int size, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:53
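Illustrative usage (assumed sketch, with an Isolate* named isolate in scope): Factory::NewFixedArray allocates on the heap and hands back a Handle directly, so callers avoid raw MaybeObject* handling.

    Handle<FixedArray> array = isolate->factory()->NewFixedArray(16);
    array->set(0, Smi::FromInt(42));  // store a Smi value in the first slot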
virtual bool IsMatch(Object *string)
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
static Handle< Map > Get(Handle< NormalizedMapCache > cache, Handle< JSObject > object, PropertyNormalizationMode mode)
Definition: objects.cc:4498
bool IsOneByteRepresentationUnderneath()
Definition: objects-inl.h:333
bool DictionaryElementsInPrototypeChainOnly()
Definition: objects.cc:6197
static const int kDescriptorSize
Definition: objects.h:3509
SafepointEntry GetSafepointEntry(Address pc)
Definition: objects.cc:10435
int length() const
Definition: utils.h:420
OldSpace * old_pointer_space()
Definition: heap.h:638
static const int kPropertiesOffset
Definition: objects.h:2755
static void EnsureDescriptorSlack(Handle< Map > map, int slack)
Definition: objects.cc:3083
Handle< Foreign > object_wrapper()
Definition: compiler.h:306
Map * InternalizedStringMapForString(String *str)
Definition: heap.cc:4959
static bool Equals(State *state_1, State *state_2, unsigned to_check)
Definition: objects.cc:8862
#define ASSERT_LE(v1, v2)
Definition: checks.h:334
Object * Lookup(Object *key)
Definition: objects.cc:15823
static FixedDoubleArray * cast(Object *obj)
PretenureFlag GetPretenureMode()
Definition: objects.cc:12650
static const char * State2String(State state)
Definition: full-codegen.h:112
static PropertyAttributes GetPropertyAttributeWithReceiver(Handle< JSReceiver > object, Handle< JSReceiver > receiver, Handle< Name > name)
Definition: objects.cc:4327
int GetIndex(Object *name, Code *code)
Definition: objects.cc:7523
bool IsTwoByteRepresentation()
Definition: objects-inl.h:327
static const int kSize
Definition: objects.h:6440
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:4662
bool is_inline_cache_stub()
Definition: objects-inl.h:4568
static Handle< Object > SetOwnElement(Handle< JSObject > object, uint32_t index, Handle< Object > value, StrictMode strict_mode)
Definition: objects.cc:12401
bool IsFastSmiElementsKind(ElementsKind kind)
StringsKey(FixedArray *strings)
Definition: objects.cc:15035
bool has_pending_exception()
Definition: isolate.h:587
void VisitOneByteString(const uint8_t *chars, unsigned length)
Definition: objects.cc:9065
bool IsMoreGeneralThan(int verbatim, int valid, int new_size, DescriptorArray *other)
Definition: objects.cc:8071
static Handle< DescriptorArray > CopyUpToAddAttributes(Handle< DescriptorArray > desc, int enumeration_index, PropertyAttributes attributes)
Definition: objects.cc:7061
static void DigestTransitionFeedback(Handle< AllocationSite > site, ElementsKind to_kind)
Definition: objects.cc:12671
FixedArray * GetEnumIndicesCache()
Definition: objects.h:3375
void set_kind(byte kind)
Definition: objects-inl.h:1827
static ElementsAccessor * ForArray(FixedArrayBase *array)
Definition: elements.cc:1856
static void UpdateCodeCache(Handle< Map > map, Handle< Name > name, Handle< Code > code)
Definition: objects.cc:7149
static const int kHeaderSize
Definition: objects.h:9042
static int GetOutputInfo(DeoptimizationOutputData *data, BailoutId node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:718
void NotifyLeafMapLayoutChange()
Definition: objects-inl.h:4221
double FastUI2D(unsigned x)
Definition: conversions.h:89
static const int kCachedCodeOffset
Definition: objects.h:6744
void CopyFrom(const CodeDesc &desc)
Definition: objects.cc:10327
bool BooleanValue() const
Definition: api.cc:2824
InterceptorInfo * GetNamedInterceptor()
Definition: objects.cc:13148
void SortPairs(FixedArray *numbers, uint32_t len)
Definition: objects.cc:13389
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Definition: heap.cc:6887
MUST_USE_RESULT MaybeObject * Shrink(Key key)
Definition: objects.cc:15206
int CompareChars(const lchar *lhs, const rchar *rhs, int chars)
Definition: utils.h:852
PropertyDetails GetDetails(int descriptor_number)
Definition: objects-inl.h:2652
static PropertyCell * cast(Object *obj)
int TenToThe(int exponent)
Definition: utils.h:880
Object ** GetFirstElementAddress()
Definition: objects-inl.h:1425
bool Is(Object *obj)
TraversableMap * ChildIteratorNext()
Definition: objects.cc:7340
static unsigned Encode(char *out, uchar c, int previous, bool replace_invalid=false)
Definition: unicode-inl.h:114
void SetNumberOfDeletedElements(int nod)
Definition: objects.h:3764
unsigned back_edge_table_offset()
Definition: objects-inl.h:4501
Failure * Throw(Object *exception, MessageLocation *location=NULL)
Definition: isolate.cc:923
MaybeObject *(* getter)(Isolate *isolate, Object *object, void *data)
Definition: v8globals.h:351
DeclaredAccessorDescriptorIterator(DeclaredAccessorDescriptor *descriptor)
Definition: objects.cc:15858
InlineCacheState ic_state()
Definition: objects-inl.h:4308
byte * relocation_start()
Definition: objects-inl.h:5877
static const int kArrayIndexValueBits
Definition: objects.h:8654
#define STATIC_ASCII_VECTOR(x)
Definition: utils.h:570
void Reset(String *string, unsigned offset=0)
Definition: objects-inl.h:3403
V8_INLINE bool IsUndefined() const
Definition: v8.h:6229
Assembler * origin
Definition: v8globals.h:242
void set_construction_count(int value)
Definition: objects-inl.h:5224
uint32_t pc_offset(uint32_t index)
Definition: full-codegen.h:901
double get_scalar(int index)
Definition: objects-inl.h:2173
static AllocationMemento * cast(Object *obj)
static v8::internal::Handle< To > OpenHandle(v8::Local< From > handle)
Definition: api.h:308
Object * Lookup(Object *key)
Definition: objects.cc:15761
static void Fix(Handle< JSProxy > proxy)
Definition: objects.cc:3776
static uint32_t MakeArrayIndexHash(uint32_t value, int length)
Definition: objects.cc:9183
V8_INLINE Handle< Primitive > Undefined(Isolate *isolate)
Definition: v8.h:6541
Handle< Map > FindTransitionedMap(MapHandleList *candidates)
Definition: objects.cc:3211
uint16_t ConsStringGet(int index)
Definition: objects.cc:8598
static Handle< Object > GetOrCreateHash(Handle< Object > object, Isolate *isolate)
Definition: objects.cc:1091
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1352
bool LookupTwoCharsStringIfExists(uint16_t c1, uint16_t c2, String **result)
Definition: objects.cc:14858
static void DoGenerateNewEnumerationIndices(Handle< NameDictionary > dictionary)
Definition: objects.cc:15110
static BailoutId None()
Definition: utils.h:1164
Vector< const char > CStrVector(const char *data)
Definition: utils.h:574
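Illustrative usage (assumed sketch): CStrVector wraps a NUL-terminated C string as a Vector< const char > so it can be handed to the Vector-based helpers indexed here.

    Vector<const char> chars = CStrVector("prototype");
    int n = chars.length();  // length of the wrapped string, excluding the terminator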
void(* IndexedPropertyQueryCallback)(uint32_t index, const PropertyCallbackInfo< Integer > &info)
Definition: v8.h:3369
MUST_USE_RESULT MaybeObject * GetProperty(Name *key)
Definition: objects-inl.h:1081
int Weekday(int days)
Definition: date.h:95
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
bool EquivalentToForTransition(Map *other)
Definition: objects.cc:9409
Object * SlowReverseLookup(Object *value)
Definition: objects.cc:15505
#define DECLARE_TAG(ignore1, name, ignore2)
Definition: objects.cc:10226
static JSArray * cast(Object *obj)
void HeapSortPairs(FixedArray *content, FixedArray *numbers, int len)
Definition: objects.cc:13342
#define T(name, string, precedence)
Definition: token.cc:48
static int SizeFor(int length)
Definition: objects.h:3067
static Object * GetField(Object *date, Smi *index)
Definition: objects.cc:16167
bool IsTerminalElementsKind(ElementsKind kind)
MUST_USE_RESULT MaybeObject * EnsureCapacity(int n, HashTableKey *key, PretenureFlag pretenure=NOT_TENURED)
static Handle< Object > SetElement(Handle< JSReceiver > object, uint32_t index, Handle< Object > value, PropertyAttributes attributes, StrictMode strict_mode)
Definition: objects.cc:12387
Map * FindUpdatedMap(int verbatim, int length, DescriptorArray *descriptors)
Definition: objects.cc:2582
static const int kProtoTransitionHeaderSize
Definition: objects.h:6131
StringSharedKey(String *source, SharedFunctionInfo *shared, StrictMode strict_mode, int scope_position)
Definition: objects.cc:13633
#define ASSERT_LT(v1, v2)
Definition: checks.h:333
static uchar ValueOf(const byte *str, unsigned length, unsigned *cursor)
Definition: unicode-inl.h:152
Context * context()
Definition: isolate.h:557
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static JSDate * cast(Object *obj)
Definition: objects-inl.h:5776
bool Equals(unsigned length, String *string_1, String *string_2)
Definition: objects.cc:8868
void SetNextEnumerationIndex(int index)
Definition: objects.h:3979
V8_INLINE bool IsString() const
Definition: v8.h:6265
static ElementsAccessor * ForKind(ElementsKind elements_kind)
Definition: elements.h:178
static Handle< Object > SetFastElement(Handle< JSObject > object, uint32_t index, Handle< Object > value, StrictMode strict_mode, bool check_prototype)
Definition: objects.cc:12018
static SeqTwoByteString * cast(Object *obj)
Representation generalize(Representation other)
void GetLocalPropertyNames(FixedArray *storage, int index, PropertyAttributes filter=NONE)
Definition: objects.cc:13431
#define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T)
Definition: isolate.h:128
void LookupRealNamedProperty(Name *name, LookupResult *result)
Definition: objects.cc:3419
bool HasTransitionArray()
Definition: objects-inl.h:4807
const int kElementsKindCount
Definition: elements-kind.h:89
Definition: v8.h:2107
bool IsTransitionableFastElementsKind(ElementsKind from_kind)
static const int kHeaderSize
Definition: objects.h:3016
PropertyNormalizationMode
Definition: objects.h:252
void Lookup(Name *name, LookupResult *result)
Definition: objects.cc:135
FlatContent GetFlatContent()
Definition: objects.cc:8229
void set(int index, double value)
Definition: objects-inl.h:2207
static const int kMaxShortPrintLength
Definition: objects.h:8929
Handle< JSObject > StructureWalk(Handle< JSObject > object)
Definition: objects.cc:5676
static int SNPrintF(Vector< char > str, const char *format,...)
bool IsKey(Object *k)
Definition: objects.h:3701
Handle< NameDictionary > NameDictionaryShrink(Handle< NameDictionary > dict, Handle< Name > name)
Definition: objects.cc:769
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3819
static const int kInitialLength
Definition: objects.h:6748
void PrintElementsKind(FILE *out, ElementsKind kind)
SimpleTransitionFlag
Definition: objects.h:282
static void OptimizeAsPrototype(Handle< JSObject > object)
Definition: objects.cc:9649
void Rehash(Key key)
Definition: objects.cc:14007
static const int kArrayIndexHashLengthShift
Definition: objects.h:8660
uint32_t DoubleToUint32(double x)
Definition: conversions.h:106
static void Initialize(Handle< JSArray > array, int capacity, int length=0)
Definition: objects.cc:11244
bool AsArrayIndex(uint32_t *index)
Definition: objects-inl.h:6267
static void MigrateInstance(Handle< JSObject > instance)
Definition: objects.cc:3852
static const int kMapOffset
Definition: objects.h:1890
void PrintName(FILE *out=stdout)
Definition: objects.cc:9839
bool ShouldConvertToSlowElements(int new_capacity)
Definition: objects.cc:13023
int NumberOfDescribedProperties(DescriptorFlag which=OWN_DESCRIPTORS, PropertyAttributes filter=NONE)
Definition: objects.cc:5913
static Handle< Object > SetValue(Handle< ExternalUint16Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14621
bool HasPrototypeTransitions()
Definition: objects-inl.h:4882
static Handle< Object > SetPropertyWithDefinedSetter(Handle< JSReceiver > object, Handle< JSReceiver > setter, Handle< Object > value)
Definition: objects.cc:2958
bool FindHandlers(CodeHandleList *code_list, int length=-1)
Definition: objects.cc:10531
int32_t DoubleToInt32(double x)
bool is_the_hole(int index)
Definition: objects-inl.h:2133
void IteratePrefix(ObjectVisitor *visitor)
Definition: objects.cc:13859
static const int kEntriesStart
Definition: objects.h:6742
static HeapNumber * cast(Object *obj)
bool CanHaveMoreTransitions()
Definition: objects-inl.h:4819
static PropertyAttributes GetElementAttributeWithReceiver(Handle< JSObject > object, Handle< JSReceiver > receiver, uint32_t index, bool continue_search)
Definition: objects.cc:4398
static void EnqueueChangeRecord(Handle< JSObject > object, const char *type, Handle< Name > name, Handle< Object > old_value)
Definition: objects.cc:2184
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
Definition: objects.cc:8635
static MUST_USE_RESULT MaybeObject * Allocate(Heap *heap, int at_least_space_for, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure=NOT_TENURED)
NameDictionary * property_dictionary()
Definition: objects-inl.h:6142
static Handle< Object > SetElementsLength(Handle< JSArray > array, Handle< Object > length)
Definition: objects.cc:11330
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:6258
void InitializeRepresentations(Representation representation)
Definition: objects-inl.h:2632
void set_value(double value)
Definition: objects-inl.h:1406
const char * GetBailoutReason(BailoutReason reason)
Definition: objects.cc:16437
static void MigrateToMap(Handle< JSObject > object, Handle< Map > new_map)
Definition: objects.cc:2364
virtual size_t length() const =0
Handle< String > InternalizeOneByteString(Vector< const uint8_t > str)
Definition: factory.cc:232
static double nan_value()
virtual void SetCapacityAndLength(Handle< JSArray > array, int capacity, int length)=0
MUST_USE_RESULT MaybeObject * CopySize(int new_length, PretenureFlag pretenure=NOT_TENURED)
Definition: objects.cc:7879
static Handle< Object > GetElementWithInterceptor(Handle< JSObject > object, Handle< Object > receiver, uint32_t index)
Definition: objects.cc:12887
bool is_compare_ic_stub()
Definition: objects.h:5307
Address NewSpaceTop()
Definition: heap.h:635
const char * ElementsKindToString(ElementsKind kind)
static ObjectHashTable * cast(Object *obj)
Definition: objects.h:4226
static Handle< Object > Call(Isolate *isolate, Handle< Object > callable, Handle< Object > receiver, int argc, Handle< Object > argv[], bool *pending_exception, bool convert_receiver=false)
Definition: execution.cc:153
void TrimOptimizedCodeMap(int shrink_by)
Definition: objects.cc:9637
MUST_USE_RESULT MaybeObject * AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
static Handle< Object > SetProperty(Handle< JSReceiver > object, Handle< Name > key, Handle< Object > value, PropertyAttributes attributes, StrictMode strict_mode, StoreFromKeyed store_mode=MAY_BE_STORE_FROM_KEYED)
Definition: objects.cc:2858
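As an illustration of the property-store entry above, a sketch only: it assumes the class qualification is JSReceiver:: and reuses the factory InternalizeUtf8String and CStrVector entries listed elsewhere in this index; the property name "foo" is purely illustrative.
  // Sketch: performs object.foo = value through the entry above.
  // JSReceiver:: and the SLOPPY strict-mode value are assumptions.
  Handle<Object> SetFooSketch(Isolate* isolate,
                              Handle<JSReceiver> object,
                              Handle<Object> value) {
    Handle<Name> key =
        isolate->factory()->InternalizeUtf8String(CStrVector("foo"));
    return JSReceiver::SetProperty(object, key, value, NONE, SLOPPY);
  }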
bool is_null() const
Definition: handles.h:81
void set_code_no_write_barrier(Code *code)
Definition: objects-inl.h:5538
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
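This helper wraps a raw internal pointer in a Handle owned by the given isolate's current handle scope. A trivial sketch of the pattern (the Map* example is an assumption, not taken from this listing):
  // Sketch: re-wrap a raw Map* in a handle so it stays valid across
  // calls that may trigger allocation and GC.
  Handle<Map> MapHandleSketch(Isolate* isolate, Handle<JSObject> object) {
    Map* raw_map = object->map();
    return handle(raw_map, isolate);
  }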
void set_bailout_reason(BailoutReason reason)
Definition: objects.h:7063
V8_INLINE bool IsEmpty() const
Definition: v8.h:248
#define STRUCT_LIST(V)
Definition: objects.h:590
static void SetNormalizedProperty(Handle< JSObject > object, const LookupResult *result, Handle< Object > value)
Definition: objects.cc:691
V8 runtime flag declarations (DEFINE_* macros; concatenated help-string text omitted)
Definition: flags.cc:317
AccessorDescriptor * GetCallbacks(int descriptor_number)
Definition: objects-inl.h:2681
void set_bit_field3(uint32_t bits)
Definition: objects-inl.h:4754
bool IsNumber() const
Definition: api.cc:2416
InterceptorInfo * GetIndexedInterceptor()
Definition: objects.cc:13158
#define CODE_KIND_LIST(V)
Definition: objects.h:5204
bool IsMapInArrayPrototypeChain()
Definition: objects.cc:3272
MUST_USE_RESULT MaybeObject * CopyWithPreallocatedFieldDescriptors()
Definition: objects.cc:6965
#define IS_POWER_OF_TWO(x)
Definition: utils.h:46
Map * GetTarget(int transition_number)
PropertyType GetType(int descriptor_number)
Definition: objects-inl.h:2659
void YearMonthDayFromDays(int days, int *year, int *month, int *day)
Definition: date.cc:77
static Object * cast(Object *value)
Definition: objects.h:1641
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:280
IncrementalMarking * incremental_marking()
Definition: heap.h:1781
bool Contains(Address addr)
Definition: heap.cc:5929
MUST_USE_RESULT MaybeObject * Put(Object *key, Object *value)
Definition: objects.cc:15831
static void AllocateStorageForMap(Handle< JSObject > object, Handle< Map > map)
Definition: objects.cc:3831
ElementsKind GetInitialFastElementsKind()
void BecomeJSObject(Handle< JSReceiver > object)
Definition: factory.cc:1553
#define UNIMPLEMENTED()
Definition: checks.h:50
uint16_t uc16
Definition: globals.h:309
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:5308
static const int kEnumCacheIndex
Definition: objects.h:3489
void SetNumberOfDescriptors(int number_of_descriptors)
Definition: objects-inl.h:2440
Object * GetBackPointer()
Definition: objects-inl.h:4791
virtual MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13761
void HeapObjectShortPrint(StringStream *accumulator)
Definition: objects.cc:1621
void HeapNumberPrint(FILE *out=stdout)
Definition: objects.cc:1905
PropertyDetails DetailsAt(int entry)
Definition: objects.h:3940
Object * DeleteProperty(int entry, JSObject::DeleteMode mode)
Definition: objects.cc:15191
uint32_t ComputeLongHash(uint64_t key)
Definition: utils.h:335
FixedArray * GetLiteralsFromOptimizedCodeMap(int index)
Definition: objects.cc:9557
void set_bit_field(byte value)
Definition: objects-inl.h:4037
static const int kPrologueOffsetNotSet
Definition: objects.h:5227
void CopyEnumKeysTo(FixedArray *storage)
Definition: objects.cc:15455
static int SizeFor(int length)
Definition: objects.h:9118
void(* NamedPropertyGetterCallback)(Local< String > property, const PropertyCallbackInfo< Value > &info)
Definition: v8.h:3303
static Handle< DescriptorArray > Merge(Handle< DescriptorArray > desc, int verbatim, int valid, int new_size, int modify_index, StoreMode store_mode, Handle< DescriptorArray > other)
Definition: objects.cc:7971
Handle< String > InternalizeUtf8String(Vector< const char > str)
Definition: factory.cc:218
virtual const char * data() const =0
static JSReceiver * cast(Object *obj)
void LocalLookupRealNamedProperty(Name *name, LookupResult *result)
Definition: objects.cc:3371
void PrintInstanceMigration(FILE *file, Map *original_map, Map *new_map)
Definition: objects.cc:1592
static Handle< Object > SetValue(Handle< ExternalUint8Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14587
ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind, bool allow_only_packed)
static JSValue * cast(Object *obj)
Definition: objects-inl.h:5758
void CopyBytes(uint8_t *target, uint8_t *source)
Definition: runtime.cc:1309
bool fits_into(const Representation &other) const
static Handle< HeapType > UpdatedType(Handle< PropertyCell > cell, Handle< Object > value)
Definition: objects.cc:16392
bool ShouldConvertToFastElements()
Definition: objects.cc:13043
static const int kFieldsAdded
Definition: objects.h:2752
void ShortPrint(FILE *out=stdout)
Definition: objects.cc:1123
static void JSArrayUpdateLengthFromIndex(Handle< JSArray > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:12861
static Handle< Object > SetValue(Handle< ExternalInt32Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14638
static void Visit(String *string, unsigned offset, Visitor &visitor, ConsOp &cons_op, int32_t type, unsigned length)
Definition: objects-inl.h:3048
static Handle< SeededNumberDictionary > NormalizeElements(Handle< JSObject > object)
Definition: objects.cc:4704
static Handle< Object > SetValue(Handle< ExternalInt16Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14604
bool is_observed()
Definition: objects.h:5930
friend class Object
Definition: objects.h:2775
#define TYPED_ARRAYS(V)
Definition: objects.h:4663
uint32_t HashForObject(Object *obj)
Definition: objects.cc:7588
static Handle< T > null()
Definition: handles.h:80
CodeCacheHashTableKey(Name *name, Code *code)
Definition: objects.cc:7567
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:198
MUST_USE_RESULT MaybeObject * CopyAsElementsKind(ElementsKind kind, TransitionFlag flag)
Definition: objects.cc:6881
Map * FindFirstMap()
Definition: objects.cc:10464
static const int kProtoTransitionPrototypeOffset
Definition: objects.h:6134
bool IsFalse() const
Definition: api.cc:2352
StringComparator(ConsStringIteratorOp *op_1, ConsStringIteratorOp *op_2)
Definition: objects.cc:8855
static Handle< Map > GetPrototypeTransition(Handle< Map > map, Handle< Object > prototype)
Definition: objects.cc:11416
static bool Contains(Name *key, AccessorInfo *entry, int valid_descriptors, Handle< FixedArray > array)
Definition: objects.cc:3155
static const int kInitialSize
Definition: objects.h:8131
static void Expand(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
Definition: objects.cc:11251
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
bool is_dictionary_map()
Definition: objects-inl.h:4132
void SetInstanceClassName(String *name)
Definition: objects.cc:9834
Object * extension()
Definition: contexts.h:378
void set_owns_descriptors(bool is_shared)
Definition: objects-inl.h:4142
static PropertyAttributes GetLocalPropertyAttribute(Handle< JSReceiver > object, Handle< Name > name)
Definition: objects.cc:4384
InstanceType instance_type()
Definition: objects-inl.h:4012
static JSProxy * cast(Object *obj)
void SetParent(TraversableMap *parent)
Definition: objects.cc:7317
Handle< String > Uint32ToString(uint32_t value)
Definition: factory.cc:1625
static bool ShouldZapGarbage()
Definition: heap.h:1486
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1369
Object * AsObject(Heap *heap)
Definition: objects.cc:13733
Object * Lookup(String *src, Context *context)
Definition: objects.cc:14908
static Handle< Object > SetPropertyForResult(Handle< JSObject > object, LookupResult *result, Handle< Name > name, Handle< Object > value, PropertyAttributes attributes, StrictMode strict_mode, StoreFromKeyed store_mode=MAY_BE_STORE_FROM_KEYED)
Definition: objects.cc:4004
void USE(T)
Definition: globals.h:341
static Handle< ObjectHashSet > Shrink(Handle< ObjectHashSet > table, Handle< Object > key)
Definition: objects.cc:15677
const uint32_t kOneByteStringTag
Definition: objects.h:611
Counters * counters()
Definition: isolate.h:859
void Swap(uint32_t entry1, uint32_t entry2, WriteBarrierMode mode)
Definition: objects.cc:13988
MUST_USE_RESULT MaybeObject * AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:5297
void SmiPrint(FILE *out=stdout)
Definition: objects.cc:1142
static Handle< Object > SetValue(Handle< ExternalUint32Array > array, uint32_t index, Handle< Object > value)
Definition: objects.cc:14653
void EnableDeoptimizationSupport(Code *recompiled)
Definition: objects.cc:10009
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
JSObjectWalkVisitor(ContextObject *site_context, bool copying, JSObject::DeepCopyHints hints)
Definition: objects.cc:5646
bool IsTemplateFor(Object *object)
Definition: objects.cc:219
ZoneList< Handle< HeapObject > > * dependencies(DependentCode::DependencyGroup group)
Definition: compiler.h:260
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:214
static AccessorInfo * cast(Object *obj)
static FixedArray * cast(Object *obj)
static const unsigned kMaxOneByteChar
Definition: unicode.h:164
MUST_USE_RESULT MaybeObject * InternalizeUtf8String(const char *str)
Definition: heap.h:1222
static void SetFastDoubleElementsCapacityAndLength(Handle< JSObject > object, int capacity, int length)
Definition: objects.cc:11205
static const int kHeaderSize
Definition: objects.h:2757
static bool SetupArrayBufferAllocatingData(Isolate *isolate, Handle< JSArrayBuffer > array_buffer, size_t allocated_length, bool initialize=true)
Definition: runtime.cc:793
static uint32_t Hash(String *string, uint32_t seed)
Definition: objects.cc:9035
static const int kEnumCacheBridgeCacheIndex
Definition: objects.h:3494
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
bool IsCompatibleReceiver(Object *receiver)
Definition: objects-inl.h:6427
dependent_code (macro-expanded accessor declaration; flattened offset-constant text omitted)
Definition: objects-inl.h:5047
Object * AsObject(Heap *heap)
Definition: objects.cc:14831
static Handle< TransitionArray > AddTransition(Handle< Map > map, Handle< Name > key, Handle< Map > target, SimpleTransitionFlag flag)
Definition: objects.cc:2458
static Handle< Map > CopyNormalized(Handle< Map > map, PropertyNormalizationMode mode, NormalizedMapSharingMode sharing)
Definition: objects.cc:6684
static HashTable * cast(Object *obj)
Definition: objects-inl.h:2929
ElementsKind elements_kind()
Definition: objects.h:5945
MUST_USE_RESULT MaybeObject * AtNumberPut(uint32_t key, Object *value)
Definition: objects.cc:15336
void set_is_shared(bool value)
Definition: objects-inl.h:4116
static const int kEntryLength
Definition: objects.h:6747
static Handle< Object > PreventExtensions(Handle< JSObject > object)
Definition: objects.cc:5420
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:39
static TypeHandle Constant(i::Handle< i::Object > value, Region *region)
Definition: types.h:235
FixedArray * GetEnumCache()
Definition: objects.h:3361
static Handle< ObjectHashTable > Put(Handle< ObjectHashTable > table, Handle< Object > key, Handle< Object > value)
Definition: objects.cc:15775
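The entry above may grow (reallocate) the table, so callers are expected to adopt the returned handle in place of the old one; a hedged sketch of that pattern:
  // Sketch: the returned table handle replaces the caller's handle,
  // since Put may allocate a larger backing store.
  Handle<ObjectHashTable> PutSketch(Handle<ObjectHashTable> table,
                                    Handle<Object> key,
                                    Handle<Object> value) {
    table = ObjectHashTable::Put(table, key, value);
    return table;
  }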
bool IsOneByteRepresentation()
Definition: objects-inl.h:321
int ExtraICState
Definition: objects.h:310
double FastI2D(int x)
Definition: conversions.h:81
static Handle< Object > DeleteElement(Handle< JSReceiver > object, uint32_t index, DeleteMode mode=NORMAL_DELETION)
Definition: objects.cc:5262
static void NormalizeProperties(Handle< JSObject > object, PropertyNormalizationMode mode, int expected_additional_properties)
Definition: objects.cc:4560
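A sketch of invoking the normalization entry above, assuming the class qualification is JSObject:: and that CLEAR_INOBJECT_PROPERTIES is one of the PropertyNormalizationMode enumerators referenced earlier in this index (both are assumptions of the example):
  // Sketch: move the object's named properties into dictionary mode,
  // reserving no extra property slots beyond what it already has.
  void NormalizeSketch(Handle<JSObject> object) {
    JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
  }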
const uint32_t kMaxUInt32
Definition: globals.h:259
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:272
void SetEntry(int entry, Object *key, Object *value)
Definition: objects-inl.h:6465
static const int kFlagsIndex
Definition: objects.h:7927
Object * GetCallbacksObject(int descriptor_number)
Definition: objects-inl.h:2675
static const uint32_t kHashBitMask
Definition: objects.h:8646
static const int kNextMapIndex
Definition: objects.h:6741
Object * get(int index)
Definition: objects-inl.h:2127
static VisitorId GetVisitorId(int instance_type, int instance_size)
bool IsFastHoleyElementsKind(ElementsKind kind)
static Handle< AccessorPair > Copy(Handle< AccessorPair > pair)
Definition: objects.cc:8154
#define BUILTIN(name)
Definition: builtins.cc:143
static MUST_USE_RESULT Handle< SeededNumberDictionary > Set(Handle< SeededNumberDictionary > dictionary, uint32_t index, Handle< Object > value, PropertyDetails details)
Definition: objects.cc:15342
static Handle< Object > GetElement(Isolate *isolate, Handle< Object > object, uint32_t index)
Definition: objects-inl.h:1060
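A sketch of an indexed read through the entry above, assuming it is Object::GetElement; following the Handle entries in this index, a null result handle is treated here as a pending exception:
  // Sketch: read receiver[index]; propagate a null handle unchanged so
  // the caller can test is_null() for a pending exception.
  Handle<Object> GetElementSketch(Isolate* isolate,
                                  Handle<Object> receiver,
                                  uint32_t index) {
    Handle<Object> element = Object::GetElement(isolate, receiver, index);
    if (element.is_null()) return Handle<Object>::null();
    return element;
  }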
void IterateElements(ObjectVisitor *visitor)
Definition: objects.cc:13865
Name * FindFirstName()
Definition: objects.cc:10550
ContextObject * site_context()
Definition: objects.cc:5663
CodeCacheHashTableKey(Name *name, Code::Flags flags)
Definition: objects.cc:7564
ElementsAccessor * GetElementsAccessor()
Definition: objects-inl.h:6027
void OutputToFile(FILE *out)
static const int kAttachedToSharedFunctionInfo
Definition: objects.h:6479
void DeprecateTarget(Name *key, DescriptorArray *new_descriptors)
Definition: objects.cc:2544
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2986
bool HasFastArgumentsElements()
Definition: objects.cc:11991
HeapObject * obj
static WeakHashTable * cast(Object *obj)
Definition: objects.h:4282
void set_bit_field2(byte value)
Definition: objects-inl.h:4047
void MakeOlder(MarkingParity)
Definition: objects.cc:10654
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:4005
void SwapPairs(FixedArray *numbers, int i, int j)
Definition: objects.cc:13315
void set_marked_for_deoptimization(bool flag)
Definition: objects-inl.h:4560
DEFINE_bool(harmony_strings, ...) flag declaration (help-string text omitted)
static const int kHashShift
Definition: objects.h:8642
#define RUNTIME_ENTRY(name, nargs, ressize)
int NumberOfFields()
Definition: objects.cc:2479
static Name * cast(Object *obj)
bool has_debug_break_slots()
Definition: objects-inl.h:4412
static int LengthOfFixedArray(int deopt_points)
Definition: objects.h:5155
bool InstancesNeedRewriting(Map *target, int target_number_of_fields, int target_inobject, int target_unused)
Definition: objects.cc:2313
static const int kMaxCapacity
Definition: objects.h:3735
int SearchOptimizedCodeMap(Context *native_context, BailoutId osr_ast_id)
Definition: objects.cc:10193
Local< Boolean > ToBoolean() const
Definition: api.cc:2587
static uint32_t RegExpHash(String *string, Smi *flags)
Definition: objects.cc:13746
static Representation None()
JSFunction * closure()
Definition: contexts.h:367
int NumberOfLocalProperties(PropertyAttributes filter=NONE)
Definition: objects.cc:13301
int GetSortedKeyIndex(int descriptor_number)
Definition: objects-inl.h:2607
static int LengthFor(int number_of_descriptors)
Definition: objects.h:3529
#define CODE_AGE_LIST(V)
Definition: builtins.h:50
MUST_USE_RESULT MaybeObject * AtPut(Key key, Object *value)
Definition: objects.cc:15212
Handle< Object > NewError(const char *maker, const char *message, Handle< JSArray > args)
Definition: factory.cc:1153
static Handle< Map > CurrentMapForDeprecatedInternal(Handle< Map > map)
Definition: objects.cc:2797
Vector< Handle< Object > > HandleVector(v8::internal::Handle< T > *elms, int length)
Definition: v8utils.h:118
const int kMaxKeyedPolymorphism
Definition: ic.h:37
static Flags RemoveTypeFromFlags(Flags flags)
Definition: objects-inl.h:4656
static Representation Tagged()
static int DaysFromTime(int64_t time_ms)
Definition: date.h:80
AccessorPair * GetLocalPropertyAccessorPair(Name *name)
Definition: objects.cc:11809
bool IsInt32() const
Definition: api.cc:2431
void JSFunctionIterateBody(int object_size, ObjectVisitor *v)
Definition: objects.cc:9434
Object ** GetKeySlot(int transition_number)
void CopyChars(sinkchar *dest, const sourcechar *src, int chars)
Definition: v8utils.h:279
MUST_USE_RESULT MaybeObject * set_elements_transition_map(Map *transitioned_map)
Definition: objects-inl.h:4845
Map * FindRootMap()
Definition: objects.cc:2571
uint32_t hash_field()
Definition: objects-inl.h:2941
static Handle< Object > DeleteProperty(Handle< JSReceiver > object, Handle< Name > name, DeleteMode mode=NORMAL_DELETION)
Definition: objects.cc:5273
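A sketch of the deletion entry above, assuming the class qualification is JSReceiver:: and relying on the NORMAL_DELETION default shown in the signature; the property name "bar" is illustrative only.
  // Sketch: delete object.bar; the returned handle holds the boolean
  // result of the delete (or is null on a pending exception).
  Handle<Object> DeleteBarSketch(Isolate* isolate,
                                 Handle<JSReceiver> object) {
    Handle<Name> key =
        isolate->factory()->InternalizeUtf8String(CStrVector("bar"));
    return JSReceiver::DeleteProperty(object, key);
  }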
static void DeoptimizeMarkedCode(Isolate *isolate)
Definition: deoptimizer.cc:467
void UpdateToFinishedCode(DependencyGroup group, CompilationInfo *info, Code *code)
Definition: objects.cc:11581
static const int kConstructStubOffset
Definition: objects.h:7106
allows_lazy_compilation (macro-expanded accessor declaration; flattened bit-field-constant text omitted)
Definition: objects-inl.h:5129
static int SizeFor(int length)
Definition: objects.h:9078
Vector< const uint8_t > ToOneByteVector()
Definition: objects.h:8751
static Handle< Map > GeneralizeAllFieldRepresentations(Handle< Map > map, Representation new_representation)
Definition: objects.cc:2770
void SetKey(int transition_number, Name *value)
T Min(T a, T b)
Definition: utils.h:234
MUST_USE_RESULT MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:7595
Object * GetHiddenProperty(Name *key)
Definition: objects.cc:4831
MUST_USE_RESULT MaybeObject * Shrink(Key key)
Definition: objects.cc:14071
static const char * GetStateName(State state)
Definition: ic.cc:2495
static ConsString * cast(Object *obj)
Handle< FixedArray > CopySizeFixedArray(Handle< FixedArray > array, int new_length, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:900
uint32_t HashForObject(Object *obj)
Definition: objects.cc:13681
bool HasDictionaryArgumentsElements()
Definition: objects.cc:12003
void JSObjectShortPrint(StringStream *accumulator)
Definition: objects.cc:1436
static CodeCache * cast(Object *obj)
MUST_USE_RESULT MaybeObject * LookupString(String *key, Object **s)
Definition: objects.cc:14775
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:2121
void DisableOptimization(BailoutReason reason)
Definition: objects.cc:10028
static Context * NativeContextFromLiterals(FixedArray *literals)
Definition: objects.cc:9845
Object * FindNthObject(int n, Map *match_map)
Definition: objects.cc:10441
Handle< Map > NewMap(InstanceType type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: factory.cc:809
void StringShortPrint(StringStream *accumulator)
Definition: objects.cc:1372
MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13834
MaybeObject *(* setter)(Isolate *isolate, JSObject *object, Object *value, void *data)
Definition: v8globals.h:352
void ClearNonLiveTransitions(Heap *heap)
Definition: objects.cc:9320
Object * GetComponent(AccessorComponent component)
Definition: objects.cc:8162
uint32_t HashForObject(Object *key)
Definition: objects.cc:14826
static Handle< Object > GetAccessor(Handle< JSObject > object, Handle< Name > name, AccessorComponent component)
Definition: objects.cc:6565
static const int kMaxValue
Definition: objects.h:1681
bool IsMarkedForConcurrentOptimization()
Definition: objects-inl.h:5509
int NextFreePropertyIndex()
Definition: objects.cc:5930
static const int kCodeCacheOffset
Definition: objects.h:6437
static Handle< Object > PrepareElementsForSort(Handle< JSObject > object, uint32_t limit)
Definition: objects.cc:14342
V8 runtime flag declarations (DEFINE_* macros; concatenated help-string text omitted)
Definition: flags.cc:505
static const int kNotFound
Definition: objects.h:3486
struct v8::internal::IeeeDoubleBigEndianArchType::@42 bits
void SetTarget(int transition_number, Map *target)
String * Operate(String *string, unsigned *offset_out, int32_t *type_out, unsigned *length_out)
Definition: objects.cc:8463
MUST_USE_RESULT MaybeObject * Initialize(Heap *heap, const char *to_string, Object *to_number, byte kind)
Definition: objects.cc:9886
NewSpace * new_space()
Definition: heap.h:637
MUST_USE_RESULT MaybeObject * CopyReplaceDescriptor(DescriptorArray *descriptors, Descriptor *descriptor, int index, TransitionFlag flag)
Definition: objects.cc:7108
Handle< Object > NewNumberFromUint(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:1014
MUST_USE_RESULT MaybeObject * AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2643
#define ARRAY_SIZE(a)
Definition: globals.h:333
void EvictFromOptimizedCodeMap(Code *optimized_code, const char *reason)
Definition: objects.cc:9593
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
void(* NamedPropertyDeleterCallback)(Local< String > property, const PropertyCallbackInfo< Boolean > &info)
Definition: v8.h:3333
void TraverseTransitionTree(TraverseCallback callback, void *data)
Definition: objects.cc:7370
MUST_USE_RESULT MaybeObject * TransformPropertiesToFastFor(JSObject *obj, int unused_property_fields)
Definition: objects.cc:15522
static void Insert(Name *key, AccessorInfo *entry, int valid_descriptors, Handle< FixedArray > array)
Definition: objects.cc:3164
void UpdateMaxNumberKey(uint32_t key)
Definition: objects.cc:15286
static MUST_USE_RESULT MaybeObject * AsObject(Heap *heap, uint32_t key)
Definition: objects-inl.h:6517
void SetSortedKey(int pointer, int descriptor_number)
Definition: objects-inl.h:2617
void InitializeDescriptors(DescriptorArray *descriptors)
Definition: objects-inl.h:4744
static Handle< Object > SetPropertyWithInterceptor(Handle< JSObject > object, Handle< Name > name, Handle< Object > value, PropertyAttributes attributes, StrictMode strict_mode)
Definition: objects.cc:2825
AccessControl
Definition: v8.h:2165
Object * allocation_sites_list()
Definition: heap.h:1364
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2946
FixedArray * GetPrototypeTransitions()
Definition: objects-inl.h:4857
ExtraICState extra_ic_state()
Definition: objects-inl.h:4320
static JSObject * cast(Object *obj)
void ClearInlineCaches()
Definition: objects.cc:10563
static const int kMaxArrayIndexSize
Definition: objects.h:8910
ElementsKind GetNextTransitionElementsKind(ElementsKind kind)
void AddDependentCompilationInfo(DependentCode::DependencyGroup group, CompilationInfo *info)
Definition: objects.cc:11495
Handle< JSArray > NewJSArray(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE, PretenureFlag pretenure=NOT_TENURED)
Definition: factory.cc:1437
static const char *const kTagNames[kNumberOfSyncTags]
Definition: objects.h:10724
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1769
bool IsDictionaryElementsKind(ElementsKind kind)
static MUST_USE_RESULT Handle< SeededNumberDictionary > AddNumberEntry(Handle< SeededNumberDictionary > dictionary, uint32_t key, Handle< Object > value, PropertyDetails details)
Definition: objects.cc:15304
const DeclaredAccessorDescriptorData * Next()
Definition: objects.cc:15867
static const int kLiteralsOffset
Definition: objects.h:6745
int FastD2I(double x)
Definition: conversions.h:74
static const int kNoPreviousCharacter
Definition: unicode.h:120
Local< Uint32 > ToArrayIndex() const
Definition: api.cc:2901
void BecomeJSFunction(Handle< JSReceiver > object)
Definition: factory.cc:1561
Handle< T > CloseAndEscape(Handle< T > handle_value)
Definition: handles-inl.h:146
CompilationInfo * compilation_info_at(int i)
Definition: objects-inl.h:4256
static InterceptorInfo * cast(Object *obj)
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_unused_property_fields(int value)
Definition: objects-inl.h:4027
const uint32_t kStringEncodingMask
Definition: objects.h:609
Name * GetKey(int descriptor_number)
Definition: objects-inl.h:2601
void LookupDescriptor(JSObject *holder, Name *name, LookupResult *result)
Definition: objects-inl.h:2560
Object * Lookup(FixedArray *key)
Definition: objects.cc:15066
MUST_USE_RESULT MaybeObject * AtNumberPut(uint32_t key, Object *value)
Definition: objects.cc:15330
static MUST_USE_RESULT Handle< UnseededNumberDictionary > Set(Handle< UnseededNumberDictionary > dictionary, uint32_t index, Handle< Object > value)
Definition: objects.cc:15353
static Handle< JSObject > DeepWalk(Handle< JSObject > object, AllocationSiteCreationContext *site_context)
Definition: objects.cc:5850
void SetComponents(Object *getter, Object *setter)
Definition: objects.h:10335
Handle< SeededNumberDictionary > NewSeededNumberDictionary(int at_least_space_for)
Definition: factory.cc:116
static DeoptimizationInputData * cast(Object *obj)
static void AddDependentCompilationInfo(Handle< AllocationSite > site, Reason reason, CompilationInfo *info)
Definition: objects.cc:12725
int64_t ToLocal(int64_t time_ms)
Definition: date.h:129
IntrusivePrototypeTransitionIterator(HeapObject *proto_trans)
Definition: objects.cc:7242
String * constructor_name()
Definition: objects.cc:1952
virtual MUST_USE_RESULT MaybeObject * AsObject(Heap *heap)=0
MUST_USE_RESULT MaybeObject * AsObject(Heap *heap)
Definition: objects.cc:13693
MUST_USE_RESULT MaybeObject * Update(Name *name, Code *code)
Definition: objects.cc:7389
static bool HasLocalProperty(Handle< JSReceiver >, Handle< Name > name)
Definition: objects-inl.h:6301
static JSGlobalObject * cast(Object *obj)
void StartInobjectSlackTracking(Map *map)
Definition: objects.cc:10066
static JSFunction * cast(Object *obj)