v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
objects-inl.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 //
28 // Review notes:
29 //
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
33 //
34 
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
37 
38 #include "elements.h"
39 #include "objects.h"
40 #include "contexts.h"
41 #include "conversions-inl.h"
42 #include "heap.h"
43 #include "isolate.h"
44 #include "property.h"
45 #include "spaces.h"
46 #include "store-buffer.h"
47 #include "v8memory.h"
48 #include "factory.h"
49 #include "incremental-marking.h"
50 #include "transitions-inl.h"
51 
52 namespace v8 {
53 namespace internal {
54 
55 PropertyDetails::PropertyDetails(Smi* smi) {
56  value_ = smi->value();
57 }
58 
59 
60 Smi* PropertyDetails::AsSmi() {
61  return Smi::FromInt(value_);
62 }
63 
64 
65 PropertyDetails PropertyDetails::AsDeleted() {
66  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
67  return PropertyDetails(smi);
68 }
69 
70 
71 #define TYPE_CHECKER(type, instancetype) \
72  bool Object::Is##type() { \
73  return Object::IsHeapObject() && \
74  HeapObject::cast(this)->map()->instance_type() == instancetype; \
75  }
76 
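// For illustration (not part of the original file): a use of the macro above such as
// TYPE_CHECKER(Code, CODE_TYPE) later in this file expands to
//
//   bool Object::IsCode() {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
//   }
//
// so every Is##type() predicate is just a heap-object tag test plus a map/instance_type
// load, which keeps the inlined code shallow as the review note above asks for.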
77 
78 #define CAST_ACCESSOR(type) \
79  type* type::cast(Object* object) { \
80  ASSERT(object->Is##type()); \
81  return reinterpret_cast<type*>(object); \
82  }
83 
84 
85 #define INT_ACCESSORS(holder, name, offset) \
86  int holder::name() { return READ_INT_FIELD(this, offset); } \
87  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
88 
89 
90 #define ACCESSORS(holder, name, type, offset) \
91  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
92  void holder::set_##name(type* value, WriteBarrierMode mode) { \
93  WRITE_FIELD(this, offset, value); \
94  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
95  }
96 
97 
98 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
99 #define ACCESSORS_TO_SMI(holder, name, offset) \
100  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
101  void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
102  WRITE_FIELD(this, offset, value); \
103  }
104 
105 
106 // Getter that returns a Smi as an int and writes an int as a Smi.
107 #define SMI_ACCESSORS(holder, name, offset) \
108  int holder::name() { \
109  Object* value = READ_FIELD(this, offset); \
110  return Smi::cast(value)->value(); \
111  } \
112  void holder::set_##name(int value) { \
113  WRITE_FIELD(this, offset, Smi::FromInt(value)); \
114  }
115 
116 
117 #define BOOL_GETTER(holder, field, name, offset) \
118  bool holder::name() { \
119  return BooleanBit::get(field(), offset); \
120  } \
121 
122 
123 #define BOOL_ACCESSORS(holder, field, name, offset) \
124  bool holder::name() { \
125  return BooleanBit::get(field(), offset); \
126  } \
127  void holder::set_##name(bool value) { \
128  set_##field(BooleanBit::set(field(), offset, value)); \
129  }
130 
131 
132 bool Object::IsFixedArrayBase() {
133  return IsFixedArray() || IsFixedDoubleArray();
134 }
135 
136 
137 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
138  // There is a constraint on the object; check.
139  if (!this->IsJSObject()) return false;
140  // Fetch the constructor function of the object.
141  Object* cons_obj = JSObject::cast(this)->map()->constructor();
142  if (!cons_obj->IsJSFunction()) return false;
143  JSFunction* fun = JSFunction::cast(cons_obj);
144  // Iterate through the chain of inheriting function templates to
145  // see if the required one occurs.
146  for (Object* type = fun->shared()->function_data();
147  type->IsFunctionTemplateInfo();
148  type = FunctionTemplateInfo::cast(type)->parent_template()) {
149  if (type == expected) return true;
150  }
151  // Didn't find the required type in the inheritance chain.
152  return false;
153 }
154 
155 
156 bool Object::IsSmi() {
157  return HAS_SMI_TAG(this);
158 }
159 
160 
161 bool Object::IsHeapObject() {
162  return Internals::HasHeapObjectTag(this);
163 }
164 
165 
166 bool MaybeObject::IsHeapObject() {
167  ASSERT(!this->IsFailure());
168  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
169 }
170 
171 
172 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
173 
174 
175 bool Object::IsString() {
176  return Object::IsHeapObject()
177  && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
178 }
179 
180 
181 bool Object::IsSpecObject() {
182  return Object::IsHeapObject()
183  && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
184 }
185 
186 
187 bool Object::IsSpecFunction() {
188  if (!Object::IsHeapObject()) return false;
189  InstanceType type = HeapObject::cast(this)->map()->instance_type();
190  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
191 }
192 
193 
194 bool Object::IsSymbol() {
195  if (!this->IsHeapObject()) return false;
196  uint32_t type = HeapObject::cast(this)->map()->instance_type();
197  // Because the symbol tag is non-zero and no non-string types have the
198  // symbol bit set we can test for symbols with a very simple test
199  // operation.
200  STATIC_ASSERT(kSymbolTag != 0);
201  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
202  return (type & kIsSymbolMask) != 0;
203 }
204 
205 
206 bool Object::IsConsString() {
207  if (!IsString()) return false;
208  return StringShape(String::cast(this)).IsCons();
209 }
210 
211 
212 bool Object::IsSlicedString() {
213  if (!IsString()) return false;
214  return StringShape(String::cast(this)).IsSliced();
215 }
216 
217 
218 bool Object::IsSeqString() {
219  if (!IsString()) return false;
220  return StringShape(String::cast(this)).IsSequential();
221 }
222 
223 
224 bool Object::IsSeqAsciiString() {
225  if (!IsString()) return false;
226  return StringShape(String::cast(this)).IsSequential() &&
227  String::cast(this)->IsAsciiRepresentation();
228 }
229 
230 
231 bool Object::IsSeqTwoByteString() {
232  if (!IsString()) return false;
233  return StringShape(String::cast(this)).IsSequential() &&
234  String::cast(this)->IsTwoByteRepresentation();
235 }
236 
237 
238 bool Object::IsExternalString() {
239  if (!IsString()) return false;
240  return StringShape(String::cast(this)).IsExternal();
241 }
242 
243 
244 bool Object::IsExternalAsciiString() {
245  if (!IsString()) return false;
246  return StringShape(String::cast(this)).IsExternal() &&
247  String::cast(this)->IsAsciiRepresentation();
248 }
249 
250 
251 bool Object::IsExternalTwoByteString() {
252  if (!IsString()) return false;
253  return StringShape(String::cast(this)).IsExternal() &&
254  String::cast(this)->IsTwoByteRepresentation();
255 }
256 
257 bool Object::HasValidElements() {
258  // Dictionary is covered under FixedArray.
259  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
260 }
261 
262 StringShape::StringShape(String* str)
263  : type_(str->map()->instance_type()) {
264  set_valid();
265  ASSERT((type_ & kIsNotStringMask) == kStringTag);
266 }
267 
268 
269 StringShape::StringShape(Map* map)
270  : type_(map->instance_type()) {
271  set_valid();
272  ASSERT((type_ & kIsNotStringMask) == kStringTag);
273 }
274 
275 
276 StringShape::StringShape(InstanceType t)
277  : type_(static_cast<uint32_t>(t)) {
278  set_valid();
279  ASSERT((type_ & kIsNotStringMask) == kStringTag);
280 }
281 
282 
283 bool StringShape::IsSymbol() {
284  ASSERT(valid());
285  STATIC_ASSERT(kSymbolTag != 0);
286  return (type_ & kIsSymbolMask) != 0;
287 }
288 
289 
290 bool String::IsAsciiRepresentation() {
291  uint32_t type = map()->instance_type();
292  return (type & kStringEncodingMask) == kAsciiStringTag;
293 }
294 
295 
296 bool String::IsTwoByteRepresentation() {
297  uint32_t type = map()->instance_type();
298  return (type & kStringEncodingMask) == kTwoByteStringTag;
299 }
300 
301 
302 bool String::IsAsciiRepresentationUnderneath() {
303  uint32_t type = map()->instance_type();
304  STATIC_ASSERT(kIsIndirectStringTag != 0);
305  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
306  ASSERT(IsFlat());
307  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
308  case kAsciiStringTag:
309  return true;
310  case kTwoByteStringTag:
311  return false;
312  default: // Cons or sliced string. Need to go deeper.
313  return GetUnderlying()->IsAsciiRepresentation();
314  }
315 }
316 
317 
318 bool String::IsTwoByteRepresentationUnderneath() {
319  uint32_t type = map()->instance_type();
320  STATIC_ASSERT(kIsIndirectStringTag != 0);
321  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
322  ASSERT(IsFlat());
323  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
324  case kAsciiStringTag:
325  return false;
326  case kTwoByteStringTag:
327  return true;
328  default: // Cons or sliced string. Need to go deeper.
329  return GetUnderlying()->IsTwoByteRepresentation();
330  }
331 }
332 
333 
334 bool String::HasOnlyAsciiChars() {
335  uint32_t type = map()->instance_type();
336  return (type & kStringEncodingMask) == kAsciiStringTag ||
337  (type & kAsciiDataHintMask) == kAsciiDataHintTag;
338 }
339 
340 
341 bool StringShape::IsCons() {
342  return (type_ & kStringRepresentationMask) == kConsStringTag;
343 }
344 
345 
346 bool StringShape::IsSliced() {
347  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
348 }
349 
350 
351 bool StringShape::IsIndirect() {
352  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
353 }
354 
355 
356 bool StringShape::IsExternal() {
357  return (type_ & kStringRepresentationMask) == kExternalStringTag;
358 }
359 
360 
361 bool StringShape::IsSequential() {
362  return (type_ & kStringRepresentationMask) == kSeqStringTag;
363 }
364 
365 
366 StringRepresentationTag StringShape::representation_tag() {
367  uint32_t tag = (type_ & kStringRepresentationMask);
368  return static_cast<StringRepresentationTag>(tag);
369 }
370 
371 
372 uint32_t StringShape::encoding_tag() {
373  return type_ & kStringEncodingMask;
374 }
375 
376 
377 uint32_t StringShape::full_representation_tag() {
378  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
379 }
380 
381 
382 STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
383  Internals::kFullStringRepresentationMask);
384 
385 STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
386  Internals::kStringEncodingMask);
387 
388 
389 bool StringShape::IsSequentialAscii() {
390  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
391 }
392 
393 
394 bool StringShape::IsSequentialTwoByte() {
395  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
396 }
397 
398 
399 bool StringShape::IsExternalAscii() {
400  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
401 }
402 
403 
404 STATIC_CHECK((kExternalStringTag | kAsciiStringTag) ==
405  Internals::kExternalAsciiRepresentationTag);
406 
408 
409 
410 bool StringShape::IsExternalTwoByte() {
411  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
412 }
413 
414 
415 STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
416  Internals::kExternalTwoByteRepresentationTag);
417 
419 
420 uc32 FlatStringReader::Get(int index) {
421  ASSERT(0 <= index && index <= length_);
422  if (is_ascii_) {
423  return static_cast<const byte*>(start_)[index];
424  } else {
425  return static_cast<const uc16*>(start_)[index];
426  }
427 }
428 
429 
430 bool Object::IsNumber() {
431  return IsSmi() || IsHeapNumber();
432 }
433 
434 
435 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
436 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
437 
438 
439 bool Object::IsFiller() {
440  if (!Object::IsHeapObject()) return false;
441  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
442  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
443 }
444 
445 
446 TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)
447 
448 
449 bool Object::IsExternalArray() {
450  if (!Object::IsHeapObject())
451  return false;
452  InstanceType instance_type =
453  HeapObject::cast(this)->map()->instance_type();
454  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
455  instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
456 }
457 
458 
459 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
460 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
461 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
462 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
463 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
464 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
465 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
466 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
467 
468 
469 bool MaybeObject::IsFailure() {
470  return HAS_FAILURE_TAG(this);
471 }
472 
473 
474 bool MaybeObject::IsRetryAfterGC() {
475  return HAS_FAILURE_TAG(this)
476  && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
477 }
478 
479 
480 bool MaybeObject::IsOutOfMemory() {
481  return HAS_FAILURE_TAG(this)
482  && Failure::cast(this)->IsOutOfMemoryException();
483 }
484 
485 
486 bool MaybeObject::IsException() {
487  return this == Failure::Exception();
488 }
489 
490 
491 bool MaybeObject::IsTheHole() {
492  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
493 }
494 
495 
496 Failure* Failure::cast(MaybeObject* obj) {
497  ASSERT(HAS_FAILURE_TAG(obj));
498  return reinterpret_cast<Failure*>(obj);
499 }
500 
501 
502 bool Object::IsJSReceiver() {
503  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
504  return IsHeapObject() &&
505  HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
506 }
507 
508 
509 bool Object::IsJSObject() {
510  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
511  return IsHeapObject() &&
512  HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
513 }
514 
515 
516 bool Object::IsJSProxy() {
517  if (!Object::IsHeapObject()) return false;
518  InstanceType type = HeapObject::cast(this)->map()->instance_type();
519  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
520 }
521 
522 
523 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
524 TYPE_CHECKER(JSSet, JS_SET_TYPE)
525 TYPE_CHECKER(JSMap, JS_MAP_TYPE)
526 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
527 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
528 TYPE_CHECKER(Map, MAP_TYPE)
529 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
530 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
531 
532 
533 bool Object::IsDescriptorArray() {
534  return IsFixedArray();
535 }
536 
537 
538 bool Object::IsTransitionArray() {
539  return IsFixedArray();
540 }
541 
542 
543 bool Object::IsDeoptimizationInputData() {
544  // Must be a fixed array.
545  if (!IsFixedArray()) return false;
546 
547  // There's no sure way to detect the difference between a fixed array and
548  // a deoptimization data array. Since this is used for asserts we can
549  // check that the length is zero or else the fixed size plus a multiple of
550  // the entry size.
551  int length = FixedArray::cast(this)->length();
552  if (length == 0) return true;
553 
554  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
555  return length >= 0 &&
556  length % DeoptimizationInputData::kDeoptEntrySize == 0;
557 }
558 
559 
560 bool Object::IsDeoptimizationOutputData() {
561  if (!IsFixedArray()) return false;
562  // There's actually no way to see the difference between a fixed array and
563  // a deoptimization data array. Since this is used for asserts we can check
564  // that the length is plausible though.
565  if (FixedArray::cast(this)->length() % 2 != 0) return false;
566  return true;
567 }
568 
569 
570 bool Object::IsTypeFeedbackCells() {
571  if (!IsFixedArray()) return false;
572  // There's actually no way to see the difference between a fixed array and
573  // a cache cells array. Since this is used for asserts we can check that
574  // the length is plausible though.
575  if (FixedArray::cast(this)->length() % 2 != 0) return false;
576  return true;
577 }
578 
579 
580 bool Object::IsContext() {
581  if (!Object::IsHeapObject()) return false;
582  Map* map = HeapObject::cast(this)->map();
583  Heap* heap = map->GetHeap();
584  return (map == heap->function_context_map() ||
585  map == heap->catch_context_map() ||
586  map == heap->with_context_map() ||
587  map == heap->native_context_map() ||
588  map == heap->block_context_map() ||
589  map == heap->module_context_map() ||
590  map == heap->global_context_map());
591 }
592 
593 
594 bool Object::IsNativeContext() {
595  return Object::IsHeapObject() &&
596  HeapObject::cast(this)->map() ==
597  HeapObject::cast(this)->GetHeap()->native_context_map();
598 }
599 
600 
601 bool Object::IsScopeInfo() {
602  return Object::IsHeapObject() &&
603  HeapObject::cast(this)->map() ==
604  HeapObject::cast(this)->GetHeap()->scope_info_map();
605 }
606 
607 
608 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
609 
610 
611 template <> inline bool Is<JSFunction>(Object* obj) {
612  return obj->IsJSFunction();
613 }
614 
615 
616 TYPE_CHECKER(Code, CODE_TYPE)
617 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
618 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
619 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
620 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
621 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
622 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
623 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
624 
625 
626 bool Object::IsStringWrapper() {
627  return IsJSValue() && JSValue::cast(this)->value()->IsString();
628 }
629 
630 
631 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
632 
633 
634 bool Object::IsBoolean() {
635  return IsOddball() &&
636  ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
637 }
638 
639 
640 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
641 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
642 
643 
644 template <> inline bool Is<JSArray>(Object* obj) {
645  return obj->IsJSArray();
646 }
647 
648 
649 bool Object::IsHashTable() {
650  return Object::IsHeapObject() &&
651  HeapObject::cast(this)->map() ==
652  HeapObject::cast(this)->GetHeap()->hash_table_map();
653 }
654 
655 
656 bool Object::IsDictionary() {
657  return IsHashTable() &&
658  this != HeapObject::cast(this)->GetHeap()->symbol_table();
659 }
660 
661 
662 bool Object::IsSymbolTable() {
663  return IsHashTable() && this ==
664  HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
665 }
666 
667 
668 bool Object::IsJSFunctionResultCache() {
669  if (!IsFixedArray()) return false;
670  FixedArray* self = FixedArray::cast(this);
671  int length = self->length();
672  if (length < JSFunctionResultCache::kEntriesIndex) return false;
673  if ((length - JSFunctionResultCache::kEntriesIndex)
674  % JSFunctionResultCache::kEntrySize != 0) {
675  return false;
676  }
677 #ifdef VERIFY_HEAP
678  if (FLAG_verify_heap) {
679  reinterpret_cast<JSFunctionResultCache*>(this)->
680  JSFunctionResultCacheVerify();
681  }
682 #endif
683  return true;
684 }
685 
686 
687 bool Object::IsNormalizedMapCache() {
688  if (!IsFixedArray()) return false;
689  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
690  return false;
691  }
692 #ifdef VERIFY_HEAP
693  if (FLAG_verify_heap) {
694  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
695  }
696 #endif
697  return true;
698 }
699 
700 
701 bool Object::IsCompilationCacheTable() {
702  return IsHashTable();
703 }
704 
705 
706 bool Object::IsCodeCacheHashTable() {
707  return IsHashTable();
708 }
709 
710 
711 bool Object::IsPolymorphicCodeCacheHashTable() {
712  return IsHashTable();
713 }
714 
715 
716 bool Object::IsMapCache() {
717  return IsHashTable();
718 }
719 
720 
721 bool Object::IsPrimitive() {
722  return IsOddball() || IsNumber() || IsString();
723 }
724 
725 
726 bool Object::IsJSGlobalProxy() {
727  bool result = IsHeapObject() &&
728  (HeapObject::cast(this)->map()->instance_type() ==
729  JS_GLOBAL_PROXY_TYPE);
730  ASSERT(!result || IsAccessCheckNeeded());
731  return result;
732 }
733 
734 
735 bool Object::IsGlobalObject() {
736  if (!IsHeapObject()) return false;
737 
738  InstanceType type = HeapObject::cast(this)->map()->instance_type();
739  return type == JS_GLOBAL_OBJECT_TYPE ||
740  type == JS_BUILTINS_OBJECT_TYPE;
741 }
742 
743 
744 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
745 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
746 
747 
748 bool Object::IsUndetectableObject() {
749  return IsHeapObject()
750  && HeapObject::cast(this)->map()->is_undetectable();
751 }
752 
753 
754 bool Object::IsAccessCheckNeeded() {
755  return IsHeapObject()
756  && HeapObject::cast(this)->map()->is_access_check_needed();
757 }
758 
759 
760 bool Object::IsStruct() {
761  if (!IsHeapObject()) return false;
762  switch (HeapObject::cast(this)->map()->instance_type()) {
763 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
764  STRUCT_LIST(MAKE_STRUCT_CASE)
765 #undef MAKE_STRUCT_CASE
766  default: return false;
767  }
768 }
769 
770 
771 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
772  bool Object::Is##Name() { \
773  return Object::IsHeapObject() \
774  && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
775  }
776 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
777 #undef MAKE_STRUCT_PREDICATE
778 
779 
780 bool Object::IsUndefined() {
781  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
782 }
783 
784 
785 bool Object::IsNull() {
786  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
787 }
788 
789 
790 bool Object::IsTheHole() {
791  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
792 }
793 
794 
795 bool Object::IsTrue() {
796  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
797 }
798 
799 
800 bool Object::IsFalse() {
801  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
802 }
803 
804 
805 bool Object::IsArgumentsMarker() {
806  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
807 }
808 
809 
810 double Object::Number() {
811  ASSERT(IsNumber());
812  return IsSmi()
813  ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
814  : reinterpret_cast<HeapNumber*>(this)->value();
815 }
816 
817 
818 bool Object::IsNaN() {
819  return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
820 }
821 
822 
823 MaybeObject* Object::ToSmi() {
824  if (IsSmi()) return this;
825  if (IsHeapNumber()) {
826  double value = HeapNumber::cast(this)->value();
827  int int_value = FastD2I(value);
828  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
829  return Smi::FromInt(int_value);
830  }
831  }
832  return Failure::Exception();
833 }
834 
835 
836 bool Object::HasSpecificClassOf(String* name) {
837  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
838 }
839 
840 
841 MaybeObject* Object::GetElement(uint32_t index) {
842  // GetElement can trigger a getter which can cause allocation.
843  // This was not always the case. This ASSERT is here to catch
844  // leftover incorrect uses.
845  ASSERT(HEAP->IsAllocationAllowed());
846  return GetElementWithReceiver(this, index);
847 }
848 
849 
850 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
851  MaybeObject* maybe = GetElementWithReceiver(this, index);
852  ASSERT(!maybe->IsFailure());
853  Object* result = NULL; // Initialization to please compiler.
854  maybe->ToObject(&result);
855  return result;
856 }
857 
858 
859 MaybeObject* Object::GetProperty(String* key) {
860  PropertyAttributes attributes;
861  return GetPropertyWithReceiver(this, key, &attributes);
862 }
863 
864 
865 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
866  return GetPropertyWithReceiver(this, key, attributes);
867 }
868 
869 
870 #define FIELD_ADDR(p, offset) \
871  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
872 
873 #define READ_FIELD(p, offset) \
874  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
875 
876 #define WRITE_FIELD(p, offset, value) \
877  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
878 
879 #define WRITE_BARRIER(heap, object, offset, value) \
880  heap->incremental_marking()->RecordWrite( \
881  object, HeapObject::RawField(object, offset), value); \
882  if (heap->InNewSpace(value)) { \
883  heap->RecordWrite(object->address(), offset); \
884  }
885 
886 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
887  if (mode == UPDATE_WRITE_BARRIER) { \
888  heap->incremental_marking()->RecordWrite( \
889  object, HeapObject::RawField(object, offset), value); \
890  if (heap->InNewSpace(value)) { \
891  heap->RecordWrite(object->address(), offset); \
892  } \
893  }
894 
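// For illustration (not part of the original file): combined with the field macros above,
// an accessor such as ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) used
// later in this file expands roughly to
//
//   FixedArray* JSObject::properties() {
//     return FixedArray::cast(
//         *reinterpret_cast<Object**>(
//             reinterpret_cast<byte*>(this) + kPropertiesOffset - kHeapObjectTag));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     *reinterpret_cast<Object**>(
//         reinterpret_cast<byte*>(this) + kPropertiesOffset - kHeapObjectTag) = value;
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset, value, mode);
//   }
//
// FIELD_ADDR subtracts kHeapObjectTag to untag the pointer before raw field arithmetic,
// and the conditional barrier keeps the incremental marker and the store buffer informed
// about the stored pointer unless the caller explicitly skips it.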
895 #ifndef V8_TARGET_ARCH_MIPS
896  #define READ_DOUBLE_FIELD(p, offset) \
897  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
898 #else // V8_TARGET_ARCH_MIPS
899  // Prevent gcc from using load-double (mips ldc1) on (possibly)
900  // non-64-bit aligned HeapNumber::value.
901  static inline double read_double_field(void* p, int offset) {
902  union conversion {
903  double d;
904  uint32_t u[2];
905  } c;
906  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
907  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
908  return c.d;
909  }
910  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
911 #endif // V8_TARGET_ARCH_MIPS
912 
913 #ifndef V8_TARGET_ARCH_MIPS
914  #define WRITE_DOUBLE_FIELD(p, offset, value) \
915  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
916 #else // V8_TARGET_ARCH_MIPS
917  // Prevent gcc from using store-double (mips sdc1) on (possibly)
918  // non-64-bit aligned HeapNumber::value.
919  static inline void write_double_field(void* p, int offset,
920  double value) {
921  union conversion {
922  double d;
923  uint32_t u[2];
924  } c;
925  c.d = value;
926  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
927  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
928  }
929  #define WRITE_DOUBLE_FIELD(p, offset, value) \
930  write_double_field(p, offset, value)
931 #endif // V8_TARGET_ARCH_MIPS
932 
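// Note on the MIPS variants above: reading and writing the two 32-bit halves through a
// union forces word-sized loads/stores (lw/sw) instead of ldc1/sdc1, which fault on
// addresses that are not 8-byte aligned. A portable, alignment-agnostic equivalent would
// be a byte-wise copy (sketch only, not part of this file; needs <cstring>):
//
//   static inline double read_double_field_portable(void* p, int offset) {
//     double d;
//     memcpy(&d, FIELD_ADDR(p, offset), sizeof(d));  // no alignment assumption
//     return d;
//   }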
933 
934 #define READ_INT_FIELD(p, offset) \
935  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
936 
937 #define WRITE_INT_FIELD(p, offset, value) \
938  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
939 
940 #define READ_INTPTR_FIELD(p, offset) \
941  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
942 
943 #define WRITE_INTPTR_FIELD(p, offset, value) \
944  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
945 
946 #define READ_UINT32_FIELD(p, offset) \
947  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
948 
949 #define WRITE_UINT32_FIELD(p, offset, value) \
950  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
951 
952 #define READ_INT64_FIELD(p, offset) \
953  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
954 
955 #define WRITE_INT64_FIELD(p, offset, value) \
956  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
957 
958 #define READ_SHORT_FIELD(p, offset) \
959  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
960 
961 #define WRITE_SHORT_FIELD(p, offset, value) \
962  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
963 
964 #define READ_BYTE_FIELD(p, offset) \
965  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
966 
967 #define WRITE_BYTE_FIELD(p, offset, value) \
968  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
969 
970 
971 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
972  return &READ_FIELD(obj, byte_offset);
973 }
974 
975 
976 int Smi::value() {
977  return Internals::SmiValue(this);
978 }
979 
980 
981 Smi* Smi::FromInt(int value) {
982  ASSERT(Smi::IsValid(value));
983  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
984  intptr_t tagged_value =
985  (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
986  return reinterpret_cast<Smi*>(tagged_value);
987 }
988 
989 
990 Smi* Smi::FromIntptr(intptr_t value) {
991  ASSERT(Smi::IsValid(value));
992  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
993  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
994 }
995 
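// For illustration (not part of the original file): on a 32-bit build, where kSmiTag == 0,
// kSmiTagSize == 1 and kSmiShiftSize == 0, tagging is a plain left shift:
//
//   Smi* five = Smi::FromInt(5);   // bit pattern 0b1010, i.e. (5 << 1) | 0
//   int v = five->value();         // arithmetic shift right by 1 gives back 5
//
// On x64, kSmiShiftSize == 31, so the payload lives in the upper 32 bits of the word and
// the lower 32 bits stay zero.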
996 
997 Failure::Type Failure::type() const {
998  return static_cast<Type>(value() & kFailureTypeTagMask);
999 }
1000 
1001 
1002 bool Failure::IsInternalError() const {
1003  return type() == INTERNAL_ERROR;
1004 }
1005 
1006 
1007 bool Failure::IsOutOfMemoryException() const {
1008  return type() == OUT_OF_MEMORY_EXCEPTION;
1009 }
1010 
1011 
1012 AllocationSpace Failure::allocation_space() const {
1013  ASSERT_EQ(RETRY_AFTER_GC, type());
1014  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1015  & kSpaceTagMask);
1016 }
1017 
1018 
1019 Failure* Failure::InternalError() {
1020  return Construct(INTERNAL_ERROR);
1021 }
1022 
1023 
1024 Failure* Failure::Exception() {
1025  return Construct(EXCEPTION);
1026 }
1027 
1028 
1029 Failure* Failure::OutOfMemoryException() {
1030  return Construct(OUT_OF_MEMORY_EXCEPTION);
1031 }
1032 
1033 
1034 intptr_t Failure::value() const {
1035  return static_cast<intptr_t>(
1036  reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1037 }
1038 
1039 
1040 Failure* Failure::RetryAfterGC() {
1041  return RetryAfterGC(NEW_SPACE);
1042 }
1043 
1044 
1045 Failure* Failure::RetryAfterGC(AllocationSpace space) {
1046  ASSERT((space & ~kSpaceTagMask) == 0);
1047  return Construct(RETRY_AFTER_GC, space);
1048 }
1049 
1050 
1051 Failure* Failure::Construct(Type type, intptr_t value) {
1052  uintptr_t info =
1053  (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1054  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1055  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
1056 }
1057 
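// For illustration (not part of the original file): Construct() packs a failure as
//
//   ((value << kFailureTypeTagSize) | type) << kFailureTagSize | kFailureTag
//
// so every Failure* is a fake pointer whose low bits carry kFailureTag. That is what
// HAS_FAILURE_TAG() keys on in the MaybeObject predicates above, and value()/type()
// simply shift and mask the tag back off; no heap memory is ever dereferenced.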
1058 
1059 bool Smi::IsValid(intptr_t value) {
1060 #ifdef DEBUG
1061  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1062 #endif
1063 
1064 #ifdef V8_TARGET_ARCH_X64
1065  // To be representable as a long smi, the value must be a 32-bit integer.
1066  bool result = (value == static_cast<int32_t>(value));
1067 #else
1068  // To be representable as a tagged small integer, the two
1069  // most-significant bits of 'value' must be either 00 or 11 due to
1070  // sign-extension. To check this we add 01 to the two
1071  // most-significant bits, and check if the most-significant bit is 0
1072  //
1073  // CAUTION: The original code below:
1074  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1075  // may lead to incorrect results according to the C language spec, and
1076  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1077  // compiler may produce undefined results in case of signed integer
1078  // overflow. The computation must be done w/ unsigned ints.
1079  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1080 #endif
1081  ASSERT(result == in_range);
1082  return result;
1083 }
1084 
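// Worked example for the CAUTION comment above (illustration only): with 31-bit smis,
// kMinValue == -2^30 and kMaxValue == 2^30 - 1. Adding 0x40000000 (2^30) as an unsigned
// quantity maps that range onto [0, 2^31 - 1]:
//
//   value == -2^30      ->  0x00000000   (< 0x80000000, valid)
//   value ==  2^30 - 1  ->  0x7FFFFFFF   (< 0x80000000, valid)
//   value ==  2^30      ->  0x80000000   (not less, rejected)
//
// Doing the addition on an unsigned type keeps the check free of signed-overflow UB.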
1085 
1086 MapWord MapWord::FromMap(Map* map) {
1087  return MapWord(reinterpret_cast<uintptr_t>(map));
1088 }
1089 
1090 
1091 Map* MapWord::ToMap() {
1092  return reinterpret_cast<Map*>(value_);
1093 }
1094 
1095 
1096 bool MapWord::IsForwardingAddress() {
1097  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1098 }
1099 
1100 
1101 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1102  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1103  return MapWord(reinterpret_cast<uintptr_t>(raw));
1104 }
1105 
1106 
1107 HeapObject* MapWord::ToForwardingAddress() {
1108  ASSERT(IsForwardingAddress());
1109  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1110 }
1111 
1112 
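// For illustration (not part of the original file): during evacuation the map slot of a
// moved object is reused to store where it went. FromForwardingAddress() subtracts
// kHeapObjectTag, so the stored word has its low tag bits clear and passes HAS_SMI_TAG,
// while a live object's map word is a tagged Map pointer and fails it (assuming the usual
// kSmiTag == 0; new_location below is a hypothetical HeapObject*):
//
//   MapWord w = MapWord::FromForwardingAddress(new_location);
//   w.IsForwardingAddress();                                       // true: looks like a smi
//   MapWord::FromMap(new_location->map()).IsForwardingAddress();   // false: tagged pointer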
1113 #ifdef VERIFY_HEAP
1114 void HeapObject::VerifyObjectField(int offset) {
1115  VerifyPointer(READ_FIELD(this, offset));
1116 }
1117 
1118 void HeapObject::VerifySmiField(int offset) {
1119  CHECK(READ_FIELD(this, offset)->IsSmi());
1120 }
1121 #endif
1122 
1123 
1124 Heap* HeapObject::GetHeap() {
1125  Heap* heap =
1126  MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1127  ASSERT(heap != NULL);
1128  ASSERT(heap->isolate() == Isolate::Current());
1129  return heap;
1130 }
1131 
1132 
1133 Isolate* HeapObject::GetIsolate() {
1134  return GetHeap()->isolate();
1135 }
1136 
1137 
1138 Map* HeapObject::map() {
1139  return map_word().ToMap();
1140 }
1141 
1142 
1143 void HeapObject::set_map(Map* value) {
1144  set_map_word(MapWord::FromMap(value));
1145  if (value != NULL) {
1146  // TODO(1600) We are passing NULL as a slot because maps can never be on
1147  // evacuation candidate.
1148  value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1149  }
1150 }
1151 
1152 
1153 // Unsafe accessor omitting write barrier.
1154 void HeapObject::set_map_no_write_barrier(Map* value) {
1155  set_map_word(MapWord::FromMap(value));
1156 }
1157 
1158 
1159 MapWord HeapObject::map_word() {
1160  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1161 }
1162 
1163 
1164 void HeapObject::set_map_word(MapWord map_word) {
1165  // WRITE_FIELD does not invoke write barrier, but there is no need
1166  // here.
1167  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1168 }
1169 
1170 
1171 HeapObject* HeapObject::FromAddress(Address address) {
1172  ASSERT_TAG_ALIGNED(address);
1173  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1174 }
1175 
1176 
1177 Address HeapObject::address() {
1178  return reinterpret_cast<Address>(this) - kHeapObjectTag;
1179 }
1180 
1181 
1182 int HeapObject::Size() {
1183  return SizeFromMap(map());
1184 }
1185 
1186 
1187 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1188  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1189  reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1190 }
1191 
1192 
1193 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1194  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1195 }
1196 
1197 
1197 
1198 double HeapNumber::value() {
1199  return READ_DOUBLE_FIELD(this, kValueOffset);
1200 }
1201 
1202 
1203 void HeapNumber::set_value(double value) {
1204  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1205 }
1206 
1207 
1208 int HeapNumber::get_exponent() {
1209  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1210  kExponentShift) - kExponentBias;
1211 }
1212 
1213 
1214 int HeapNumber::get_sign() {
1215  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1216 }
1217 
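// For illustration (not part of the original file): kExponentOffset addresses the word
// holding the sign/exponent half of the IEEE-754 double, so get_exponent() returns the
// unbiased binary exponent and get_sign() the raw sign bit. For a HeapNumber* n holding
// 2.0 (biased exponent 1024, bias 1023):
//
//   n->get_exponent();  // 1, since 2.0 == 1.0 * 2^1
//   n->get_sign();      // 0 for positive values, non-zero for negative ones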
1218 
1219 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1220 
1221 
1222 Object** FixedArray::GetFirstElementAddress() {
1223  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
1224 }
1225 
1226 
1227 bool FixedArray::ContainsOnlySmisOrHoles() {
1228  Object* the_hole = GetHeap()->the_hole_value();
1229  Object** current = GetFirstElementAddress();
1230  for (int i = 0; i < length(); ++i) {
1231  Object* candidate = *current++;
1232  if (!candidate->IsSmi() && candidate != the_hole) return false;
1233  }
1234  return true;
1235 }
1236 
1237 
1238 FixedArrayBase* JSObject::elements() {
1239  Object* array = READ_FIELD(this, kElementsOffset);
1240  return static_cast<FixedArrayBase*>(array);
1241 }
1242 
1243 
1244 void JSObject::ValidateElements() {
1245 #if DEBUG
1246  if (FLAG_enable_slow_asserts) {
1247  ElementsAccessor* accessor = GetElementsAccessor();
1248  accessor->Validate(this);
1249  }
1250 #endif
1251 }
1252 
1253 
1254 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
1255  ValidateElements();
1256  ElementsKind elements_kind = map()->elements_kind();
1257  if (!IsFastObjectElementsKind(elements_kind)) {
1258  if (IsFastHoleyElementsKind(elements_kind)) {
1259  return TransitionElementsKind(FAST_HOLEY_ELEMENTS);
1260  } else {
1261  return TransitionElementsKind(FAST_ELEMENTS);
1262  }
1263  }
1264  return this;
1265 }
1266 
1267 
1268 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
1269  uint32_t count,
1270  EnsureElementsMode mode) {
1271  ElementsKind current_kind = map()->elements_kind();
1272  ElementsKind target_kind = current_kind;
1273  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
1274  bool is_holey = IsFastHoleyElementsKind(current_kind);
1275  if (current_kind == FAST_HOLEY_ELEMENTS) return this;
1276  Heap* heap = GetHeap();
1277  Object* the_hole = heap->the_hole_value();
1278  for (uint32_t i = 0; i < count; ++i) {
1279  Object* current = *objects++;
1280  if (current == the_hole) {
1281  is_holey = true;
1282  target_kind = GetHoleyElementsKind(target_kind);
1283  } else if (!current->IsSmi()) {
1284  if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1285  if (IsFastSmiElementsKind(target_kind)) {
1286  if (is_holey) {
1287  target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1288  } else {
1289  target_kind = FAST_DOUBLE_ELEMENTS;
1290  }
1291  }
1292  } else if (is_holey) {
1293  target_kind = FAST_HOLEY_ELEMENTS;
1294  break;
1295  } else {
1296  target_kind = FAST_ELEMENTS;
1297  }
1298  }
1299  }
1300 
1301  if (target_kind != current_kind) {
1302  return TransitionElementsKind(target_kind);
1303  }
1304  return this;
1305 }
1306 
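// For illustration (not part of the original file): storing the object sequence {1, 2.5}
// into a FAST_SMI_ELEMENTS backing store walks the loop above as follows: 1 is a smi and
// changes nothing; 2.5 is a non-smi HeapNumber, so under ALLOW_CONVERTED_DOUBLE_ELEMENTS
// the target kind becomes FAST_DOUBLE_ELEMENTS (or FAST_HOLEY_DOUBLE_ELEMENTS if a hole
// was seen first), and the method finishes by calling TransitionElementsKind() to install
// the wider backing-store kind.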
1307 
1308 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1309  uint32_t length,
1310  EnsureElementsMode mode) {
1311  if (elements->map() != GetHeap()->fixed_double_array_map()) {
1312  ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1313  elements->map() == GetHeap()->fixed_cow_array_map());
1314  if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1315  mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1316  }
1317  Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1318  return EnsureCanContainElements(objects, length, mode);
1319  }
1320 
1321  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1322  if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1323  return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1324  } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
1325  FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
1326  for (uint32_t i = 0; i < length; ++i) {
1327  if (double_array->is_the_hole(i)) {
1328  return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1329  }
1330  }
1331  return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1332  }
1333 
1334  return this;
1335 }
1336 
1337 
1338 Map* JSObject::GetElementsTransitionMap(Isolate* isolate,
1339  ElementsKind to_kind) {
1340  Map* current_map = map();
1341  ElementsKind from_kind = current_map->elements_kind();
1342  if (from_kind == to_kind) return current_map;
1343 
1344  Context* native_context = isolate->context()->native_context();
1345  Object* maybe_array_maps = native_context->js_array_maps();
1346  if (maybe_array_maps->IsFixedArray()) {
1347  FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1348  if (array_maps->get(from_kind) == current_map) {
1349  Object* maybe_transitioned_map = array_maps->get(to_kind);
1350  if (maybe_transitioned_map->IsMap()) {
1351  return Map::cast(maybe_transitioned_map);
1352  }
1353  }
1354  }
1355 
1356  return GetElementsTransitionMapSlow(to_kind);
1357 }
1358 
1359 
1360 void JSObject::set_map_and_elements(Map* new_map,
1361  FixedArrayBase* value,
1362  WriteBarrierMode mode) {
1363  ASSERT(value->HasValidElements());
1364  if (new_map != NULL) {
1365  if (mode == UPDATE_WRITE_BARRIER) {
1366  set_map(new_map);
1367  } else {
1368  ASSERT(mode == SKIP_WRITE_BARRIER);
1369  set_map_no_write_barrier(new_map);
1370  }
1371  }
1372  ASSERT((map()->has_fast_smi_or_object_elements() ||
1373  (value == GetHeap()->empty_fixed_array())) ==
1374  (value->map() == GetHeap()->fixed_array_map() ||
1375  value->map() == GetHeap()->fixed_cow_array_map()));
1376  ASSERT((value == GetHeap()->empty_fixed_array()) ||
1377  (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1378  WRITE_FIELD(this, kElementsOffset, value);
1379  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1380 }
1381 
1382 
1383 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1384  set_map_and_elements(NULL, value, mode);
1385 }
1386 
1387 
1388 void JSObject::initialize_properties() {
1389  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1390  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1391 }
1392 
1393 
1394 void JSObject::initialize_elements() {
1395  ASSERT(map()->has_fast_smi_or_object_elements() ||
1396  map()->has_fast_double_elements());
1397  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1398  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1399 }
1400 
1401 
1402 MaybeObject* JSObject::ResetElements() {
1403  Object* obj;
1404  ElementsKind elements_kind = GetInitialFastElementsKind();
1405  if (!FLAG_smi_only_arrays) {
1406  elements_kind = FastSmiToObjectElementsKind(elements_kind);
1407  }
1408  MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1409  elements_kind);
1410  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1411  set_map(Map::cast(obj));
1412  initialize_elements();
1413  return this;
1414 }
1415 
1416 
1417 MaybeObject* JSObject::AddFastPropertyUsingMap(Map* map) {
1418  ASSERT(this->map()->NumberOfOwnDescriptors() + 1 ==
1419  map->NumberOfOwnDescriptors());
1420  if (this->map()->unused_property_fields() == 0) {
1421  int new_size = properties()->length() + map->unused_property_fields() + 1;
1422  FixedArray* new_properties;
1423  MaybeObject* maybe_properties = properties()->CopySize(new_size);
1424  if (!maybe_properties->To(&new_properties)) return maybe_properties;
1425  set_properties(new_properties);
1426  }
1427  set_map(map);
1428  return this;
1429 }
1430 
1431 
1432 bool JSObject::TryTransitionToField(Handle<JSObject> object,
1433  Handle<String> key) {
1434  if (!object->map()->HasTransitionArray()) return false;
1435  Handle<TransitionArray> transitions(object->map()->transitions());
1436  int transition = transitions->Search(*key);
1437  if (transition == TransitionArray::kNotFound) return false;
1438  PropertyDetails target_details = transitions->GetTargetDetails(transition);
1439  if (target_details.type() != FIELD) return false;
1440  if (target_details.attributes() != NONE) return false;
1441  Handle<Map> target(transitions->GetTarget(transition));
1442  JSObject::AddFastPropertyUsingMap(object, target);
1443  return true;
1444 }
1445 
1446 
1447 int JSObject::LastAddedFieldIndex() {
1448  Map* map = this->map();
1449  int last_added = map->LastAdded();
1450  return map->instance_descriptors()->GetFieldIndex(last_added);
1451 }
1452 
1453 
1454 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1455 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1456 
1457 
1458 byte Oddball::kind() {
1459  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1460 }
1461 
1462 
1463 void Oddball::set_kind(byte value) {
1464  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1465 }
1466 
1467 
1468 Object* JSGlobalPropertyCell::value() {
1469  return READ_FIELD(this, kValueOffset);
1470 }
1471 
1472 
1473 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1474  // The write barrier is not used for global property cells.
1475  ASSERT(!val->IsJSGlobalPropertyCell());
1476  WRITE_FIELD(this, kValueOffset, val);
1477 }
1478 
1479 
1479 
1480 int JSObject::GetHeaderSize() {
1481  InstanceType type = map()->instance_type();
1482  // Check for the most common kind of JavaScript object before
1483  // falling into the generic switch. This speeds up the internal
1484  // field operations considerably on average.
1485  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1486  switch (type) {
1487  case JS_MODULE_TYPE:
1488  return JSModule::kSize;
1489  case JS_GLOBAL_PROXY_TYPE:
1490  return JSGlobalProxy::kSize;
1491  case JS_GLOBAL_OBJECT_TYPE:
1492  return JSGlobalObject::kSize;
1493  case JS_BUILTINS_OBJECT_TYPE:
1494  return JSBuiltinsObject::kSize;
1495  case JS_FUNCTION_TYPE:
1496  return JSFunction::kSize;
1497  case JS_VALUE_TYPE:
1498  return JSValue::kSize;
1499  case JS_DATE_TYPE:
1500  return JSDate::kSize;
1501  case JS_ARRAY_TYPE:
1502  return JSArray::kSize;
1503  case JS_WEAK_MAP_TYPE:
1504  return JSWeakMap::kSize;
1505  case JS_REGEXP_TYPE:
1506  return JSRegExp::kSize;
1507  case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1508  return JSObject::kHeaderSize;
1509  case JS_MESSAGE_OBJECT_TYPE:
1510  return JSMessageObject::kSize;
1511  default:
1512  UNREACHABLE();
1513  return 0;
1514  }
1515 }
1516 
1517 
1518 int JSObject::GetInternalFieldCount() {
1519  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
1520  // Make sure to adjust for the number of in-object properties. These
1521  // properties do contribute to the size, but are not internal fields.
1522  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1523  map()->inobject_properties();
1524 }
1525 
1526 
1527 int JSObject::GetInternalFieldOffset(int index) {
1528  ASSERT(index < GetInternalFieldCount() && index >= 0);
1529  return GetHeaderSize() + (kPointerSize * index);
1530 }
1531 
1532 
1533 Object* JSObject::GetInternalField(int index) {
1534  ASSERT(index < GetInternalFieldCount() && index >= 0);
1535  // Internal objects do follow immediately after the header, whereas in-object
1536  // properties are at the end of the object. Therefore there is no need
1537  // to adjust the index here.
1538  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1539 }
1540 
1541 
1542 void JSObject::SetInternalField(int index, Object* value) {
1543  ASSERT(index < GetInternalFieldCount() && index >= 0);
1544  // Internal objects do follow immediately after the header, whereas in-object
1545  // properties are at the end of the object. Therefore there is no need
1546  // to adjust the index here.
1547  int offset = GetHeaderSize() + (kPointerSize * index);
1548  WRITE_FIELD(this, offset, value);
1549  WRITE_BARRIER(GetHeap(), this, offset, value);
1550 }
1551 
1552 
1553 void JSObject::SetInternalField(int index, Smi* value) {
1554  ASSERT(index < GetInternalFieldCount() && index >= 0);
1555  // Internal objects do follow immediately after the header, whereas in-object
1556  // properties are at the end of the object. Therefore there is no need
1557  // to adjust the index here.
1558  int offset = GetHeaderSize() + (kPointerSize * index);
1559  WRITE_FIELD(this, offset, value);
1560 }
1561 
1562 
1563 // Access fast-case object properties at index. The use of these routines
1564 // is needed to correctly distinguish between properties stored in-object and
1565 // properties stored in the properties array.
1566 Object* JSObject::FastPropertyAt(int index) {
1567  // Adjust for the number of properties stored in the object.
1568  index -= map()->inobject_properties();
1569  if (index < 0) {
1570  int offset = map()->instance_size() + (index * kPointerSize);
1571  return READ_FIELD(this, offset);
1572  } else {
1573  ASSERT(index < properties()->length());
1574  return properties()->get(index);
1575  }
1576 }
1577 
1578 
1579 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1580  // Adjust for the number of properties stored in the object.
1581  index -= map()->inobject_properties();
1582  if (index < 0) {
1583  int offset = map()->instance_size() + (index * kPointerSize);
1584  WRITE_FIELD(this, offset, value);
1585  WRITE_BARRIER(GetHeap(), this, offset, value);
1586  } else {
1587  ASSERT(index < properties()->length());
1588  properties()->set(index, value);
1589  }
1590  return value;
1591 }
1592 
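// Worked example for the two accessors above (illustration only, hypothetical map):
// suppose instance_size() == 6 * kPointerSize and inobject_properties() == 2. Then
// FastPropertyAt(1) computes index -= 2 -> -1, so the offset is
// instance_size + (-1) * kPointerSize, i.e. the last in-object slot of the object itself,
// while FastPropertyAt(3) computes index 1 and reads slot 1 of the out-of-line
// properties() FixedArray instead.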
1593 
1594 int JSObject::GetInObjectPropertyOffset(int index) {
1595  // Adjust for the number of properties stored in the object.
1596  index -= map()->inobject_properties();
1597  ASSERT(index < 0);
1598  return map()->instance_size() + (index * kPointerSize);
1599 }
1600 
1601 
1602 Object* JSObject::InObjectPropertyAt(int index) {
1603  // Adjust for the number of properties stored in the object.
1604  index -= map()->inobject_properties();
1605  ASSERT(index < 0);
1606  int offset = map()->instance_size() + (index * kPointerSize);
1607  return READ_FIELD(this, offset);
1608 }
1609 
1610 
1611 Object* JSObject::InObjectPropertyAtPut(int index,
1612  Object* value,
1613  WriteBarrierMode mode) {
1614  // Adjust for the number of properties stored in the object.
1615  index -= map()->inobject_properties();
1616  ASSERT(index < 0);
1617  int offset = map()->instance_size() + (index * kPointerSize);
1618  WRITE_FIELD(this, offset, value);
1619  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1620  return value;
1621 }
1622 
1623 
1624 
1625 void JSObject::InitializeBody(Map* map,
1626  Object* pre_allocated_value,
1627  Object* filler_value) {
1628  ASSERT(!filler_value->IsHeapObject() ||
1629  !GetHeap()->InNewSpace(filler_value));
1630  ASSERT(!pre_allocated_value->IsHeapObject() ||
1631  !GetHeap()->InNewSpace(pre_allocated_value));
1632  int size = map->instance_size();
1633  int offset = kHeaderSize;
1634  if (filler_value != pre_allocated_value) {
1635  int pre_allocated = map->pre_allocated_property_fields();
1636  ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1637  for (int i = 0; i < pre_allocated; i++) {
1638  WRITE_FIELD(this, offset, pre_allocated_value);
1639  offset += kPointerSize;
1640  }
1641  }
1642  while (offset < size) {
1643  WRITE_FIELD(this, offset, filler_value);
1644  offset += kPointerSize;
1645  }
1646 }
1647 
1648 
1649 bool JSObject::HasFastProperties() {
1650  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
1651  return !properties()->IsDictionary();
1652 }
1653 
1654 
1655 bool JSObject::TooManyFastProperties(int properties,
1656  JSObject::StoreFromKeyed store_mode) {
1657  // Allow extra fast properties if the object has more than
1658  // kFastPropertiesSoftLimit in-object properties. When this is the case,
1659  // it is very unlikely that the object is being used as a dictionary
1660  // and there is a good chance that allowing more map transitions
1661  // will be worth it.
1662  int inobject = map()->inobject_properties();
1663 
1664  int limit;
1665  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
1666  limit = Max(inobject, kMaxFastProperties);
1667  } else {
1668  limit = Max(inobject, kFastPropertiesSoftLimit);
1669  }
1670  return properties > limit;
1671 }
1672 
1673 
1674 void Struct::InitializeBody(int object_size) {
1675  Object* value = GetHeap()->undefined_value();
1676  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1677  WRITE_FIELD(this, offset, value);
1678  }
1679 }
1680 
1681 
1682 bool Object::ToArrayIndex(uint32_t* index) {
1683  if (IsSmi()) {
1684  int value = Smi::cast(this)->value();
1685  if (value < 0) return false;
1686  *index = value;
1687  return true;
1688  }
1689  if (IsHeapNumber()) {
1690  double value = HeapNumber::cast(this)->value();
1691  uint32_t uint_value = static_cast<uint32_t>(value);
1692  if (value == static_cast<double>(uint_value)) {
1693  *index = uint_value;
1694  return true;
1695  }
1696  }
1697  return false;
1698 }
1699 
1700 
1701 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
1702  if (!this->IsJSValue()) return false;
1703 
1704  JSValue* js_value = JSValue::cast(this);
1705  if (!js_value->value()->IsString()) return false;
1706 
1707  String* str = String::cast(js_value->value());
1708  if (index >= (uint32_t)str->length()) return false;
1709 
1710  return true;
1711 }
1712 
1713 
1714 
1715 void Object::VerifyApiCallResultType() {
1716 #if ENABLE_EXTRA_CHECKS
1717  if (!(IsSmi() ||
1718  IsString() ||
1719  IsSpecObject() ||
1720  IsHeapNumber() ||
1721  IsUndefined() ||
1722  IsTrue() ||
1723  IsFalse() ||
1724  IsNull())) {
1725  FATAL("API call returned invalid object");
1726  }
1727 #endif // ENABLE_EXTRA_CHECKS
1728 }
1729 
1730 
1731 FixedArrayBase* FixedArrayBase::cast(Object* object) {
1732  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1733  return reinterpret_cast<FixedArrayBase*>(object);
1734 }
1735 
1736 
1737 Object* FixedArray::get(int index) {
1738  ASSERT(index >= 0 && index < this->length());
1739  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1740 }
1741 
1742 
1743 bool FixedArray::is_the_hole(int index) {
1744  return get(index) == GetHeap()->the_hole_value();
1745 }
1746 
1747 
1748 void FixedArray::set(int index, Smi* value) {
1749  ASSERT(map() != HEAP->fixed_cow_array_map());
1750  ASSERT(index >= 0 && index < this->length());
1751  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1752  int offset = kHeaderSize + index * kPointerSize;
1753  WRITE_FIELD(this, offset, value);
1754 }
1755 
1756 
1757 void FixedArray::set(int index, Object* value) {
1758  ASSERT(map() != HEAP->fixed_cow_array_map());
1759  ASSERT(index >= 0 && index < this->length());
1760  int offset = kHeaderSize + index * kPointerSize;
1761  WRITE_FIELD(this, offset, value);
1762  WRITE_BARRIER(GetHeap(), this, offset, value);
1763 }
1764 
1765 
1766 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1767  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1768 }
1769 
1770 
1771 double FixedDoubleArray::hole_nan_as_double() {
1772  return BitCast<double, uint64_t>(kHoleNanInt64);
1773 }
1774 
1775 
1776 double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
1777  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1778  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1779  return OS::nan_value();
1780 }
1781 
1782 
1783 double FixedDoubleArray::get_scalar(int index) {
1784  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1785  map() != HEAP->fixed_array_map());
1786  ASSERT(index >= 0 && index < this->length());
1787  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
1788  ASSERT(!is_the_hole_nan(result));
1789  return result;
1790 }
1791 
1792 int64_t FixedDoubleArray::get_representation(int index) {
1793  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1794  map() != HEAP->fixed_array_map());
1795  ASSERT(index >= 0 && index < this->length());
1796  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
1797 }
1798 
1799 MaybeObject* FixedDoubleArray::get(int index) {
1800  if (is_the_hole(index)) {
1801  return GetHeap()->the_hole_value();
1802  } else {
1803  return GetHeap()->NumberFromDouble(get_scalar(index));
1804  }
1805 }
1806 
1807 
1808 void FixedDoubleArray::set(int index, double value) {
1809  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1810  map() != HEAP->fixed_array_map());
1811  int offset = kHeaderSize + index * kDoubleSize;
1812  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
1813  WRITE_DOUBLE_FIELD(this, offset, value);
1814 }
1815 
1816 
1817 void FixedDoubleArray::set_the_hole(int index) {
1818  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1819  map() != HEAP->fixed_array_map());
1820  int offset = kHeaderSize + index * kDoubleSize;
1821  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1822 }
1823 
1824 
1825 bool FixedDoubleArray::is_the_hole(int index) {
1826  int offset = kHeaderSize + index * kDoubleSize;
1827  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1828 }
1829 
1830 
1831 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1832  Heap* heap = GetHeap();
1833  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1834  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1835  return UPDATE_WRITE_BARRIER;
1836 }
1837 
1838 
1839 void FixedArray::set(int index,
1840  Object* value,
1841  WriteBarrierMode mode) {
1842  ASSERT(map() != HEAP->fixed_cow_array_map());
1843  ASSERT(index >= 0 && index < this->length());
1844  int offset = kHeaderSize + index * kPointerSize;
1845  WRITE_FIELD(this, offset, value);
1846  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1847 }
1848 
1849 
1850 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
1851  int index,
1852  Object* value) {
1853  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1854  ASSERT(index >= 0 && index < array->length());
1855  int offset = kHeaderSize + index * kPointerSize;
1856  WRITE_FIELD(array, offset, value);
1857  Heap* heap = array->GetHeap();
1858  if (heap->InNewSpace(value)) {
1859  heap->RecordWrite(array->address(), offset);
1860  }
1861 }
1862 
1863 
1864 void FixedArray::NoWriteBarrierSet(FixedArray* array,
1865  int index,
1866  Object* value) {
1867  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1868  ASSERT(index >= 0 && index < array->length());
1869  ASSERT(!HEAP->InNewSpace(value));
1870  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1871 }
1872 
1873 
1874 void FixedArray::set_undefined(int index) {
1875  ASSERT(map() != HEAP->fixed_cow_array_map());
1876  set_undefined(GetHeap(), index);
1877 }
1878 
1879 
1880 void FixedArray::set_undefined(Heap* heap, int index) {
1881  ASSERT(index >= 0 && index < this->length());
1882  ASSERT(!heap->InNewSpace(heap->undefined_value()));
1883  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1884  heap->undefined_value());
1885 }
1886 
1887 
1888 void FixedArray::set_null(int index) {
1889  set_null(GetHeap(), index);
1890 }
1891 
1892 
1893 void FixedArray::set_null(Heap* heap, int index) {
1894  ASSERT(index >= 0 && index < this->length());
1895  ASSERT(!heap->InNewSpace(heap->null_value()));
1896  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1897 }
1898 
1899 
1900 void FixedArray::set_the_hole(int index) {
1901  ASSERT(map() != HEAP->fixed_cow_array_map());
1902  ASSERT(index >= 0 && index < this->length());
1903  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1904  WRITE_FIELD(this,
1905  kHeaderSize + index * kPointerSize,
1906  GetHeap()->the_hole_value());
1907 }
1908 
1909 
1910 void FixedArray::set_unchecked(int index, Smi* value) {
1911  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1912  int offset = kHeaderSize + index * kPointerSize;
1913  WRITE_FIELD(this, offset, value);
1914 }
1915 
1916 
1917 void FixedArray::set_unchecked(Heap* heap,
1918  int index,
1919  Object* value,
1920  WriteBarrierMode mode) {
1921  int offset = kHeaderSize + index * kPointerSize;
1922  WRITE_FIELD(this, offset, value);
1923  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1924 }
1925 
1926 
1927 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1928  ASSERT(index >= 0 && index < this->length());
1929  ASSERT(!heap->InNewSpace(heap->null_value()));
1930  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1931 }
1932 
1933 
1934 Object** FixedArray::data_start() {
1935  return HeapObject::RawField(this, kHeaderSize);
1936 }
1937 
1938 
1939 bool DescriptorArray::IsEmpty() {
1940  ASSERT(length() >= kFirstIndex ||
1941  this == HEAP->empty_descriptor_array());
1942  return length() < kFirstIndex;
1943 }
1944 
1945 
1946 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
1947  WRITE_FIELD(
1948  this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
1949 }
1950 
1951 
1952 // Perform a binary search in a fixed array. Low and high are entry indices. If
1953 // there are three entries in this array it should be called with low=0 and
1954 // high=2.
1955 template<SearchMode search_mode, typename T>
1956 int BinarySearch(T* array, String* name, int low, int high, int valid_entries) {
1957  uint32_t hash = name->Hash();
1958  int limit = high;
1959 
1960  ASSERT(low <= high);
1961 
1962  while (low != high) {
1963  int mid = (low + high) / 2;
1964  String* mid_name = array->GetSortedKey(mid);
1965  uint32_t mid_hash = mid_name->Hash();
1966 
1967  if (mid_hash >= hash) {
1968  high = mid;
1969  } else {
1970  low = mid + 1;
1971  }
1972  }
1973 
1974  for (; low <= limit; ++low) {
1975  int sort_index = array->GetSortedKeyIndex(low);
1976  String* entry = array->GetKey(sort_index);
1977  if (entry->Hash() != hash) break;
1978  if (entry->Equals(name)) {
1979  if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
1980  return sort_index;
1981  }
1982  return T::kNotFound;
1983  }
1984  }
1985 
1986  return T::kNotFound;
1987 }
1988 
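// For illustration (not part of the original file): the while-loop above is the standard
// lower-bound pattern over the hash values of the sorted keys, conceptually
//
//   int low = std::lower_bound(hashes, hashes + n, hash) - hashes;
//
// and the trailing for-loop then walks the consecutive entries whose hashes collide with
// the target, comparing real string equality, so hash collisions in descriptor and
// transition arrays are still resolved correctly.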
1989 
1990 // Perform a linear search in this fixed array. len is the number of entry
1991 // indices that are valid.
1992 template<SearchMode search_mode, typename T>
1993 int LinearSearch(T* array, String* name, int len, int valid_entries) {
1994  uint32_t hash = name->Hash();
1995  if (search_mode == ALL_ENTRIES) {
1996  for (int number = 0; number < len; number++) {
1997  int sorted_index = array->GetSortedKeyIndex(number);
1998  String* entry = array->GetKey(sorted_index);
1999  uint32_t current_hash = entry->Hash();
2000  if (current_hash > hash) break;
2001  if (current_hash == hash && entry->Equals(name)) return sorted_index;
2002  }
2003  } else {
2004  ASSERT(len >= valid_entries);
2005  for (int number = 0; number < valid_entries; number++) {
2006  String* entry = array->GetKey(number);
2007  uint32_t current_hash = entry->Hash();
2008  if (current_hash == hash && entry->Equals(name)) return number;
2009  }
2010  }
2011  return T::kNotFound;
2012 }
2013 
2014 
2015 template<SearchMode search_mode, typename T>
2016 int Search(T* array, String* name, int valid_entries) {
2017  if (search_mode == VALID_ENTRIES) {
2018  SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
2019  } else {
2020  SLOW_ASSERT(array->IsSortedNoDuplicates());
2021  }
2022 
2023  int nof = array->number_of_entries();
2024  if (nof == 0) return T::kNotFound;
2025 
2026  // Fast case: do linear search for small arrays.
2027  const int kMaxElementsForLinearSearch = 8;
2028  if ((search_mode == ALL_ENTRIES &&
2029  nof <= kMaxElementsForLinearSearch) ||
2030  (search_mode == VALID_ENTRIES &&
2031  valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2032  return LinearSearch<search_mode>(array, name, nof, valid_entries);
2033  }
2034 
2035  // Slow case: perform binary search.
2036  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2037 }
2038 
2039 
2040 int DescriptorArray::Search(String* name, int valid_descriptors) {
2041  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
2042 }
2043 
2044 
2045 int DescriptorArray::SearchWithCache(String* name, Map* map) {
2046  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2047  if (number_of_own_descriptors == 0) return kNotFound;
2048 
2049  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2050  int number = cache->Lookup(map, name);
2051 
2052  if (number == DescriptorLookupCache::kAbsent) {
2053  number = Search(name, number_of_own_descriptors);
2054  cache->Update(map, name, number);
2055  }
2056 
2057  return number;
2058 }
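// Editor's illustration: a caller typically resolves a property name against
// a map's own descriptors through the cache-backed search above.  The helper
// name below is hypothetical; it only shows the intended calling pattern.
static inline int LookupOwnDescriptor(Map* map, String* name) {
  DescriptorArray* descriptors = map->instance_descriptors();
  // Returns the descriptor index, or DescriptorArray::kNotFound on a miss;
  // the DescriptorLookupCache is consulted first and updated after a miss.
  return descriptors->SearchWithCache(name, map);
}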
2059 
2060 
2061 void Map::LookupDescriptor(JSObject* holder,
2062  String* name,
2063  LookupResult* result) {
2064  DescriptorArray* descriptors = this->instance_descriptors();
2065  int number = descriptors->SearchWithCache(name, this);
2066  if (number == DescriptorArray::kNotFound) return result->NotFound();
2067  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
2068 }
2069 
2070 
2071 void Map::LookupTransition(JSObject* holder,
2072  String* name,
2073  LookupResult* result) {
2074  if (HasTransitionArray()) {
2075  TransitionArray* transition_array = transitions();
2076  int number = transition_array->Search(name);
2077  if (number != TransitionArray::kNotFound) {
2078  return result->TransitionResult(holder, number);
2079  }
2080  }
2081  result->NotFound();
2082 }
2083 
2084 
2085 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2086  ASSERT(descriptor_number < number_of_descriptors());
2087  return HeapObject::RawField(
2088  reinterpret_cast<HeapObject*>(this),
2089  OffsetOfElementAt(ToKeyIndex(descriptor_number)));
2090 }
2091 
2092 
2093 String* DescriptorArray::GetKey(int descriptor_number) {
2094  ASSERT(descriptor_number < number_of_descriptors());
2095  return String::cast(get(ToKeyIndex(descriptor_number)));
2096 }
2097 
2098 
2099 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2100  return GetDetails(descriptor_number).pointer();
2101 }
2102 
2103 
2104 String* DescriptorArray::GetSortedKey(int descriptor_number) {
2105  return GetKey(GetSortedKeyIndex(descriptor_number));
2106 }
2107 
2108 
2109 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2110  PropertyDetails details = GetDetails(descriptor_index);
2111  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2112 }
2113 
2114 
2115 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2116  ASSERT(descriptor_number < number_of_descriptors());
2117  return HeapObject::RawField(
2118  reinterpret_cast<HeapObject*>(this),
2119  OffsetOfElementAt(ToValueIndex(descriptor_number)));
2120 }
2121 
2122 
2123 Object* DescriptorArray::GetValue(int descriptor_number) {
2124  ASSERT(descriptor_number < number_of_descriptors());
2125  return get(ToValueIndex(descriptor_number));
2126 }
2127 
2128 
2129 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2130  ASSERT(descriptor_number < number_of_descriptors());
2131  Object* details = get(ToDetailsIndex(descriptor_number));
2132  return PropertyDetails(Smi::cast(details));
2133 }
2134 
2135 
2136 PropertyType DescriptorArray::GetType(int descriptor_number) {
2137  return GetDetails(descriptor_number).type();
2138 }
2139 
2140 
2141 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2142  return Descriptor::IndexFromValue(GetValue(descriptor_number));
2143 }
2144 
2145 
2146 JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
2147  return JSFunction::cast(GetValue(descriptor_number));
2148 }
2149 
2150 
2151 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2152  ASSERT(GetType(descriptor_number) == CALLBACKS);
2153  return GetValue(descriptor_number);
2154 }
2155 
2156 
2157 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
2158  ASSERT(GetType(descriptor_number) == CALLBACKS);
2159  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2160  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2161 }
2162 
2163 
2164 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
2165  desc->Init(GetKey(descriptor_number),
2166  GetValue(descriptor_number),
2167  GetDetails(descriptor_number));
2168 }
2169 
2170 
2171 void DescriptorArray::Set(int descriptor_number,
2172  Descriptor* desc,
2173  const WhitenessWitness&) {
2174  // Range check.
2175  ASSERT(descriptor_number < number_of_descriptors());
2176  ASSERT(desc->GetDetails().descriptor_index() <=
2177  number_of_descriptors());
2178  ASSERT(desc->GetDetails().descriptor_index() > 0);
2179 
2180  NoIncrementalWriteBarrierSet(this,
2181  ToKeyIndex(descriptor_number),
2182  desc->GetKey());
2183  NoIncrementalWriteBarrierSet(this,
2184  ToValueIndex(descriptor_number),
2185  desc->GetValue());
2186  NoIncrementalWriteBarrierSet(this,
2187  ToDetailsIndex(descriptor_number),
2188  desc->GetDetails().AsSmi());
2189 }
2190 
2191 
2192 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
2193  // Range check.
2194  ASSERT(descriptor_number < number_of_descriptors());
2195  ASSERT(desc->GetDetails().descriptor_index() <=
2196  number_of_descriptors());
2197  ASSERT(desc->GetDetails().descriptor_index() > 0);
2198 
2199  set(ToKeyIndex(descriptor_number), desc->GetKey());
2200  set(ToValueIndex(descriptor_number), desc->GetValue());
2201  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
2202 }
2203 
2204 
2205 void DescriptorArray::Append(Descriptor* desc,
2206  const WhitenessWitness& witness) {
2207  int descriptor_number = number_of_descriptors();
2208  int enumeration_index = descriptor_number + 1;
2209  SetNumberOfDescriptors(descriptor_number + 1);
2210  desc->SetEnumerationIndex(enumeration_index);
2211  Set(descriptor_number, desc, witness);
2212 
2213  uint32_t hash = desc->GetKey()->Hash();
2214 
2215  int insertion;
2216 
2217  for (insertion = descriptor_number; insertion > 0; --insertion) {
2218  String* key = GetSortedKey(insertion - 1);
2219  if (key->Hash() <= hash) break;
2220  SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2221  }
2222 
2223  SetSortedKey(insertion, descriptor_number);
2224 }
2225 
2226 
2227 void DescriptorArray::Append(Descriptor* desc) {
2228  int descriptor_number = number_of_descriptors();
2229  int enumeration_index = descriptor_number + 1;
2230  SetNumberOfDescriptors(descriptor_number + 1);
2231  desc->SetEnumerationIndex(enumeration_index);
2232  Set(descriptor_number, desc);
2233 
2234  uint32_t hash = desc->GetKey()->Hash();
2235 
2236  int insertion;
2237 
2238  for (insertion = descriptor_number; insertion > 0; --insertion) {
2239  String* key = GetSortedKey(insertion - 1);
2240  if (key->Hash() <= hash) break;
2241  SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
2242  }
2243 
2244  SetSortedKey(insertion, descriptor_number);
2245 }
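// Editor's illustration: both Append variants keep the side order accessed
// through GetSortedKeyIndex/SetSortedKey sorted by key hash.  The shifting
// loop above is one insertion-sort step; on a plain array of hashes (with
// room for one more element) the same step looks like this:
static inline void InsertSortedHash(uint32_t* hashes, int count,
                                    uint32_t hash) {
  int insertion = count;                        // New element starts at the end
  for (; insertion > 0; --insertion) {
    if (hashes[insertion - 1] <= hash) break;   // Found its place.
    hashes[insertion] = hashes[insertion - 1];  // Shift larger hashes up.
  }
  hashes[insertion] = hash;
}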
2246 
2247 
2248 void DescriptorArray::SwapSortedKeys(int first, int second) {
2249  int first_key = GetSortedKeyIndex(first);
2250  SetSortedKey(first, GetSortedKeyIndex(second));
2251  SetSortedKey(second, first_key);
2252 }
2253 
2254 
2255 DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
2256  : marking_(array->GetHeap()->incremental_marking()) {
2257  marking_->EnterNoMarkingScope();
2258  ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2259 }
2260 
2261 
2262 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2263  marking_->LeaveNoMarkingScope();
2264 }
2265 
2266 
2267 template<typename Shape, typename Key>
2268 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2269  const int kMinCapacity = 32;
2270  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2271  if (capacity < kMinCapacity) {
2272  capacity = kMinCapacity; // Guarantee min capacity.
2273  }
2274  return capacity;
2275 }
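// Editor's note: the requested element count is doubled and rounded up to a
// power of two, with a floor of 32, so a freshly sized table is at most about
// half full.  For example (numbers chosen only to make the arithmetic
// concrete): at_least_space_for = 10 gives 2 * 10 = 20, rounded up to 32 (the
// minimum); at_least_space_for = 40 gives 2 * 40 = 80, rounded up to 128.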
2276 
2277 
2278 template<typename Shape, typename Key>
2279 int HashTable<Shape, Key>::FindEntry(Key key) {
2280  return FindEntry(GetIsolate(), key);
2281 }
2282 
2283 
2284 // Find entry for key otherwise return kNotFound.
2285 template<typename Shape, typename Key>
2286 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
2287  uint32_t capacity = Capacity();
2288  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2289  uint32_t count = 1;
2290  // EnsureCapacity will guarantee the hash table is never full.
2291  while (true) {
2292  Object* element = KeyAt(entry);
2293  // Empty entry.
2294  if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2295  if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2296  Shape::IsMatch(key, element)) return entry;
2297  entry = NextProbe(entry, count++, capacity);
2298  }
2299  return kNotFound;
2300 }
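// Editor's illustration (standalone; the real FirstProbe/NextProbe helpers
// may use a different step): FindEntry above is a classic open-addressing
// lookup.  Probing stops only at a never-used (undefined) slot; deleted
// entries ("the hole") must not stop it, or keys inserted after a deletion
// would become unreachable.  A minimal version over integer keys, assuming a
// power-of-two capacity, 0 as the "empty" sentinel and a table that is never
// completely full:
static inline int ProbeLookup(const uint32_t* keys, uint32_t capacity,
                              uint32_t key) {
  uint32_t entry = key & (capacity - 1);            // First probe.
  for (uint32_t count = 1; ; count++) {
    uint32_t element = keys[entry];
    if (element == 0) return -1;                    // Empty slot: not present.
    if (element == key) return static_cast<int>(entry);
    entry = (entry + count) & (capacity - 1);       // Triangular probe step.
  }
}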
2301 
2302 
2303 bool SeededNumberDictionary::requires_slow_elements() {
2304  Object* max_index_object = get(kMaxNumberKeyIndex);
2305  if (!max_index_object->IsSmi()) return false;
2306  return 0 !=
2307  (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2308 }
2309 
2310 uint32_t SeededNumberDictionary::max_number_key() {
2311  ASSERT(!requires_slow_elements());
2312  Object* max_index_object = get(kMaxNumberKeyIndex);
2313  if (!max_index_object->IsSmi()) return 0;
2314  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2315  return value >> kRequiresSlowElementsTagSize;
2316 }
2317 
2318 void SeededNumberDictionary::set_requires_slow_elements() {
2319  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2320 }
2321 
2322 
2323 // ------------------------------------
2324 // Cast operations
2325 
2326 
2355 CAST_ACCESSOR(Oddball)
2356 CAST_ACCESSOR(JSGlobalPropertyCell)
2357 CAST_ACCESSOR(SharedFunctionInfo)
2363 CAST_ACCESSOR(JSBuiltinsObject)
2386 
2387 
2388 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2390 #undef MAKE_STRUCT_CAST
2391 
2392 
2393 template <typename Shape, typename Key>
2394 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2395  ASSERT(obj->IsHashTable());
2396  return reinterpret_cast<HashTable*>(obj);
2397 }
2398 
2399 
2401 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2402 
2404 
2405 
2406 uint32_t String::hash_field() {
2407  return READ_UINT32_FIELD(this, kHashFieldOffset);
2408 }
2409 
2410 
2411 void String::set_hash_field(uint32_t value) {
2412  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
2413 #if V8_HOST_ARCH_64_BIT
2414  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
2415 #endif
2416 }
2417 
2418 
2419 bool String::Equals(String* other) {
2420  if (other == this) return true;
2421  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
2422  return false;
2423  }
2424  return SlowEquals(other);
2425 }
2426 
2427 
2428 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
2429  if (!StringShape(this).IsCons()) return this;
2430  ConsString* cons = ConsString::cast(this);
2431  if (cons->IsFlat()) return cons->first();
2432  return SlowTryFlatten(pretenure);
2433 }
2434 
2435 
2436 String* String::TryFlattenGetString(PretenureFlag pretenure) {
2437  MaybeObject* flat = TryFlatten(pretenure);
2438  Object* successfully_flattened;
2439  if (!flat->ToObject(&successfully_flattened)) return this;
2440  return String::cast(successfully_flattened);
2441 }
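// Editor's note: TryFlatten returns a MaybeObject because flattening a cons
// string may need to allocate; TryFlattenGetString above simply keeps the
// original, unflattened string when that allocation fails.  A caller that
// must observe the failure unwraps the result itself, along these lines:
//
//   MaybeObject* maybe_flat = str->TryFlatten(NOT_TENURED);
//   Object* flat;
//   if (!maybe_flat->ToObject(&flat)) return maybe_flat;  // Propagate failure.
//   str = String::cast(flat);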
2442 
2443 
2444 uint16_t String::Get(int index) {
2445  ASSERT(index >= 0 && index < length());
2446  switch (StringShape(this).full_representation_tag()) {
2448  return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2450  return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2453  return ConsString::cast(this)->ConsStringGet(index);
2460  return SlicedString::cast(this)->SlicedStringGet(index);
2461  default:
2462  break;
2463  }
2464 
2465  UNREACHABLE();
2466  return 0;
2467 }
2468 
2469 
2470 void String::Set(int index, uint16_t value) {
2471  ASSERT(index >= 0 && index < length());
2472  ASSERT(StringShape(this).IsSequential());
2473 
2474  return this->IsAsciiRepresentation()
2475  ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
2476  : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
2477 }
2478 
2479 
2480 bool String::IsFlat() {
2481  if (!StringShape(this).IsCons()) return true;
2482  return ConsString::cast(this)->second()->length() == 0;
2483 }
2484 
2485 
2486 String* String::GetUnderlying() {
2487  // Giving direct access to underlying string only makes sense if the
2488  // wrapping string is already flattened.
2489  ASSERT(this->IsFlat());
2490  ASSERT(StringShape(this).IsIndirect());
2491  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
2492  const int kUnderlyingOffset = SlicedString::kParentOffset;
2493  return String::cast(READ_FIELD(this, kUnderlyingOffset));
2494 }
2495 
2496 
2498  ASSERT(index >= 0 && index < length());
2499  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2500 }
2501 
2502 
2504  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2505  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2506  static_cast<byte>(value));
2507 }
2508 
2509 
2511  return FIELD_ADDR(this, kHeaderSize);
2512 }
2513 
2514 
2516  return reinterpret_cast<char*>(GetCharsAddress());
2517 }
2518 
2519 
2521  return FIELD_ADDR(this, kHeaderSize);
2522 }
2523 
2524 
2526  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2527 }
2528 
2529 
2531  ASSERT(index >= 0 && index < length());
2532  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2533 }
2534 
2535 
2537  ASSERT(index >= 0 && index < length());
2538  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2539 }
2540 
2541 
2543  return SizeFor(length());
2544 }
2545 
2546 
2548  return SizeFor(length());
2549 }
2550 
2551 
2553  return String::cast(READ_FIELD(this, kParentOffset));
2554 }
2555 
2556 
2558  ASSERT(parent->IsSeqString() || parent->IsExternalString());
2559  WRITE_FIELD(this, kParentOffset, parent);
2560  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
2561 }
2562 
2563 
2564 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2565 
2566 
2567 String* ConsString::first() {
2568  return String::cast(READ_FIELD(this, kFirstOffset));
2569 }
2570 
2571 
2573  return READ_FIELD(this, kFirstOffset);
2574 }
2575 
2576 
2578  WRITE_FIELD(this, kFirstOffset, value);
2579  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2580 }
2581 
2582 
2584  return String::cast(READ_FIELD(this, kSecondOffset));
2585 }
2586 
2587 
2589  return READ_FIELD(this, kSecondOffset);
2590 }
2591 
2592 
2594  WRITE_FIELD(this, kSecondOffset, value);
2595  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2596 }
2597 
2598 
2599 bool ExternalString::is_short() {
2600  InstanceType type = map()->instance_type();
2601  return (type & kShortExternalStringMask) == kShortExternalStringTag;
2602 }
2603 
2604 
2606  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2607 }
2608 
2609 
2611  if (is_short()) return;
2612  const char** data_field =
2613  reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2614  *data_field = resource()->data();
2615 }
2616 
2617 
2619  const ExternalAsciiString::Resource* resource) {
2620  *reinterpret_cast<const Resource**>(
2621  FIELD_ADDR(this, kResourceOffset)) = resource;
2622  if (resource != NULL) update_data_cache();
2623 }
2624 
2625 
2627  return resource()->data();
2628 }
2629 
2630 
2632  ASSERT(index >= 0 && index < length());
2633  return GetChars()[index];
2634 }
2635 
2636 
2638  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2639 }
2640 
2641 
2643  if (is_short()) return;
2644  const uint16_t** data_field =
2645  reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2646  *data_field = resource()->data();
2647 }
2648 
2649 
2651  const ExternalTwoByteString::Resource* resource) {
2652  *reinterpret_cast<const Resource**>(
2653  FIELD_ADDR(this, kResourceOffset)) = resource;
2654  if (resource != NULL) update_data_cache();
2655 }
2656 
2657 
2659  return resource()->data();
2660 }
2661 
2662 
2664  ASSERT(index >= 0 && index < length());
2665  return GetChars()[index];
2666 }
2667 
2668 
2670  unsigned start) {
2671  return GetChars() + start;
2672 }
2673 
2674 
2676  set_finger_index(kEntriesIndex);
2677  set_size(kEntriesIndex);
2678 }
2679 
2680 
2682  int cache_size = size();
2683  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2684  MemsetPointer(entries_start,
2685  GetHeap()->the_hole_value(),
2686  cache_size - kEntriesIndex);
2687  MakeZeroSize();
2688 }
2689 
2690 
2692  return Smi::cast(get(kCacheSizeIndex))->value();
2693 }
2694 
2695 
2697  set(kCacheSizeIndex, Smi::FromInt(size));
2698 }
2699 
2700 
2702  return Smi::cast(get(kFingerIndex))->value();
2703 }
2704 
2705 
2707  set(kFingerIndex, Smi::FromInt(finger_index));
2708 }
2709 
2710 
2711 byte ByteArray::get(int index) {
2712  ASSERT(index >= 0 && index < this->length());
2713  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2714 }
2715 
2716 
2717 void ByteArray::set(int index, byte value) {
2718  ASSERT(index >= 0 && index < this->length());
2719  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2720 }
2721 
2722 
2723 int ByteArray::get_int(int index) {
2724  ASSERT(index >= 0 && (index * kIntSize) < this->length());
2725  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2726 }
2727 
2728 
2730  ASSERT_TAG_ALIGNED(address);
2731  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2732 }
2733 
2734 
2736  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2737 }
2738 
2739 
2741  return reinterpret_cast<uint8_t*>(external_pointer());
2742 }
2743 
2744 
2746  ASSERT((index >= 0) && (index < this->length()));
2747  uint8_t* ptr = external_pixel_pointer();
2748  return ptr[index];
2749 }
2750 
2751 
2752 MaybeObject* ExternalPixelArray::get(int index) {
2753  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2754 }
2755 
2756 
2757 void ExternalPixelArray::set(int index, uint8_t value) {
2758  ASSERT((index >= 0) && (index < this->length()));
2759  uint8_t* ptr = external_pixel_pointer();
2760  ptr[index] = value;
2761 }
2762 
2763 
2764 void* ExternalArray::external_pointer() {
2765  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2766  return reinterpret_cast<void*>(ptr);
2767 }
2768 
2769 
2770 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2771  intptr_t ptr = reinterpret_cast<intptr_t>(value);
2772  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2773 }
2774 
2775 
2777  ASSERT((index >= 0) && (index < this->length()));
2778  int8_t* ptr = static_cast<int8_t*>(external_pointer());
2779  return ptr[index];
2780 }
2781 
2782 
2783 MaybeObject* ExternalByteArray::get(int index) {
2784  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2785 }
2786 
2787 
2788 void ExternalByteArray::set(int index, int8_t value) {
2789  ASSERT((index >= 0) && (index < this->length()));
2790  int8_t* ptr = static_cast<int8_t*>(external_pointer());
2791  ptr[index] = value;
2792 }
2793 
2794 
2796  ASSERT((index >= 0) && (index < this->length()));
2797  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2798  return ptr[index];
2799 }
2800 
2801 
2802 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2803  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2804 }
2805 
2806 
2807 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2808  ASSERT((index >= 0) && (index < this->length()));
2809  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2810  ptr[index] = value;
2811 }
2812 
2813 
2815  ASSERT((index >= 0) && (index < this->length()));
2816  int16_t* ptr = static_cast<int16_t*>(external_pointer());
2817  return ptr[index];
2818 }
2819 
2820 
2821 MaybeObject* ExternalShortArray::get(int index) {
2822  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2823 }
2824 
2825 
2826 void ExternalShortArray::set(int index, int16_t value) {
2827  ASSERT((index >= 0) && (index < this->length()));
2828  int16_t* ptr = static_cast<int16_t*>(external_pointer());
2829  ptr[index] = value;
2830 }
2831 
2832 
2834  ASSERT((index >= 0) && (index < this->length()));
2835  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2836  return ptr[index];
2837 }
2838 
2839 
2840 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2841  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2842 }
2843 
2844 
2846  ASSERT((index >= 0) && (index < this->length()));
2847  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2848  ptr[index] = value;
2849 }
2850 
2851 
2853  ASSERT((index >= 0) && (index < this->length()));
2854  int32_t* ptr = static_cast<int32_t*>(external_pointer());
2855  return ptr[index];
2856 }
2857 
2858 
2859 MaybeObject* ExternalIntArray::get(int index) {
2860  return GetHeap()->NumberFromInt32(get_scalar(index));
2861 }
2862 
2863 
2864 void ExternalIntArray::set(int index, int32_t value) {
2865  ASSERT((index >= 0) && (index < this->length()));
2866  int32_t* ptr = static_cast<int32_t*>(external_pointer());
2867  ptr[index] = value;
2868 }
2869 
2870 
2872  ASSERT((index >= 0) && (index < this->length()));
2873  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2874  return ptr[index];
2875 }
2876 
2877 
2878 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2879  return GetHeap()->NumberFromUint32(get_scalar(index));
2880 }
2881 
2882 
2883 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2884  ASSERT((index >= 0) && (index < this->length()));
2885  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2886  ptr[index] = value;
2887 }
2888 
2889 
2891  ASSERT((index >= 0) && (index < this->length()));
2892  float* ptr = static_cast<float*>(external_pointer());
2893  return ptr[index];
2894 }
2895 
2896 
2897 MaybeObject* ExternalFloatArray::get(int index) {
2898  return GetHeap()->NumberFromDouble(get_scalar(index));
2899 }
2900 
2901 
2902 void ExternalFloatArray::set(int index, float value) {
2903  ASSERT((index >= 0) && (index < this->length()));
2904  float* ptr = static_cast<float*>(external_pointer());
2905  ptr[index] = value;
2906 }
2907 
2908 
2910  ASSERT((index >= 0) && (index < this->length()));
2911  double* ptr = static_cast<double*>(external_pointer());
2912  return ptr[index];
2913 }
2914 
2915 
2916 MaybeObject* ExternalDoubleArray::get(int index) {
2917  return GetHeap()->NumberFromDouble(get_scalar(index));
2918 }
2919 
2920 
2921 void ExternalDoubleArray::set(int index, double value) {
2922  ASSERT((index >= 0) && (index < this->length()));
2923  double* ptr = static_cast<double*>(external_pointer());
2924  ptr[index] = value;
2925 }
2926 
2927 
2929  return READ_BYTE_FIELD(this, kVisitorIdOffset);
2930 }
2931 
2932 
2933 void Map::set_visitor_id(int id) {
2934  ASSERT(0 <= id && id < 256);
2935  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2936 }
2937 
2938 
2940  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2941 }
2942 
2943 
2945  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2946 }
2947 
2948 
2950  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2951 }
2952 
2953 
2954 int HeapObject::SizeFromMap(Map* map) {
2955  int instance_size = map->instance_size();
2956  if (instance_size != kVariableSizeSentinel) return instance_size;
2957  // We can ignore the "symbol" bit becase it is only set for symbols
2958  // and implies a string type.
2959  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2960  // Only inline the most frequent cases.
2961  if (instance_type == FIXED_ARRAY_TYPE) {
2962  return FixedArray::BodyDescriptor::SizeOf(map, this);
2963  }
2964  if (instance_type == ASCII_STRING_TYPE) {
2965  return SeqAsciiString::SizeFor(
2966  reinterpret_cast<SeqAsciiString*>(this)->length());
2967  }
2968  if (instance_type == BYTE_ARRAY_TYPE) {
2969  return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2970  }
2971  if (instance_type == FREE_SPACE_TYPE) {
2972  return reinterpret_cast<FreeSpace*>(this)->size();
2973  }
2974  if (instance_type == STRING_TYPE) {
2975  return SeqTwoByteString::SizeFor(
2976  reinterpret_cast<SeqTwoByteString*>(this)->length());
2977  }
2978  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2979  return FixedDoubleArray::SizeFor(
2980  reinterpret_cast<FixedDoubleArray*>(this)->length());
2981  }
2982  ASSERT(instance_type == CODE_TYPE);
2983  return reinterpret_cast<Code*>(this)->CodeSize();
2984 }
2985 
2986 
2987 void Map::set_instance_size(int value) {
2988  ASSERT_EQ(0, value & (kPointerSize - 1));
2989  value >>= kPointerSizeLog2;
2990  ASSERT(0 <= value && value < 256);
2991  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2992 }
2993 
2994 
2996  ASSERT(0 <= value && value < 256);
2997  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2998 }
2999 
3000 
3002  ASSERT(0 <= value && value < 256);
3003  WRITE_BYTE_FIELD(this,
3004  kPreAllocatedPropertyFieldsOffset,
3005  static_cast<byte>(value));
3006 }
3007 
3008 
3010  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
3011 }
3012 
3013 
3015  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
3016 }
3017 
3018 
3020  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
3021 }
3022 
3023 
3025  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
3026 }
3027 
3028 
3030  return READ_BYTE_FIELD(this, kBitFieldOffset);
3031 }
3032 
3033 
3035  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
3036 }
3037 
3038 
3040  return READ_BYTE_FIELD(this, kBitField2Offset);
3041 }
3042 
3043 
3045  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
3046 }
3047 
3048 
3050  if (value) {
3051  set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
3052  } else {
3053  set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
3054  }
3055 }
3056 
3057 
3059  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
3060 }
3061 
3062 
3064  set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
3065 }
3066 
3067 
3069  return FunctionWithPrototype::decode(bit_field3());
3070 }
3071 
3072 
3073 void Map::set_is_access_check_needed(bool access_check_needed) {
3074  if (access_check_needed) {
3075  set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
3076  } else {
3077  set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
3078  }
3079 }
3080 
3081 
3083  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
3084 }
3085 
3086 
3087 void Map::set_is_extensible(bool value) {
3088  if (value) {
3089  set_bit_field2(bit_field2() | (1 << kIsExtensible));
3090  } else {
3091  set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
3092  }
3093 }
3094 
3096  return ((1 << kIsExtensible) & bit_field2()) != 0;
3097 }
3098 
3099 
3101  if (value) {
3102  set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
3103  } else {
3104  set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
3105  }
3106 }
3107 
3109  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
3110 }
3111 
3112 
3113 void Map::set_is_shared(bool value) {
3114  set_bit_field3(IsShared::update(bit_field3(), value));
3115 }
3116 
3117 
3119  return IsShared::decode(bit_field3());
3120 }
3121 
3122 
3123 void Map::set_dictionary_map(bool value) {
3124  set_bit_field3(DictionaryMap::update(bit_field3(), value));
3125 }
3126 
3127 
3129  return DictionaryMap::decode(bit_field3());
3130 }
3131 
3132 
3134  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
3135 }
3136 
3137 
3139  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
3140 }
3141 
3142 
3143 void Map::set_owns_descriptors(bool is_shared) {
3144  set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
3145 }
3146 
3147 
3149  return OwnsDescriptors::decode(bit_field3());
3150 }
3151 
3152 
3154  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
3155  // Make sure that all call stubs have an arguments count.
3156  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
3157  ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
3158  ExtractArgumentsCountFromFlags(flags) >= 0);
3159  WRITE_INT_FIELD(this, kFlagsOffset, flags);
3160 }
3161 
3162 
3164  return ExtractKindFromFlags(flags());
3165 }
3166 
3167 
3169  InlineCacheState result = ExtractICStateFromFlags(flags());
3170  // Only allow uninitialized or debugger states for non-IC code
3171  // objects. This is used in the debugger to determine whether or not
3172  // a call to code object has been replaced with a debug break call.
3173  ASSERT(is_inline_cache_stub() ||
3174  result == UNINITIALIZED ||
3175  result == DEBUG_BREAK ||
3176  result == DEBUG_PREPARE_STEP_IN);
3177  return result;
3178 }
3179 
3180 
3182  ASSERT(is_inline_cache_stub());
3183  return ExtractExtraICStateFromFlags(flags());
3184 }
3185 
3186 
3188  return ExtractTypeFromFlags(flags());
3189 }
3190 
3191 
3193  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3194  return ExtractArgumentsCountFromFlags(flags());
3195 }
3196 
3197 
3199  ASSERT(kind() == STUB ||
3200  kind() == UNARY_OP_IC ||
3201  kind() == BINARY_OP_IC ||
3202  kind() == COMPARE_IC ||
3203  kind() == TO_BOOLEAN_IC);
3204  return StubMajorKeyField::decode(
3205  READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
3206 }
3207 
3208 
3209 void Code::set_major_key(int major) {
3210  ASSERT(kind() == STUB ||
3211  kind() == UNARY_OP_IC ||
3212  kind() == BINARY_OP_IC ||
3213  kind() == COMPARE_IC ||
3214  kind() == TO_BOOLEAN_IC);
3215  ASSERT(0 <= major && major < 256);
3216  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
3217  int updated = StubMajorKeyField::update(previous, major);
3218  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
3219 }
3220 
3221 
3223  return kind() == STUB && IsPregeneratedField::decode(flags());
3224 }
3225 
3226 
3227 void Code::set_is_pregenerated(bool value) {
3228  ASSERT(kind() == STUB);
3229  Flags f = flags();
3230  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3231  set_flags(f);
3232 }
3233 
3234 
3236  ASSERT_EQ(FUNCTION, kind());
3237  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3238 }
3239 
3240 
3241 void Code::set_optimizable(bool value) {
3242  ASSERT_EQ(FUNCTION, kind());
3243  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3244 }
3245 
3246 
3248  ASSERT_EQ(FUNCTION, kind());
3249  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3250  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3251 }
3252 
3253 
3255  ASSERT_EQ(FUNCTION, kind());
3256  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3257  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3258  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3259 }
3260 
3261 
3263  ASSERT_EQ(FUNCTION, kind());
3264  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3265  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3266 }
3267 
3268 
3270  ASSERT_EQ(FUNCTION, kind());
3271  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3272  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3273  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3274 }
3275 
3276 
3278  ASSERT_EQ(FUNCTION, kind());
3279  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3280  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3281 }
3282 
3283 
3285  ASSERT_EQ(FUNCTION, kind());
3286  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3287  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3288  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3289 }
3290 
3291 
3293  ASSERT_EQ(FUNCTION, kind());
3294  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3295 }
3296 
3297 
3299  ASSERT_EQ(FUNCTION, kind());
3300  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3301  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3302 }
3303 
3304 
3306  ASSERT_EQ(FUNCTION, kind());
3307  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3308 }
3309 
3310 
3311 void Code::set_profiler_ticks(int ticks) {
3312  ASSERT_EQ(FUNCTION, kind());
3313  ASSERT(ticks < 256);
3314  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3315 }
3316 
3317 
3318 unsigned Code::stack_slots() {
3319  ASSERT(kind() == OPTIMIZED_FUNCTION);
3320  return StackSlotsField::decode(
3321  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3322 }
3323 
3324 
3325 void Code::set_stack_slots(unsigned slots) {
3326  CHECK(slots <= (1 << kStackSlotsBitCount));
3327  ASSERT(kind() == OPTIMIZED_FUNCTION);
3328  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3329  int updated = StackSlotsField::update(previous, slots);
3330  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3331 }
3332 
3333 
3335  ASSERT(kind() == OPTIMIZED_FUNCTION);
3336  return SafepointTableOffsetField::decode(
3337  READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
3338 }
3339 
3340 
3341 void Code::set_safepoint_table_offset(unsigned offset) {
3342  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
3343  ASSERT(kind() == OPTIMIZED_FUNCTION);
3344  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3345  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
3346  int updated = SafepointTableOffsetField::update(previous, offset);
3347  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
3348 }
3349 
3350 
3352  ASSERT_EQ(FUNCTION, kind());
3353  return StackCheckTableOffsetField::decode(
3354  READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
3355 }
3356 
3357 
3358 void Code::set_stack_check_table_offset(unsigned offset) {
3359  ASSERT_EQ(FUNCTION, kind());
3360  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3361  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
3362  int updated = StackCheckTableOffsetField::update(previous, offset);
3363  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
3364 }
3365 
3366 
3368  ASSERT(is_call_stub() || is_keyed_call_stub());
3369  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3370  return static_cast<CheckType>(type);
3371 }
3372 
3373 
3375  ASSERT(is_call_stub() || is_keyed_call_stub());
3376  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3377 }
3378 
3379 
3381  ASSERT(is_unary_op_stub());
3382  return UnaryOpTypeField::decode(
3383  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3384 }
3385 
3386 
3388  ASSERT(is_unary_op_stub());
3389  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3390  int updated = UnaryOpTypeField::update(previous, value);
3391  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3392 }
3393 
3394 
3396  ASSERT(is_binary_op_stub());
3397  return BinaryOpTypeField::decode(
3398  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3399 }
3400 
3401 
3403  ASSERT(is_binary_op_stub());
3404  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3405  int updated = BinaryOpTypeField::update(previous, value);
3406  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3407 }
3408 
3409 
3411  ASSERT(is_binary_op_stub());
3412  return BinaryOpResultTypeField::decode(
3413  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3414 }
3415 
3416 
3418  ASSERT(is_binary_op_stub());
3419  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3420  int updated = BinaryOpResultTypeField::update(previous, value);
3421  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3422 }
3423 
3424 
3426  ASSERT(is_compare_ic_stub());
3427  return CompareStateField::decode(
3428  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3429 }
3430 
3431 
3433  ASSERT(is_compare_ic_stub());
3434  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3435  int updated = CompareStateField::update(previous, value);
3436  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3437 }
3438 
3439 
3441  ASSERT(is_compare_ic_stub());
3442  return CompareOperationField::decode(
3443  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3444 }
3445 
3446 
3448  ASSERT(is_compare_ic_stub());
3449  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3450  int updated = CompareOperationField::update(previous, value);
3451  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3452 }
3453 
3454 
3456  ASSERT(is_to_boolean_ic_stub());
3457  return ToBooleanStateField::decode(
3458  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3459 }
3460 
3461 
3463  ASSERT(is_to_boolean_ic_stub());
3464  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3465  int updated = ToBooleanStateField::update(previous, value);
3466  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3467 }
3468 
3469 
3471  ASSERT(kind() == STUB);
3472  return HasFunctionCacheField::decode(
3473  READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
3474 }
3475 
3476 
3478  ASSERT(kind() == STUB);
3479  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
3480  int updated = HasFunctionCacheField::update(previous, flag);
3481  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
3482 }
3483 
3484 
3486  Kind kind = this->kind();
3487  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3488 }
3489 
3490 
3491 Code::Flags Code::ComputeFlags(Kind kind,
3492  InlineCacheState ic_state,
3493  ExtraICState extra_ic_state,
3494  StubType type,
3495  int argc,
3496  InlineCacheHolderFlag holder) {
3497  // Extra IC state is only allowed for call IC stubs or for store IC
3498  // stubs.
3499  ASSERT(extra_ic_state == kNoExtraICState ||
3500  kind == CALL_IC ||
3501  kind == STORE_IC ||
3502  kind == KEYED_STORE_IC);
3503  // Compute the bit mask.
3504  int bits = KindField::encode(kind)
3505  | ICStateField::encode(ic_state)
3506  | TypeField::encode(type)
3507  | ExtraICStateField::encode(extra_ic_state)
3508  | (argc << kArgumentsCountShift)
3509  | CacheHolderField::encode(holder);
3510  return static_cast<Flags>(bits);
3511 }
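// Editor's note: ComputeFlags packs kind, IC state, stub type, extra IC
// state, argument count and cache-holder flag into a single integer through
// the BitField-style encoders; the Extract*FromFlags helpers below undo the
// packing field by field, so a round trip returns each component unchanged.
// Sketch (argument defaults assumed, see the declaration in objects.h):
//
//   Code::Flags flags = Code::ComputeFlags(Code::STUB, UNINITIALIZED);
//   ASSERT(Code::ExtractKindFromFlags(flags) == Code::STUB);
//   ASSERT(Code::ExtractICStateFromFlags(flags) == UNINITIALIZED);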
3512 
3513 
3514 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
3515  StubType type,
3516  ExtraICState extra_ic_state,
3517  InlineCacheHolderFlag holder,
3518  int argc) {
3519  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3520 }
3521 
3522 
3523 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
3524  return KindField::decode(flags);
3525 }
3526 
3527 
3528 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
3529  return ICStateField::decode(flags);
3530 }
3531 
3532 
3534  return ExtraICStateField::decode(flags);
3535 }
3536 
3537 
3538 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
3539  return TypeField::decode(flags);
3540 }
3541 
3542 
3543 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
3544  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3545 }
3546 
3547 
3549  return CacheHolderField::decode(flags);
3550 }
3551 
3552 
3554  int bits = flags & ~TypeField::kMask;
3555  return static_cast<Flags>(bits);
3556 }
3557 
3558 
3559 Code* Code::GetCodeFromTargetAddress(Address address) {
3560  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3561  // GetCodeFromTargetAddress might be called when marking objects during mark
3562  // sweep. reinterpret_cast is therefore used instead of the more appropriate
3563  // Code::cast. Code::cast does not work when the object's map is
3564  // marked.
3565  Code* result = reinterpret_cast<Code*>(code);
3566  return result;
3567 }
3568 
3569 
3571  return HeapObject::
3572  FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3573 }
3574 
3575 
3576 Object* Map::prototype() {
3577  return READ_FIELD(this, kPrototypeOffset);
3578 }
3579 
3580 
3581 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3582  ASSERT(value->IsNull() || value->IsJSReceiver());
3583  WRITE_FIELD(this, kPrototypeOffset, value);
3584  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3585 }
3586 
3587 
3588 // If the descriptor is using the empty transition array, install a new empty
3589 // transition array that will have place for an element transition.
3590 static MaybeObject* EnsureHasTransitionArray(Map* map) {
3591  TransitionArray* transitions;
3592  MaybeObject* maybe_transitions;
3593  if (!map->HasTransitionArray()) {
3594  maybe_transitions = TransitionArray::Allocate(0);
3595  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
3596  transitions->set_back_pointer_storage(map->GetBackPointer());
3597  } else if (!map->transitions()->IsFullTransitionArray()) {
3598  maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
3599  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
3600  } else {
3601  return map;
3602  }
3603  map->set_transitions(transitions);
3604  return transitions;
3605 }
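// Editor's note: callers of EnsureHasTransitionArray follow the usual
// MaybeObject protocol -- bail out on allocation failure, after which the map
// is guaranteed to own a full transition array (see
// Map::set_elements_transition_map below for one such caller):
//
//   MaybeObject* maybe_array = EnsureHasTransitionArray(map);
//   if (maybe_array->IsFailure()) return maybe_array;
//   // From here on map->transitions() is safe to use and to mutate.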
3606 
3607 
3608 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
3609  int len = descriptors->number_of_descriptors();
3610 #ifdef DEBUG
3612 
3613  bool used_indices[DescriptorArray::kMaxNumberOfDescriptors];
3614  for (int i = 0; i < len; ++i) used_indices[i] = false;
3615 
3616  // Ensure that all enumeration indexes between 1 and length occur uniquely in
3617  // the descriptor array.
3618  for (int i = 0; i < len; ++i) {
3619  int enum_index = descriptors->GetDetails(i).descriptor_index() -
3620  PropertyDetails::kInitialIndex;
3621  ASSERT(0 <= enum_index && enum_index < len);
3622  ASSERT(!used_indices[enum_index]);
3623  used_indices[enum_index] = true;
3624  }
3625 #endif
3626 
3627  set_instance_descriptors(descriptors);
3628  SetNumberOfOwnDescriptors(len);
3629 }
3630 
3631 
3632 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
3633 SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
3634 
3635 
3636 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
3637  Object* back_pointer = GetBackPointer();
3638 
3639  if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
3640  ZapTransitions();
3641  }
3642 
3643  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
3644  CONDITIONAL_WRITE_BARRIER(
3645  heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
3646 }
3647 
3648 
3649 void Map::AppendDescriptor(Descriptor* desc,
3650  const DescriptorArray::WhitenessWitness& witness) {
3651  DescriptorArray* descriptors = instance_descriptors();
3652  int number_of_own_descriptors = NumberOfOwnDescriptors();
3653  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
3654  descriptors->Append(desc, witness);
3655  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
3656 }
3657 
3658 
3659 Object* Map::GetBackPointer() {
3660  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
3661  if (object->IsDescriptorArray()) {
3662  return TransitionArray::cast(object)->back_pointer_storage();
3663  } else {
3664  ASSERT(object->IsMap() || object->IsUndefined());
3665  return object;
3666  }
3667 }
3668 
3669 
3670 bool Map::HasElementsTransition() {
3671  return HasTransitionArray() && transitions()->HasElementsTransition();
3672 }
3673 
3674 
3675 bool Map::HasTransitionArray() {
3676  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
3677  return object->IsTransitionArray();
3678 }
3679 
3680 
3681 Map* Map::elements_transition_map() {
3682  return transitions()->elements_transition();
3683 }
3684 
3685 
3686 bool Map::CanHaveMoreTransitions() {
3687  if (!HasTransitionArray()) return true;
3688  return FixedArray::SizeFor(transitions()->length() +
3689  TransitionArray::kTransitionSize)
3690  <= Page::kMaxNonCodeHeapObjectSize;
3691 }
3692 
3693 
3694 MaybeObject* Map::AddTransition(String* key,
3695  Map* target,
3696  SimpleTransitionFlag flag) {
3697  if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
3698  return TransitionArray::NewWith(flag, key, target, GetBackPointer());
3699 }
3700 
3701 
3702 void Map::SetTransition(int transition_index, Map* target) {
3703  transitions()->SetTarget(transition_index, target);
3704 }
3705 
3706 
3707 Map* Map::GetTransition(int transition_index) {
3708  return transitions()->GetTarget(transition_index);
3709 }
3710 
3711 
3712 MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
3713  MaybeObject* allow_elements = EnsureHasTransitionArray(this);
3714  if (allow_elements->IsFailure()) return allow_elements;
3715  transitions()->set_elements_transition(transitioned_map);
3716  return this;
3717 }
3718 
3719 
3720 FixedArray* Map::GetPrototypeTransitions() {
3721  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
3722  if (!transitions()->HasPrototypeTransitions()) {
3723  return GetHeap()->empty_fixed_array();
3724  }
3725  return transitions()->GetPrototypeTransitions();
3726 }
3727 
3728 
3729 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
3730  MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
3731  if (allow_prototype->IsFailure()) return allow_prototype;
3732 #ifdef DEBUG
3733  if (HasPrototypeTransitions()) {
3734  ASSERT(GetPrototypeTransitions() != proto_transitions);
3735  ZapPrototypeTransitions();
3736  }
3737 #endif
3738  transitions()->SetPrototypeTransitions(proto_transitions);
3739  return this;
3740 }
3741 
3742 
3743 bool Map::HasPrototypeTransitions() {
3744  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
3745 }
3746 
3747 
3748 TransitionArray* Map::transitions() {
3749  ASSERT(HasTransitionArray());
3750  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
3751  return TransitionArray::cast(object);
3752 }
3753 
3754 
3755 void Map::set_transitions(TransitionArray* transition_array,
3756  WriteBarrierMode mode) {
3757  // In release mode, only run this code if verify_heap is on.
3758  if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
3759  CHECK(transitions() != transition_array);
3760  ZapTransitions();
3761  }
3762 
3763  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
3764  CONDITIONAL_WRITE_BARRIER(
3765  GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
3766 }
3767 
3768 
3769 void Map::init_back_pointer(Object* undefined) {
3770  ASSERT(undefined->IsUndefined());
3771  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
3772 }
3773 
3774 
3775 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
3776  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
3777  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3778  (value->IsMap() && GetBackPointer()->IsUndefined()));
3779  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
3780  if (object->IsTransitionArray()) {
3781  transitions()->set_back_pointer_storage(value);
3782  } else {
3783  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
3784  CONDITIONAL_WRITE_BARRIER(
3785  GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
3786  }
3787 }
3788 
3789 
3790 // Can either be Smi (no transitions), normal transition array, or a transition
3791 // array with the header overwritten as a Smi (thus iterating).
3792 TransitionArray* Map::unchecked_transition_array() {
3793  Object* object = *HeapObject::RawField(this,
3794  Map::kTransitionsOrBackPointerOffset);
3795  TransitionArray* transition_array = static_cast<TransitionArray*>(object);
3796  return transition_array;
3797 }
3798 
3799 
3801  ASSERT(HasTransitionArray());
3802  ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
3803  return unchecked_transition_array()->UncheckedPrototypeTransitions();
3804 }
3805 
3806 
3807 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3808 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3809 
3810 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3811 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3812 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
3813 
3814 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3815 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
3816 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3817 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3818 
3819 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
3820 
3821 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3822 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3823 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3824 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3825 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3826 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
3827  kExpectedReceiverTypeOffset)
3828 
3829 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3830 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3831 
3832 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3833 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3834 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3835 
3836 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3837 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3838 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3839 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3840 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3841 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3842 
3843 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3844 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3845 
3846 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3847 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3848 
3849 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3850 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3851 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3852  kPropertyAccessorsOffset)
3853 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3854  kPrototypeTemplateOffset)
3855 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3856 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3857  kNamedPropertyHandlerOffset)
3858 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3859  kIndexedPropertyHandlerOffset)
3860 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3861  kInstanceTemplateOffset)
3862 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3863 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3864 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3865  kInstanceCallHandlerOffset)
3866 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3867  kAccessCheckInfoOffset)
3868 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3869 
3870 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3871 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3872  kInternalFieldCountOffset)
3873 
3874 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3875 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3876 
3877 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3878 
3879 ACCESSORS(Script, source, Object, kSourceOffset)
3880 ACCESSORS(Script, name, Object, kNameOffset)
3881 ACCESSORS(Script, id, Object, kIdOffset)
3882 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3883 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3884 ACCESSORS(Script, data, Object, kDataOffset)
3885 ACCESSORS(Script, context_data, Object, kContextOffset)
3886 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3887 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3888 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3889 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3890 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3891 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3893  kEvalFrominstructionsOffsetOffset)
3894 
3895 #ifdef ENABLE_DEBUGGER_SUPPORT
3896 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3897 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3898 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3899 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3900 
3901 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3902 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3903 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3904 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3905 #endif
3906 
3907 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3908 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
3909  kOptimizedCodeMapOffset)
3910 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3911 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3912 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3913  kInstanceClassNameOffset)
3914 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3915 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3916 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3917 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3918 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3919  kThisPropertyAssignmentsOffset)
3920 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3921 
3922 
3923 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3924  kHiddenPrototypeBit)
3925 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3926 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3927  kNeedsAccessCheckBit)
3928 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3929  kReadOnlyPrototypeBit)
3930 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
3931  kIsExpressionBit)
3932 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3933  kIsTopLevelBit)
3934 BOOL_GETTER(SharedFunctionInfo,
3936  has_only_simple_this_property_assignments,
3937  kHasOnlySimpleThisPropertyAssignments)
3938 BOOL_ACCESSORS(SharedFunctionInfo,
3939  compiler_hints,
3941  kAllowLazyCompilation)
3942 BOOL_ACCESSORS(SharedFunctionInfo,
3943  compiler_hints,
3944  allows_lazy_compilation_without_context,
3945  kAllowLazyCompilationWithoutContext)
3946 BOOL_ACCESSORS(SharedFunctionInfo,
3947  compiler_hints,
3949  kUsesArguments)
3950 BOOL_ACCESSORS(SharedFunctionInfo,
3951  compiler_hints,
3952  has_duplicate_parameters,
3953  kHasDuplicateParameters)
3954 
3955 
3956 #if V8_HOST_ARCH_32_BIT
3957 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3958 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3959  kFormalParameterCountOffset)
3960 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3961  kExpectedNofPropertiesOffset)
3962 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3963 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3964  kStartPositionAndTypeOffset)
3965 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3966 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3967  kFunctionTokenPositionOffset)
3968 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3969  kCompilerHintsOffset)
3970 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3971  kThisPropertyAssignmentsCountOffset)
3972 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3973 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
3974 SMI_ACCESSORS(SharedFunctionInfo,
3976  kStressDeoptCounterOffset)
3977 #else
3978 
3979 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3980  STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3981  int holder::name() { \
3982  int value = READ_INT_FIELD(this, offset); \
3983  ASSERT(kHeapObjectTag == 1); \
3984  ASSERT((value & kHeapObjectTag) == 0); \
3985  return value >> 1; \
3986  } \
3987  void holder::set_##name(int value) { \
3988  ASSERT(kHeapObjectTag == 1); \
3989  ASSERT((value & 0xC0000000) == 0xC0000000 || \
3990  (value & 0xC0000000) == 0x000000000); \
3991  WRITE_INT_FIELD(this, \
3992  offset, \
3993  (value << 1) & ~kHeapObjectTag); \
3994  }
3995 
3996 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3997  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3998  INT_ACCESSORS(holder, name, offset)
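// Editor's note: on 64-bit targets two of these ints share one Smi-tagged
// word.  The "LO" half stores value << 1 so that the heap-object tag bit
// (bit 0) stays clear, and the getter shifts it back out:
//
//   stored = (value << 1) & ~kHeapObjectTag;   // e.g. 42 is stored as 84
//   value  = stored >> 1;                      // 84 reads back as 42
//
// The range ASSERT above only admits values whose top two bits are 00 or 11,
// i.e. values that survive the shift with their sign intact.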
3999 
4000 
4001 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
4002 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4003  formal_parameter_count,
4004  kFormalParameterCountOffset)
4005 
4006 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4007  expected_nof_properties,
4008  kExpectedNofPropertiesOffset)
4009 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
4010 
4011 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
4012 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4013  start_position_and_type,
4014  kStartPositionAndTypeOffset)
4015 
4016 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4017  function_token_position,
4018  kFunctionTokenPositionOffset)
4019 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4020  compiler_hints,
4021  kCompilerHintsOffset)
4022 
4023 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
4024  this_property_assignments_count,
4025  kThisPropertyAssignmentsCountOffset)
4026 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
4027 
4028 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, counters, kCountersOffset)
4029 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
4030  stress_deopt_counter,
4031  kStressDeoptCounterOffset)
4032 #endif
4033 
4034 
4036  return READ_BYTE_FIELD(this, kConstructionCountOffset);
4037 }
4038 
4039 
4041  ASSERT(0 <= value && value < 256);
4042  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
4043 }
4044 
4045 
4046 BOOL_ACCESSORS(SharedFunctionInfo,
4047  compiler_hints,
4048  live_objects_may_exist,
4049  kLiveObjectsMayExist)
4050 
4051 
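// In-object slack tracking is treated as "in progress" for as long as the
// shared function info still holds on to the function's initial map; the
// reference is reset to undefined once tracking completes.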
4052 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
4053  return initial_map() != GetHeap()->undefined_value();
4054 }
4055 
4056 
4057 BOOL_GETTER(SharedFunctionInfo,
4058  compiler_hints,
4059  optimization_disabled,
4060  kOptimizationDisabled)
4061 
4062 
4063 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
4064  set_compiler_hints(BooleanBit::set(compiler_hints(),
4065  kOptimizationDisabled,
4066  disable));
4067  // If disabling optimizations we reflect that in the code object so
4068  // it will not be counted as optimizable code.
4069  if ((code()->kind() == Code::FUNCTION) && disable) {
4070  code()->set_optimizable(false);
4071  }
4072 }
4073 
4074 
4075 int SharedFunctionInfo::profiler_ticks() {
4076  if (code()->kind() != Code::FUNCTION) return 0;
4077  return code()->profiler_ticks();
4078 }
4079 
4080 
4081 LanguageMode SharedFunctionInfo::language_mode() {
4082  int hints = compiler_hints();
4083  if (BooleanBit::get(hints, kExtendedModeFunction)) {
4084  ASSERT(BooleanBit::get(hints, kStrictModeFunction));
4085  return EXTENDED_MODE;
4086  }
4087  return BooleanBit::get(hints, kStrictModeFunction)
4088  ? STRICT_MODE : CLASSIC_MODE;
4089 }
4090 
4091 
4092 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
4093  // We only allow language mode transitions that set the same language mode
4094  // again or go up in the chain:
4095  // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
4096  ASSERT(this->language_mode() == CLASSIC_MODE ||
4097  this->language_mode() == language_mode ||
4098  language_mode == EXTENDED_MODE);
4099  int hints = compiler_hints();
4100  hints = BooleanBit::set(
4101  hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
4102  hints = BooleanBit::set(
4103  hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
4104  set_compiler_hints(hints);
4105 }
4106 
4107 
4108 bool SharedFunctionInfo::is_classic_mode() {
4109  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
4110 }
4111 
4112 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
4113  kExtendedModeFunction)
4114 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
4115 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
4116  name_should_print_as_anonymous,
4117  kNameShouldPrintAsAnonymous)
4118 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
4119 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
4120 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
4121 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
4122  kDontOptimize)
4123 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
4124 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
4125 
4126 void SharedFunctionInfo::BeforeVisitingPointers() {
4127  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
4128 
4129  // Flush optimized code map on major GC.
4130  // Note: we may experiment with rebuilding it or retaining entries
4131  // which should survive as we iterate through optimized functions
4132  // anyway.
4133  set_optimized_code_map(Smi::FromInt(0));
4134 }
4135 
4136 
4137 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
4138 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
4139 
4140 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
4141 
4142 bool Script::HasValidSource() {
4143  Object* src = this->source();
4144  if (!src->IsString()) return true;
4145  String* src_str = String::cast(src);
4146  if (!StringShape(src_str).IsExternal()) return true;
4147  if (src_str->IsAsciiRepresentation()) {
4148  return ExternalAsciiString::cast(src)->resource() != NULL;
4149  } else if (src_str->IsTwoByteRepresentation()) {
4150  return ExternalTwoByteString::cast(src)->resource() != NULL;
4151  }
4152  return true;
4153 }
4154 
4155 
4156 void SharedFunctionInfo::DontAdaptArguments() {
4157  ASSERT(code()->kind() == Code::BUILTIN);
4158  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
4159 }
4160 
4161 
4162 int SharedFunctionInfo::start_position() {
4163  return start_position_and_type() >> kStartPositionShift;
4164 }
4165 
4166 
4167 void SharedFunctionInfo::set_start_position(int start_position) {
4168  set_start_position_and_type((start_position << kStartPositionShift)
4169  | (start_position_and_type() & ~kStartPositionMask));
4170 }
4171 
4172 
4173 Code* SharedFunctionInfo::code() {
4174  return Code::cast(READ_FIELD(this, kCodeOffset));
4175 }
4176 
4177 
4178 Code* SharedFunctionInfo::unchecked_code() {
4179  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
4180 }
4181 
4182 
4183 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
4184  WRITE_FIELD(this, kCodeOffset, value);
4185  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
4186 }
4187 
4188 
4189 ScopeInfo* SharedFunctionInfo::scope_info() {
4190  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
4191 }
4192 
4193 
4194 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
4195  WriteBarrierMode mode) {
4196  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
4197  CONDITIONAL_WRITE_BARRIER(GetHeap(),
4198  this,
4199  kScopeInfoOffset,
4200  reinterpret_cast<Object*>(value),
4201  mode);
4202 }
4203 
4204 
4205 bool SharedFunctionInfo::is_compiled() {
4206  return code() !=
4207  Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
4208 }
4209 
4210 
4211 bool SharedFunctionInfo::IsApiFunction() {
4212  return function_data()->IsFunctionTemplateInfo();
4213 }
4214 
4215 
4216 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
4217  ASSERT(IsApiFunction());
4218  return FunctionTemplateInfo::cast(function_data());
4219 }
4220 
4221 
4222 bool SharedFunctionInfo::HasBuiltinFunctionId() {
4223  return function_data()->IsSmi();
4224 }
4225 
4226 
4227 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
4228  ASSERT(HasBuiltinFunctionId());
4229  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
4230 }
4231 
4232 
4233 int SharedFunctionInfo::code_age() {
4234  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
4235 }
4236 
4237 
4238 void SharedFunctionInfo::set_code_age(int code_age) {
4239  int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
4240  set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
4241 }
4242 
4243 
4244 int SharedFunctionInfo::ic_age() {
4245  return ICAgeBits::decode(counters());
4246 }
4247 
4248 
4249 void SharedFunctionInfo::set_ic_age(int ic_age) {
4250  set_counters(ICAgeBits::update(counters(), ic_age));
4251 }
4252 
4253 
4254 int SharedFunctionInfo::deopt_count() {
4255  return DeoptCountBits::decode(counters());
4256 }
4257 
4258 
4259 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
4260  set_counters(DeoptCountBits::update(counters(), deopt_count));
4261 }
4262 
4263 
4264 void SharedFunctionInfo::increment_deopt_count() {
4265  int value = counters();
4266  int deopt_count = DeoptCountBits::decode(value);
4267  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4268  set_counters(DeoptCountBits::update(value, deopt_count));
4269 }
4270 
4271 
4272 int SharedFunctionInfo::opt_reenable_tries() {
4273  return OptReenableTriesBits::decode(counters());
4274 }
4275 
4276 
4277 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
4278  set_counters(OptReenableTriesBits::update(counters(), tries));
4279 }
4280 
4281 
4282 bool SharedFunctionInfo::has_deoptimization_support() {
4283  Code* code = this->code();
4284  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
4285 }
4286 
4287 
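// Exponential backoff: optimization is only re-enabled when the retry
// counter reaches a power of two of at least 16 (16, 32, 64, ...), so
// functions that keep deoptimizing are retried ever more rarely.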
4288 void SharedFunctionInfo::TryReenableOptimization() {
4289  int tries = opt_reenable_tries();
4290  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
4291  // We reenable optimization whenever the number of tries is a large
4292  // enough power of 2.
4293  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
4294  set_optimization_disabled(false);
4295  set_opt_count(0);
4296  set_deopt_count(0);
4297  code()->set_optimizable(true);
4298  }
4299 }
4300 
4301 
4302 bool JSFunction::IsBuiltin() {
4303  return context()->global_object()->IsJSBuiltinsObject();
4304 }
4305 
4306 
4307 bool JSFunction::NeedsArgumentsAdaption() {
4308  return shared()->formal_parameter_count() !=
4309  SharedFunctionInfo::kDontAdaptArgumentsSentinel;
4310 }
4311 
4312 
4313 bool JSFunction::IsOptimized() {
4314  return code()->kind() == Code::OPTIMIZED_FUNCTION;
4315 }
4316 
4317 
4318 bool JSFunction::IsOptimizable() {
4319  return code()->kind() == Code::FUNCTION && code()->optimizable();
4320 }
4321 
4322 
4323 bool JSFunction::IsMarkedForLazyRecompilation() {
4324  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
4325 }
4326 
4327 
4328 bool JSFunction::IsMarkedForParallelRecompilation() {
4329  return code() ==
4330  GetIsolate()->builtins()->builtin(Builtins::kParallelRecompile);
4331 }
4332 
4333 
4334 bool JSFunction::IsInRecompileQueue() {
4335  return code() == GetIsolate()->builtins()->builtin(
4336  Builtins::kInRecompileQueue);
4337 }
4338 
4339 
4340 Code* JSFunction::code() {
4341  return Code::cast(unchecked_code());
4342 }
4343 
4344 
4345 Code* JSFunction::unchecked_code() {
4346  return reinterpret_cast<Code*>(
4347  Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
4348 }
4349 
4350 
4351 void JSFunction::set_code(Code* value) {
4352  ASSERT(!HEAP->InNewSpace(value));
4353  Address entry = value->entry();
4354  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
4355  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
4356  this,
4357  HeapObject::RawField(this, kCodeEntryOffset),
4358  value);
4359 }
4360 
4361 
4362 void JSFunction::ReplaceCode(Code* code) {
4363  bool was_optimized = IsOptimized();
4364  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
4365 
4366  set_code(code);
4367 
4368  // Add/remove the function from the list of optimized functions for this
4369  // context based on the state change.
4370  if (!was_optimized && is_optimized) {
4371  context()->native_context()->AddOptimizedFunction(this);
4372  }
4373  if (was_optimized && !is_optimized) {
4374  context()->native_context()->RemoveOptimizedFunction(this);
4375  }
4376 }
4377 
4378 
4379 Context* JSFunction::context() {
4380  return Context::cast(READ_FIELD(this, kContextOffset));
4381 }
4382 
4383 
4384 Object* JSFunction::unchecked_context() {
4385  return READ_FIELD(this, kContextOffset);
4386 }
4387 
4388 
4389 SharedFunctionInfo* JSFunction::unchecked_shared() {
4390  return reinterpret_cast<SharedFunctionInfo*>(
4391  READ_FIELD(this, kSharedFunctionInfoOffset));
4392 }
4393 
4394 
4395 void JSFunction::set_context(Object* value) {
4396  ASSERT(value->IsUndefined() || value->IsContext());
4397  WRITE_FIELD(this, kContextOffset, value);
4398  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
4399 }
4400 
4401 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
4402  kPrototypeOrInitialMapOffset)
4403 
4404 
4405 Map* JSFunction::initial_map() {
4406  return Map::cast(prototype_or_initial_map());
4407 }
4408 
4409 
4410 void JSFunction::set_initial_map(Map* value) {
4411  set_prototype_or_initial_map(value);
4412 }
4413 
4414 
4415 MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
4416  Map* initial_map) {
4417  Context* native_context = context()->native_context();
4418  Object* array_function =
4419  native_context->get(Context::ARRAY_FUNCTION_INDEX);
4420  if (array_function->IsJSFunction() &&
4421  this == JSFunction::cast(array_function)) {
4422  // Replace all of the cached initial array maps in the native context with
4423  // the appropriate transitioned elements kind maps.
4424  Heap* heap = GetHeap();
4425  MaybeObject* maybe_maps =
4426  heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED);
4427  FixedArray* maps;
4428  if (!maybe_maps->To(&maps)) return maybe_maps;
4429 
4430  Map* current_map = initial_map;
4431  ElementsKind kind = current_map->elements_kind();
4432  ASSERT(kind == GetInitialFastElementsKind());
4433  maps->set(kind, current_map);
4434  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
4435  i < kFastElementsKindCount; ++i) {
4436  Map* new_map;
4437  ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
4438  MaybeObject* maybe_new_map =
4439  current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
4440  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
4441  maps->set(next_kind, new_map);
4442  current_map = new_map;
4443  }
4444  native_context->set_js_array_maps(maps);
4445  }
4446  set_initial_map(initial_map);
4447  return this;
4448 }
4449 
4450 
4451 bool JSFunction::has_initial_map() {
4452  return prototype_or_initial_map()->IsMap();
4453 }
4454 
4455 
4456 bool JSFunction::has_instance_prototype() {
4457  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4458 }
4459 
4460 
4461 bool JSFunction::has_prototype() {
4462  return map()->has_non_instance_prototype() || has_instance_prototype();
4463 }
4464 
4465 
4466 Object* JSFunction::instance_prototype() {
4467  ASSERT(has_instance_prototype());
4468  if (has_initial_map()) return initial_map()->prototype();
4469  // When there is no initial map and the prototype is a JSObject, the
4470  // initial map field is used for the prototype field.
4471  return prototype_or_initial_map();
4472 }
4473 
4474 
4475 Object* JSFunction::prototype() {
4476  ASSERT(has_prototype());
4477  // If the function's prototype property has been set to a non-JSObject
4478  // value, that value is stored in the constructor field of the map.
4479  if (map()->has_non_instance_prototype()) return map()->constructor();
4480  return instance_prototype();
4481 }
4482 
4483 
4484 bool JSFunction::should_have_prototype() {
4485  return map()->function_with_prototype();
4486 }
4487 
4488 
4489 bool JSFunction::is_compiled() {
4490  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
4491 }
4492 
4493 
4494 FixedArray* JSFunction::literals() {
4495  ASSERT(!shared()->bound());
4496  return literals_or_bindings();
4497 }
4498 
4499 
4500 void JSFunction::set_literals(FixedArray* literals) {
4501  ASSERT(!shared()->bound());
4502  set_literals_or_bindings(literals);
4503 }
4504 
4505 
4506 FixedArray* JSFunction::function_bindings() {
4507  ASSERT(shared()->bound());
4508  return literals_or_bindings();
4509 }
4510 
4511 
4512 void JSFunction::set_function_bindings(FixedArray* bindings) {
4513  ASSERT(shared()->bound());
4514  // Bound function literal may be initialized to the empty fixed array
4515  // before the bindings are set.
4516  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4517  bindings->map() == GetHeap()->fixed_cow_array_map());
4518  set_literals_or_bindings(bindings);
4519 }
4520 
4521 
4522 int JSFunction::NumberOfLiterals() {
4523  ASSERT(!shared()->bound());
4524  return literals()->length();
4525 }
4526 
4527 
4528 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
4529  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4530  return READ_FIELD(this, OffsetOfFunctionWithId(id));
4531 }
4532 
4533 
4534 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
4535  Object* value) {
4536  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4537  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4538  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
4539 }
4540 
4541 
4542 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
4543  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4544  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
4545 }
4546 
4547 
4548 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
4549  Code* value) {
4550  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4551  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
4552  ASSERT(!HEAP->InNewSpace(value));
4553 }
4554 
4555 
4556 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4557 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4558 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4559 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4560 
4561 
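// Fills every field of a freshly allocated proxy with the same value. The
// ASSERT below guarantees the value cannot live in new space, which is what
// makes it safe to skip write barriers here.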
4562 void JSProxy::InitializeBody(int object_size, Object* value) {
4563  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
4564  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
4565  WRITE_FIELD(this, offset, value);
4566  }
4567 }
4568 
4569 
4570 ACCESSORS(JSSet, table, Object, kTableOffset)
4571 ACCESSORS(JSMap, table, Object, kTableOffset)
4572 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4573 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4574 
4575 
4576 Address Foreign::foreign_address() {
4577  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4578 }
4579 
4580 
4581 void Foreign::set_foreign_address(Address value) {
4582  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4583 }
4584 
4585 
4586 ACCESSORS(JSModule, context, Object, kContextOffset)
4587 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
4588 
4589 
4590 JSModule* JSModule::cast(Object* obj) {
4591  ASSERT(obj->IsJSModule());
4592  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
4593  return reinterpret_cast<JSModule*>(obj);
4594 }
4595 
4596 
4597 ACCESSORS(JSValue, value, Object, kValueOffset)
4598 
4599 
4600 JSValue* JSValue::cast(Object* obj) {
4601  ASSERT(obj->IsJSValue());
4602  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
4603  return reinterpret_cast<JSValue*>(obj);
4604 }
4605 
4606 
4607 ACCESSORS(JSDate, value, Object, kValueOffset)
4608 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4609 ACCESSORS(JSDate, year, Object, kYearOffset)
4610 ACCESSORS(JSDate, month, Object, kMonthOffset)
4611 ACCESSORS(JSDate, day, Object, kDayOffset)
4612 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4613 ACCESSORS(JSDate, hour, Object, kHourOffset)
4614 ACCESSORS(JSDate, min, Object, kMinOffset)
4615 ACCESSORS(JSDate, sec, Object, kSecOffset)
4616 
4617 
4618 JSDate* JSDate::cast(Object* obj) {
4619  ASSERT(obj->IsJSDate());
4620  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
4621  return reinterpret_cast<JSDate*>(obj);
4622 }
4623 
4624 
4625 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4626 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4627 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4628 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4629 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4630 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4631 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4632 
4633 
4634 JSMessageObject* JSMessageObject::cast(Object* obj) {
4635  ASSERT(obj->IsJSMessageObject());
4636  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
4637  return reinterpret_cast<JSMessageObject*>(obj);
4638 }
4639 
4640 
4641 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4642 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4643 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4644 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4645 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4646 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4647 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4648 
4649 byte* Code::instruction_start() {
4650  return FIELD_ADDR(this, kHeaderSize);
4651 }
4652 
4653 
4654 byte* Code::instruction_end() {
4655  return instruction_start() + instruction_size();
4656 }
4657 
4658 
4659 int Code::body_size() {
4660  return RoundUp(instruction_size(), kObjectAlignment);
4661 }
4662 
4663 
4664 FixedArray* Code::unchecked_deoptimization_data() {
4665  return reinterpret_cast<FixedArray*>(
4666  READ_FIELD(this, kDeoptimizationDataOffset));
4667 }
4668 
4669 
4670 ByteArray* Code::unchecked_relocation_info() {
4671  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4672 }
4673 
4674 
4675 byte* Code::relocation_start() {
4676  return unchecked_relocation_info()->GetDataStartAddress();
4677 }
4678 
4679 
4680 int Code::relocation_size() {
4681  return unchecked_relocation_info()->length();
4682 }
4683 
4684 
4685 byte* Code::entry() {
4686  return instruction_start();
4687 }
4688 
4689 
4690 bool Code::contains(byte* inner_pointer) {
4691  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4692 }
4693 
4694 
4695 ACCESSORS(JSArray, length, Object, kLengthOffset)
4696 
4697 
4698 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4699 
4700 
4701 JSRegExp::Type JSRegExp::TypeTag() {
4702  Object* data = this->data();
4703  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4704  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4705  return static_cast<JSRegExp::Type>(smi->value());
4706 }
4707 
4708 
4709 JSRegExp::Type JSRegExp::TypeTagUnchecked() {
4710  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4711  return static_cast<JSRegExp::Type>(smi->value());
4712 }
4713 
4714 
4715 int JSRegExp::CaptureCount() {
4716  switch (TypeTag()) {
4717  case ATOM:
4718  return 0;
4719  case IRREGEXP:
4720  return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4721  default:
4722  UNREACHABLE();
4723  return -1;
4724  }
4725 }
4726 
4727 
4728 JSRegExp::Flags JSRegExp::GetFlags() {
4729  ASSERT(this->data()->IsFixedArray());
4730  Object* data = this->data();
4731  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4732  return Flags(smi->value());
4733 }
4734 
4735 
4736 String* JSRegExp::Pattern() {
4737  ASSERT(this->data()->IsFixedArray());
4738  Object* data = this->data();
4739  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
4740  return pattern;
4741 }
4742 
4743 
4744 Object* JSRegExp::DataAt(int index) {
4745  ASSERT(TypeTag() != NOT_COMPILED);
4746  return FixedArray::cast(data())->get(index);
4747 }
4748 
4749 
4750 Object* JSRegExp::DataAtUnchecked(int index) {
4751  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4752  int offset = FixedArray::kHeaderSize + index * kPointerSize;
4753  return READ_FIELD(fa, offset);
4754 }
4755 
4756 
4757 void JSRegExp::SetDataAt(int index, Object* value) {
4758  ASSERT(TypeTag() != NOT_COMPILED);
4759  ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4760  FixedArray::cast(data())->set(index, value);
4761 }
4762 
4763 
4764 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4765  ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4766  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4767  if (value->IsSmi()) {
4768  fa->set_unchecked(index, Smi::cast(value));
4769  } else {
4770  // We only do this during GC, so we don't need to notify the write barrier.
4771  fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4772  }
4773 }
4774 
4775 
4776 ElementsKind JSObject::GetElementsKind() {
4777  ElementsKind kind = map()->elements_kind();
4778 #if DEBUG
4779  FixedArrayBase* fixed_array =
4780  reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4781  Map* map = fixed_array->map();
4782  ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
4783  (map == GetHeap()->fixed_array_map() ||
4784  map == GetHeap()->fixed_cow_array_map())) ||
4785  (IsFastDoubleElementsKind(kind) &&
4786  (fixed_array->IsFixedDoubleArray() ||
4787  fixed_array == GetHeap()->empty_fixed_array())) ||
4788  (kind == DICTIONARY_ELEMENTS &&
4789  fixed_array->IsFixedArray() &&
4790  fixed_array->IsDictionary()) ||
4791  (kind > DICTIONARY_ELEMENTS));
4792  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
4793  (elements()->IsFixedArray() && elements()->length() >= 2));
4794 #endif
4795  return kind;
4796 }
4797 
4798 
4799 ElementsAccessor* JSObject::GetElementsAccessor() {
4800  return ElementsAccessor::ForKind(GetElementsKind());
4801 }
4802 
4803 
4804 bool JSObject::HasFastObjectElements() {
4805  return IsFastObjectElementsKind(GetElementsKind());
4806 }
4807 
4808 
4809 bool JSObject::HasFastSmiElements() {
4810  return IsFastSmiElementsKind(GetElementsKind());
4811 }
4812 
4813 
4814 bool JSObject::HasFastSmiOrObjectElements() {
4815  return IsFastSmiOrObjectElementsKind(GetElementsKind());
4816 }
4817 
4818 
4819 bool JSObject::HasFastDoubleElements() {
4820  return IsFastDoubleElementsKind(GetElementsKind());
4821 }
4822 
4823 
4824 bool JSObject::HasFastHoleyElements() {
4825  return IsFastHoleyElementsKind(GetElementsKind());
4826 }
4827 
4828 
4829 bool JSObject::HasDictionaryElements() {
4830  return GetElementsKind() == DICTIONARY_ELEMENTS;
4831 }
4832 
4833 
4834 bool JSObject::HasNonStrictArgumentsElements() {
4835  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4836 }
4837 
4838 
4839 bool JSObject::HasExternalArrayElements() {
4840  HeapObject* array = elements();
4841  ASSERT(array != NULL);
4842  return array->IsExternalArray();
4843 }
4844 
4845 
4846 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4847 bool JSObject::HasExternal##name##Elements() { \
4848  HeapObject* array = elements(); \
4849  ASSERT(array != NULL); \
4850  if (!array->IsHeapObject()) \
4851  return false; \
4852  return array->map()->instance_type() == type; \
4853 }
4854 
4855 
4856 EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
4857 EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
4858 EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
4859 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4860  EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
4861 EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
4862 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4863  EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
4864 EXTERNAL_ELEMENTS_CHECK(Float,
4865  EXTERNAL_FLOAT_ARRAY_TYPE)
4866 EXTERNAL_ELEMENTS_CHECK(Double,
4867  EXTERNAL_DOUBLE_ARRAY_TYPE)
4868 EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
4869 
4870 
4871 bool JSObject::HasNamedInterceptor() {
4872  return map()->has_named_interceptor();
4873 }
4874 
4875 
4876 bool JSObject::HasIndexedInterceptor() {
4877  return map()->has_indexed_interceptor();
4878 }
4879 
4880 
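// Fast elements backed by the copy-on-write map are shared between arrays,
// so before the first in-place store the backing store has to be copied
// into a regular (writable) FixedArray.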
4881 MaybeObject* JSObject::EnsureWritableFastElements() {
4882  ASSERT(HasFastSmiOrObjectElements());
4883  FixedArray* elems = FixedArray::cast(elements());
4884  Isolate* isolate = GetIsolate();
4885  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4886  Object* writable_elems;
4887  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4888  elems, isolate->heap()->fixed_array_map());
4889  if (!maybe_writable_elems->ToObject(&writable_elems)) {
4890  return maybe_writable_elems;
4891  }
4892  }
4893  set_elements(FixedArray::cast(writable_elems));
4894  isolate->counters()->cow_arrays_converted()->Increment();
4895  return writable_elems;
4896 }
4897 
4898 
4899 StringDictionary* JSObject::property_dictionary() {
4900  ASSERT(!HasFastProperties());
4901  return StringDictionary::cast(properties());
4902 }
4903 
4904 
4905 SeededNumberDictionary* JSObject::element_dictionary() {
4906  ASSERT(HasDictionaryElements());
4907  return SeededNumberDictionary::cast(elements());
4908 }
4909 
4910 
4911 bool String::IsHashFieldComputed(uint32_t field) {
4912  return (field & kHashNotComputedMask) == 0;
4913 }
4914 
4915 
4916 bool String::HasHashCode() {
4917  return IsHashFieldComputed(hash_field());
4918 }
4919 
4920 
4921 uint32_t String::Hash() {
4922  // Fast case: has hash code already been computed?
4923  uint32_t field = hash_field();
4924  if (IsHashFieldComputed(field)) return field >> kHashShift;
4925  // Slow case: compute hash code and set it.
4926  return ComputeAndSetHash();
4927 }
4928 
4929 
4930 StringHasher::StringHasher(int length, uint32_t seed)
4931  : length_(length),
4932  raw_running_hash_(seed),
4933  array_index_(0),
4934  is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4935  is_first_char_(true) {
4936  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4937 }
4938 
4939 
4940 bool StringHasher::has_trivial_hash() {
4941  return length_ > String::kMaxHashCalcLength;
4942 }
4943 
4944 
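// One mixing step of the Jenkins one-at-a-time hash; GetHashCore below
// applies the corresponding finalization steps and guarantees a non-zero
// result.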
4945 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint32_t c) {
4946  running_hash += c;
4947  running_hash += (running_hash << 10);
4948  running_hash ^= (running_hash >> 6);
4949  return running_hash;
4950 }
4951 
4952 
4953 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
4954  running_hash += (running_hash << 3);
4955  running_hash ^= (running_hash >> 11);
4956  running_hash += (running_hash << 15);
4957  if ((running_hash & String::kHashBitMask) == 0) {
4958  return kZeroHash;
4959  }
4960  return running_hash;
4961 }
4962 
4963 
4964 void StringHasher::AddCharacter(uint32_t c) {
4965  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4966  AddSurrogatePair(c); // Not inlined.
4967  return;
4968  }
4969  // Use the Jenkins one-at-a-time hash function to update the hash
4970  // for the given character.
4971  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
4972  // Incremental array index computation.
4973  if (is_array_index_) {
4974  if (c < '0' || c > '9') {
4975  is_array_index_ = false;
4976  } else {
4977  int d = c - '0';
4978  if (is_first_char_) {
4979  is_first_char_ = false;
4980  if (c == '0' && length_ > 1) {
4981  is_array_index_ = false;
4982  return;
4983  }
4984  }
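  // Roughly: reject the digit if appending it could push array_index_ past
  // the largest valid array index; 429496729 is 2^32 / 10, and the small
  // correction term accounts for the digit being appended.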
4985  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4986  is_array_index_ = false;
4987  } else {
4988  array_index_ = array_index_ * 10 + d;
4989  }
4990  }
4991  }
4992 }
4993 
4994 
4995 void StringHasher::AddCharacterNoIndex(uint32_t c) {
4996  ASSERT(!is_array_index());
4997  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
4998  AddSurrogatePairNoIndex(c); // Not inlined.
4999  return;
5000  }
5001  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
5002 }
5003 
5004 
5005 uint32_t StringHasher::GetHash() {
5006  // Get the calculated raw hash value and do some more bit ops to distribute
5007  // the hash further. Ensure that we never return zero as the hash value.
5008  return GetHashCore(raw_running_hash_);
5009 }
5010 
5011 
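// The first loop below feeds characters through AddCharacter while the
// prefix could still form a valid array index; once that possibility is
// gone the remaining characters take the cheaper AddCharacterNoIndex path.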
5012 template <typename schar>
5013 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
5014  StringHasher hasher(length, seed);
5015  if (!hasher.has_trivial_hash()) {
5016  int i;
5017  for (i = 0; hasher.is_array_index() && (i < length); i++) {
5018  hasher.AddCharacter(chars[i]);
5019  }
5020  for (; i < length; i++) {
5021  hasher.AddCharacterNoIndex(chars[i]);
5022  }
5023  }
5024  return hasher.GetHashField();
5025 }
5026 
5027 
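// Fast rejection: if the hash has already been computed and the
// "not an array index" bit is set, there is no need to reparse the string.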
5028 bool String::AsArrayIndex(uint32_t* index) {
5029  uint32_t field = hash_field();
5030  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
5031  return false;
5032  }
5033  return SlowAsArrayIndex(index);
5034 }
5035 
5036 
5037 Object* JSReceiver::GetPrototype() {
5038  return map()->prototype();
5039 }
5040 
5041 
5042 Object* JSReceiver::GetConstructor() {
5043  return map()->constructor();
5044 }
5045 
5046 
5047 bool JSReceiver::HasProperty(String* name) {
5048  if (IsJSProxy()) {
5049  return JSProxy::cast(this)->HasPropertyWithHandler(name);
5050  }
5051  return GetPropertyAttribute(name) != ABSENT;
5052 }
5053 
5054 
5055 bool JSReceiver::HasLocalProperty(String* name) {
5056  if (IsJSProxy()) {
5057  return JSProxy::cast(this)->HasPropertyWithHandler(name);
5058  }
5059  return GetLocalPropertyAttribute(name) != ABSENT;
5060 }
5061 
5062 
5063 PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
5064  return GetPropertyAttributeWithReceiver(this, key);
5065 }
5066 
5067 // TODO(504): this may be useful in other places too where JSGlobalProxy
5068 // is used.
5069 Object* JSReceiver::BypassGlobalProxy() {
5070  if (IsJSGlobalProxy()) {
5071  Object* proto = GetPrototype();
5072  if (proto->IsNull()) return GetHeap()->undefined_value();
5073  ASSERT(proto->IsJSGlobalObject());
5074  return proto;
5075  }
5076  return this;
5077 }
5078 
5079 
5080 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
5081  return IsJSProxy()
5082  ? JSProxy::cast(this)->GetIdentityHash(flag)
5083  : JSObject::cast(this)->GetIdentityHash(flag);
5084 }
5085 
5086 
5087 bool JSReceiver::HasElement(uint32_t index) {
5088  if (IsJSProxy()) {
5089  return JSProxy::cast(this)->HasElementWithHandler(index);
5090  }
5091  return JSObject::cast(this)->HasElementWithReceiver(this, index);
5092 }
5093 
5094 
5095 bool AccessorInfo::all_can_read() {
5096  return BooleanBit::get(flag(), kAllCanReadBit);
5097 }
5098 
5099 
5100 void AccessorInfo::set_all_can_read(bool value) {
5101  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
5102 }
5103 
5104 
5105 bool AccessorInfo::all_can_write() {
5106  return BooleanBit::get(flag(), kAllCanWriteBit);
5107 }
5108 
5109 
5110 void AccessorInfo::set_all_can_write(bool value) {
5111  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
5112 }
5113 
5114 
5115 bool AccessorInfo::prohibits_overwriting() {
5116  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
5117 }
5118 
5119 
5120 void AccessorInfo::set_prohibits_overwriting(bool value) {
5121  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
5122 }
5123 
5124 
5125 PropertyAttributes AccessorInfo::property_attributes() {
5126  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
5127 }
5128 
5129 
5130 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
5131  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
5132 }
5133 
5134 
5135 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
5136  Object* function_template = expected_receiver_type();
5137  if (!function_template->IsFunctionTemplateInfo()) return true;
5138  return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
5139 }
5140 
5141 
5142 template<typename Shape, typename Key>
5143 void Dictionary<Shape, Key>::SetEntry(int entry,
5144  Object* key,
5145  Object* value) {
5146  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
5147 }
5148 
5149 
5150 template<typename Shape, typename Key>
5151 void Dictionary<Shape, Key>::SetEntry(int entry,
5152  Object* key,
5153  Object* value,
5154  PropertyDetails details) {
5155  ASSERT(!key->IsString() ||
5156  details.IsDeleted() ||
5157  details.dictionary_index() > 0);
5158  int index = HashTable<Shape, Key>::EntryToIndex(entry);
5159  AssertNoAllocation no_gc;
5160  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
5161  FixedArray::set(index, key, mode);
5162  FixedArray::set(index+1, value, mode);
5163  FixedArray::set(index+2, details.AsSmi());
5164 }
5165 
5166 
5167 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
5168  ASSERT(other->IsNumber());
5169  return key == static_cast<uint32_t>(other->Number());
5170 }
5171 
5172 
5173 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
5174  return ComputeIntegerHash(key, 0);
5175 }
5176 
5177 
5178 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
5179  Object* other) {
5180  ASSERT(other->IsNumber());
5181  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
5182 }
5183 
5184 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
5185  return ComputeIntegerHash(key, seed);
5186 }
5187 
5188 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
5189  uint32_t seed,
5190  Object* other) {
5191  ASSERT(other->IsNumber());
5192  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
5193 }
5194 
5195 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
5196  return Isolate::Current()->heap()->NumberFromUint32(key);
5197 }
5198 
5199 
5200 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
5201  // We know that all entries in a hash table had their hash keys created.
5202  // Use that knowledge to have fast failure.
5203  if (key->Hash() != String::cast(other)->Hash()) return false;
5204  return key->Equals(String::cast(other));
5205 }
5206 
5207 
5208 uint32_t StringDictionaryShape::Hash(String* key) {
5209  return key->Hash();
5210 }
5211 
5212 
5213 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
5214  return String::cast(other)->Hash();
5215 }
5216 
5217 
5218 MaybeObject* StringDictionaryShape::AsObject(String* key) {
5219  return key;
5220 }
5221 
5222 
5223 template <int entrysize>
5224 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
5225  return key->SameValue(other);
5226 }
5227 
5228 
5229 template <int entrysize>
5230 uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
5231  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
5232  return Smi::cast(maybe_hash->ToObjectChecked())->value();
5233 }
5234 
5235 
5236 template <int entrysize>
5237 uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
5238  Object* other) {
5239  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
5240  return Smi::cast(maybe_hash->ToObjectChecked())->value();
5241 }
5242 
5243 
5244 template <int entrysize>
5245 MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
5246  return key;
5247 }
5248 
5249 
5250 void Map::ClearCodeCache(Heap* heap) {
5251  // No write barrier is needed since empty_fixed_array is not in new space.
5252  // Please note this function is used during marking:
5253  // - MarkCompactCollector::MarkUnmarkedObject
5254  // - IncrementalMarking::Step
5255  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
5256  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
5257 }
5258 
5259 
5260 void JSArray::EnsureSize(int required_size) {
5261  ASSERT(HasFastSmiOrObjectElements());
5262  FixedArray* elts = FixedArray::cast(elements());
5263  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
5264  if (elts->length() < required_size) {
5265  // Doubling in size would be overkill, but leave some slack to avoid
5266  // constantly growing.
5267  Expand(required_size + (required_size >> 3));
5268  // It's a performance benefit to keep a frequently used array in new-space.
5269  } else if (!GetHeap()->new_space()->Contains(elts) &&
5270  required_size < kArraySizeThatFitsComfortablyInNewSpace) {
5271  // Expand will allocate a new backing store in new space even if the size
5272  // we asked for isn't larger than what we had before.
5273  Expand(required_size);
5274  }
5275 }
5276 
5277 
5278 void JSArray::set_length(Smi* length) {
5279  // Don't need a write barrier for a Smi.
5280  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
5281 }
5282 
5283 
5284 bool JSArray::AllowsSetElementsLength() {
5285  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
5286  ASSERT(result == !HasExternalArrayElements());
5287  return result;
5288 }
5289 
5290 
5291 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
5292  MaybeObject* maybe_result = EnsureCanContainElements(
5293  storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
5294  if (maybe_result->IsFailure()) return maybe_result;
5295  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
5296  IsFastDoubleElementsKind(GetElementsKind())) ||
5297  ((storage->map() != GetHeap()->fixed_double_array_map()) &&
5298  (IsFastObjectElementsKind(GetElementsKind()) ||
5299  (IsFastSmiElementsKind(GetElementsKind()) &&
5300  FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
5301  set_elements(storage);
5302  set_length(Smi::FromInt(storage->length()));
5303  return this;
5304 }
5305 
5306 
5307 MaybeObject* FixedArray::Copy() {
5308  if (length() == 0) return this;
5309  return GetHeap()->CopyFixedArray(this);
5310 }
5311 
5312 
5313 MaybeObject* FixedDoubleArray::Copy() {
5314  if (length() == 0) return this;
5315  return GetHeap()->CopyFixedDoubleArray(this);
5316 }
5317 
5318 
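// The cells array stores (cell, AST id) pairs: slot index * 2 holds the
// global property cell and slot index * 2 + 1 holds the Smi-encoded AST id.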
5319 void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
5320  set(1 + index * 2, Smi::FromInt(id.ToInt()));
5321 }
5322 
5323 
5324 TypeFeedbackId TypeFeedbackCells::AstId(int index) {
5325  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
5326 }
5327 
5328 
5329 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
5330  set(index * 2, cell);
5331 }
5332 
5333 
5334 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
5335  return JSGlobalPropertyCell::cast(get(index * 2));
5336 }
5337 
5338 
5339 Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
5340  return isolate->factory()->the_hole_value();
5341 }
5342 
5343 
5344 Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
5345  return isolate->factory()->undefined_value();
5346 }
5347 
5348 
5349 Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
5350  return heap->raw_unchecked_the_hole_value();
5351 }
5352 
5353 
5354 int TypeFeedbackInfo::ic_total_count() {
5355  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
5356  return ICTotalCountField::decode(current);
5357 }
5358 
5359 
5360 void TypeFeedbackInfo::set_ic_total_count(int count) {
5361  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
5362  value = ICTotalCountField::update(value,
5363  ICTotalCountField::decode(count));
5364  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
5365 }
5366 
5367 
5368 int TypeFeedbackInfo::ic_with_type_info_count() {
5369  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
5370  return ICsWithTypeInfoCountField::decode(current);
5371 }
5372 
5373 
5374 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
5375  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
5376  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
5377  // We can get negative count here when the type-feedback info is
5378  // shared between two code objects. The can only happen when
5379  // the debugger made a shallow copy of code object (see Heap::CopyCode).
5380  // Since we do not optimize when the debugger is active, we can skip
5381  // this counter update.
5382  if (new_count >= 0) {
5383  new_count &= ICsWithTypeInfoCountField::kMask;
5384  value = ICsWithTypeInfoCountField::update(value, new_count);
5385  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
5386  }
5387 }
5388 
5389 
5390 void TypeFeedbackInfo::initialize_storage() {
5391  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
5392  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
5393 }
5394 
5395 
5396 void TypeFeedbackInfo::change_own_type_change_checksum() {
5397  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
5398  int checksum = OwnTypeChangeChecksum::decode(value);
5399  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
5400  value = OwnTypeChangeChecksum::update(value, checksum);
5401  // Ensure packed bit field is in Smi range.
5402  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
5403  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
5404  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
5405 }
5406 
5407 
5408 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
5409  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
5410  int mask = (1 << kTypeChangeChecksumBits) - 1;
5411  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
5412  // Ensure packed bit field is in Smi range.
5413  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
5414  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
5415  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
5416 }
5417 
5418 
5419 int TypeFeedbackInfo::own_type_change_checksum() {
5420  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
5421  return OwnTypeChangeChecksum::decode(value);
5422 }
5423 
5424 
5425 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
5426  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
5427  int mask = (1 << kTypeChangeChecksumBits) - 1;
5428  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
5429 }
5430 
5431 
5432 ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
5433  kTypeFeedbackCellsOffset)
5434 
5435 
5436 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
5437 
5438 
5439 Relocatable::Relocatable(Isolate* isolate) {
5440  ASSERT(isolate == Isolate::Current());
5441  isolate_ = isolate;
5442  prev_ = isolate->relocatable_top();
5443  isolate->set_relocatable_top(this);
5444 }
5445 
5446 
5447 Relocatable::~Relocatable() {
5448  ASSERT(isolate_ == Isolate::Current());
5449  ASSERT_EQ(isolate_->relocatable_top(), this);
5450  isolate_->set_relocatable_top(prev_);
5451 }
5452 
5453 
5454 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
5455  return map->instance_size();
5456 }
5457 
5458 
5459 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
5460  v->VisitExternalReference(
5461  reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5462 }
5463 
5464 
5465 template<typename StaticVisitor>
5466 void Foreign::ForeignIterateBody() {
5467  StaticVisitor::VisitExternalReference(
5468  reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5469 }
5470 
5471 
5472 void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
5473  typedef v8::String::ExternalAsciiStringResource Resource;
5474  v->VisitExternalAsciiString(
5475  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5476 }
5477 
5478 
5479 template<typename StaticVisitor>
5480 void ExternalAsciiString::ExternalAsciiStringIterateBody() {
5481  typedef v8::String::ExternalAsciiStringResource Resource;
5482  StaticVisitor::VisitExternalAsciiString(
5483  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5484 }
5485 
5486 
5487 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
5488  typedef v8::String::ExternalStringResource Resource;
5489  v->VisitExternalTwoByteString(
5490  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5491 }
5492 
5493 
5494 template<typename StaticVisitor>
5495 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
5496  typedef v8::String::ExternalStringResource Resource;
5497  StaticVisitor::VisitExternalTwoByteString(
5498  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5499 }
5500 
5501 
5502 template<int start_offset, int end_offset, int size>
5503 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
5504  HeapObject* obj,
5505  ObjectVisitor* v) {
5506  v->VisitPointers(HeapObject::RawField(obj, start_offset),
5507  HeapObject::RawField(obj, end_offset));
5508 }
5509 
5510 
5511 template<int start_offset>
5512 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
5513  int object_size,
5514  ObjectVisitor* v) {
5515  v->VisitPointers(HeapObject::RawField(obj, start_offset),
5516  HeapObject::RawField(obj, object_size));
5517 }
5518 
5519 
5520 #undef TYPE_CHECKER
5521 #undef CAST_ACCESSOR
5522 #undef INT_ACCESSORS
5523 #undef ACCESSORS
5524 #undef ACCESSORS_TO_SMI
5525 #undef SMI_ACCESSORS
5526 #undef BOOL_GETTER
5527 #undef BOOL_ACCESSORS
5528 #undef FIELD_ADDR
5529 #undef READ_FIELD
5530 #undef WRITE_FIELD
5531 #undef WRITE_BARRIER
5532 #undef CONDITIONAL_WRITE_BARRIER
5533 #undef READ_DOUBLE_FIELD
5534 #undef WRITE_DOUBLE_FIELD
5535 #undef READ_INT_FIELD
5536 #undef WRITE_INT_FIELD
5537 #undef READ_INTPTR_FIELD
5538 #undef WRITE_INTPTR_FIELD
5539 #undef READ_UINT32_FIELD
5540 #undef WRITE_UINT32_FIELD
5541 #undef READ_SHORT_FIELD
5542 #undef WRITE_SHORT_FIELD
5543 #undef READ_BYTE_FIELD
5544 #undef WRITE_BYTE_FIELD
5545 
5546 
5547 } } // namespace v8::internal
5548 
5549 #endif // V8_OBJECTS_INL_H_
byte * Address
Definition: globals.h:157
static int SizeOf(Map *map, HeapObject *object)
Definition: objects-inl.h:5454
MUST_USE_RESULT MaybeObject * GetElementWithReceiver(Object *receiver, uint32_t index)
Definition: objects.cc:667
bool FLAG_enable_slow_asserts
#define WRITE_BYTE_FIELD(p, offset, value)
Definition: objects-inl.h:967
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset eval_from_instructions_offset
Definition: objects-inl.h:3892
#define HAS_FAILURE_TAG(value)
Definition: v8globals.h:379
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:3775
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
Definition: objects-inl.h:886
void set_compare_state(byte value)
Definition: objects-inl.h:3432
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Definition: objects-inl.h:5167
void set_prohibits_overwriting(bool value)
Definition: objects-inl.h:5120
void set_null_unchecked(Heap *heap, int index)
Definition: objects-inl.h:1927
Code * builtin(Name name)
Definition: builtins.h:320
PropertyAttributes GetPropertyAttribute(String *name)
Definition: objects-inl.h:5063
JSGlobalPropertyCell * Cell(int index)
Definition: objects-inl.h:5334
#define SLOW_ASSERT(condition)
Definition: checks.h:276
int allow_osr_at_loop_nesting_level()
Definition: objects-inl.h:3292
const intptr_t kSmiTagMask
Definition: v8.h:4016
static const int kExternalAsciiRepresentationTag
Definition: v8.h:4085
static bool is_the_hole_nan(double value)
Definition: objects-inl.h:1766
V8EXPORT bool IsTrue() const
Definition: api.cc:2143
FixedArray * function_bindings()
Definition: objects-inl.h:4506
bool HasElementsTransition()
Definition: objects-inl.h:3670
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit read_only_prototype
Definition: objects-inl.h:3928
static int EntryToIndex(int entry)
Definition: objects.h:2892
static ByteArray * FromDataStartAddress(Address address)
Definition: objects-inl.h:2729
void AddCharacter(uint32_t c)
Definition: objects-inl.h:4964
void set_all_can_write(bool value)
Definition: objects-inl.h:5110
Object * DataAtUnchecked(int index)
Definition: objects-inl.h:4750
void set_has_deoptimization_support(bool value)
Definition: objects-inl.h:3254
static uint32_t Hash(uint32_t key)
Definition: objects-inl.h:5173
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:181
CheckType check_type()
Definition: objects-inl.h:3367
void set(int index, Object *value)
Definition: objects-inl.h:1757
int GetInternalFieldOffset(int index)
Definition: objects-inl.h:1527
void AddSurrogatePair(uc32 c)
Definition: objects.cc:7308
static bool get(Smi *smi, int bit_position)
Definition: objects.h:8940
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:331
static const int kSize
Definition: objects.h:6350
#define ASSERT_TAG_ALIGNED(address)
Definition: v8checks.h:59
void set_all_can_read(bool value)
Definition: objects-inl.h:5100
FixedArray * unchecked_deoptimization_data()
Definition: objects-inl.h:4664
void set_function_with_prototype(bool value)
Definition: objects-inl.h:3063
static double hole_nan_as_double()
Definition: objects-inl.h:1771
bool InNewSpace(Object *object)
Definition: heap-inl.h:288
unsigned stack_slots()
Definition: objects-inl.h:3318
static const int kTransitionsOrBackPointerOffset
Definition: objects.h:5132
static String * cast(Object *obj)
#define FATAL(msg)
Definition: checks.h:46
#define READ_DOUBLE_FIELD(p, offset)
Definition: objects-inl.h:896
#define READ_INTPTR_FIELD(p, offset)
Definition: objects-inl.h:940
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2428
static MUST_USE_RESULT MaybeObject * Allocate(int number_of_transitions)
Definition: transitions.cc:49
const uint32_t kTwoByteStringTag
Definition: objects.h:469
const int kFailureTypeTagSize
Definition: objects.h:1081
static const uint32_t kExponentMask
Definition: objects.h:1352
void set_language_mode(LanguageMode language_mode)
Definition: objects-inl.h:4092
bool function_with_prototype()
Definition: objects-inl.h:3068
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
Definition: objects-inl.h:1019
static int SizeOf(Map *map, HeapObject *object)
Definition: objects.h:2393
void set_unary_op_type(byte value)
Definition: objects-inl.h:3387
Isolate * isolate()
Definition: heap-inl.h:503
int unused_property_fields()
Definition: objects-inl.h:3019
void set_length(Smi *length)
Definition: objects-inl.h:5278
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, kNeedsAccessCheckBit) BOOL_ACCESSORS(FunctionTemplateInfo
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Definition: objects-inl.h:4534
Object * InObjectPropertyAt(int index)
Definition: objects-inl.h:1602
static const int kStorage2Offset
Definition: objects.h:6923
static Smi * FromInt(int value)
Definition: objects-inl.h:981
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
Definition: objects-inl.h:1193
int BinarySearch(T *array, String *name, int low, int high, int valid_entries)
Definition: objects-inl.h:1956
MUST_USE_RESULT MaybeObject * ToSmi()
Definition: objects-inl.h:823
unsigned stack_check_table_offset()
Definition: objects-inl.h:3351
Map * elements_transition_map()
Definition: objects-inl.h:3681
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2593
void LookupTransition(JSObject *holder, String *name, LookupResult *result)
Definition: objects-inl.h:2071
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:3570
void AddSurrogatePairNoIndex(uc32 c)
Definition: objects.cc:7316
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit allows_lazy_compilation
Definition: objects-inl.h:3939
int NumberOfOwnDescriptors()
Definition: objects.h:4944
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:303
MUST_USE_RESULT MaybeObject * EnsureCanContainHeapObjectElements()
Definition: objects-inl.h:1254
void VerifyApiCallResultType()
Definition: objects-inl.h:1715
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2916
#define READ_UINT32_FIELD(p, offset)
Definition: objects-inl.h:946
void set_function_bindings(FixedArray *bindings)
Definition: objects-inl.h:4512
static const byte kArgumentMarker
Definition: objects.h:7968
static const int kMaxHashCalcLength
Definition: objects.h:7390
bool is_access_check_needed()
Definition: objects-inl.h:3082
void set_pre_allocated_property_fields(int value)
Definition: objects-inl.h:3001
static const byte kUndefined
Definition: objects.h:7969
T Max(T a, T b)
Definition: utils.h:222
const int kVariableSizeSentinel
Definition: objects.h:199
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
Definition: objects-inl.h:5512
void Get(int descriptor_number, Descriptor *desc)
Definition: objects-inl.h:2164
static Failure * OutOfMemoryException()
Definition: objects-inl.h:1029
JSFunction * GetConstantFunction(int descriptor_number)
Definition: objects-inl.h:2146
static const int kFastPropertiesSoftLimit
Definition: objects.h:2162
PropertyAttributes property_attributes()
Definition: objects-inl.h:5125
bool IsAsciiRepresentation()
Definition: objects-inl.h:290
static ExternalTwoByteString * cast(Object *obj)
int32_t uc32
Definition: globals.h:260
SeededNumberDictionary * element_dictionary()
Definition: objects-inl.h:4905
static Map * cast(Object *obj)
void set_has_debug_break_slots(bool value)
Definition: objects-inl.h:3269
void SetDataAtUnchecked(int index, Object *value, Heap *heap)
Definition: objects-inl.h:4764
bool has_non_instance_prototype()
Definition: objects-inl.h:3058
#define SMI_ACCESSORS(holder, name, offset)
Definition: objects-inl.h:107
static StubType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:3538
static const byte kTheHole
Definition: objects.h:7966
MUST_USE_RESULT MaybeObject * GetPropertyWithReceiver(Object *receiver, String *key, PropertyAttributes *attributes)
Definition: objects.cc:154
static const int kExponentBias
Definition: objects.h:1356
bool attached_to_shared_function_info()
Definition: objects-inl.h:3108
Builtins * builtins()
Definition: isolate.h:924
int int32_t
Definition: unicode.cc:47
void set_context(Object *context)
Definition: objects-inl.h:4395
#define READ_FIELD(p, offset)
Definition: objects-inl.h:873
static bool TryTransitionToField(Handle< JSObject > object, Handle< String > key)
Definition: objects-inl.h:1432
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5339
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2171
bool SameValue(Object *other)
Definition: objects.cc:780
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit is_toplevel
Definition: objects-inl.h:3932
#define MAKE_STRUCT_CAST(NAME, Name, name)
Definition: objects-inl.h:2388
static Failure * Exception()
Definition: objects-inl.h:1024
static const int kSize
Definition: objects.h:6235
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
Definition: objects.cc:2338
void set_map(Map *value)
Definition: objects-inl.h:1143
static bool IsMatch(String *key, Object *other)
Definition: objects-inl.h:5200
byte binary_op_result_type()
Definition: objects-inl.h:3410
FixedArray * literals()
Definition: objects-inl.h:4494
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(FunctionTemplateInfo
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
byte * instruction_end()
Definition: objects-inl.h:4654
uint16_t SlicedStringGet(int index)
Definition: objects.cc:6865
static Smi * FromIntptr(intptr_t value)
Definition: objects-inl.h:990
static Handle< Object > TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:10072
#define READ_BYTE_FIELD(p, offset)
Definition: objects-inl.h:964
static const int kSize
Definition: objects.h:6625
void SetAstId(int index, TypeFeedbackId id)
Definition: objects-inl.h:5319
void change_ic_with_type_info_count(int count)
Definition: objects-inl.h:5374
#define ASSERT(condition)
Definition: checks.h:270
void set_profiler_ticks(int ticks)
Definition: objects-inl.h:3311
static MUST_USE_RESULT MaybeObject * AsObject(String *key)
Definition: objects-inl.h:5218
const int kPointerSizeLog2
Definition: globals.h:232
void set_start_position(int start_position)
Definition: objects-inl.h:4167
#define WRITE_INT_FIELD(p, offset, value)
Definition: objects-inl.h:937
unsigned short uint16_t
Definition: unicode.cc:46
void set_optimizable(bool value)
Definition: objects-inl.h:3241
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
Definition: objects-inl.h:5344
Object * BypassGlobalProxy()
Definition: objects-inl.h:5069
#define READ_INT64_FIELD(p, offset)
Definition: objects-inl.h:952
#define WRITE_UINT32_FIELD(p, offset, value)
Definition: objects-inl.h:949
static Context * cast(Object *context)
Definition: contexts.h:212
static uint32_t HashForObject(Object *key, Object *object)
Definition: objects-inl.h:5237
bool IsMarkedForParallelRecompilation()
Definition: objects-inl.h:4328
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedInt
kPropertyAccessorsOffset kNamedPropertyHandlerOffset instance_template
Definition: objects-inl.h:3860
#define WRITE_INTPTR_FIELD(p, offset, value)
Definition: objects-inl.h:943
const uint32_t kStringRepresentationMask
Definition: objects.h:474
bool NonFailureIsHeapObject()
Definition: objects-inl.h:166
int SizeFromMap(Map *map)
Definition: objects-inl.h:2954
void set_compiled_optimizable(bool value)
Definition: objects-inl.h:3284
static MUST_USE_RESULT MaybeObject * AsObject(Object *key)
Definition: objects-inl.h:5245
void set(int index, float value)
Definition: objects-inl.h:2902
Object * DataAt(int index)
Definition: objects-inl.h:4744
#define CHECK(condition)
Definition: checks.h:56
Object ** GetKeySlot(int descriptor_number)
Definition: objects-inl.h:2085
bool IsInternalError() const
Definition: objects-inl.h:1002
bool HasSpecificClassOf(String *name)
Definition: objects-inl.h:836
TypeFeedbackId AstId(int index)
Definition: objects-inl.h:5324
int isnan(double x)
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2840
const int kFastElementsKindCount
Definition: elements-kind.h:77
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2577
void ReplaceCode(Code *code)
Definition: objects-inl.h:4362
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1360
Factory * factory()
Definition: isolate.h:992
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
Definition: objects-inl.h:4881
PropertyAttributes
void set_the_hole(int index)
Definition: objects-inl.h:1900
void init_back_pointer(Object *undefined)
Definition: objects-inl.h:3769
void set_foreign_address(Address value)
Definition: objects-inl.h:4581
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:5307
void SeqTwoByteStringSet(int index, uint16_t value)
Definition: objects-inl.h:2536
static Code * cast(Object *obj)
const uint32_t kAsciiDataHintTag
Definition: objects.h:498
#define CAST_ACCESSOR(type)
Definition: objects-inl.h:78
const uint32_t kShortExternalStringMask
Definition: objects.h:502
void set(int index, uint32_t value)
Definition: objects-inl.h:2883
bool HasElementWithReceiver(JSReceiver *receiver, uint32_t index)
Definition: objects.cc:9277
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool AsArrayIndex(uint32_t *index)
Definition: objects-inl.h:5028
Object * GetValue(int descriptor_number)
Definition: objects-inl.h:2123
static const int kSize
Definition: objects.h:6016
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:971
const int kIntSize
Definition: globals.h:217
TransitionArray * unchecked_transition_array()
Definition: objects-inl.h:3792
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
Definition: objects-inl.h:4500
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
Definition: objects-inl.h:5503
static uint32_t Hash(Object *key)
Definition: objects-inl.h:5230
unsigned int seed
Definition: test-strings.cc:18
void set(int index, uint16_t value)
Definition: objects-inl.h:2845
void ClearCodeCache(Heap *heap)
Definition: objects-inl.h:5250
static const int kZeroHash
Definition: objects.h:7017
bool Equals(String *other)
Definition: objects-inl.h:2419
static const int kHeaderSize
Definition: objects.h:1262
Code * javascript_builtin_code(Builtins::JavaScript id)
Definition: objects-inl.h:4542
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2878
int GetInObjectPropertyOffset(int index)
Definition: objects-inl.h:1594
bool contains(byte *pc)
Definition: objects-inl.h:4690
Object * GetInternalField(int index)
Definition: objects-inl.h:1533
void set_dictionary_map(bool value)
Definition: objects-inl.h:3123
static const int kSize
Definition: objects.h:8333
uint16_t SeqAsciiStringGet(int index)
Definition: objects-inl.h:2497
void LookupDescriptor(JSObject *holder, String *name, LookupResult *result)
Definition: objects-inl.h:2061
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kInstanceClassNameOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kIsTopLevelBit kAllowLazyCompilation kUsesArguments kFormalParameterCountOffset kStartPositionAndTypeOffset kCompilerHintsOffset stress_deopt_counter
Definition: objects-inl.h:4030
void set_binary_op_type(byte value)
Definition: objects-inl.h:3402
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4711
uint8_t byte
Definition: globals.h:156
void set(int index, int16_t value)
Definition: objects-inl.h:2826
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1611
uint16_t ExternalTwoByteStringGet(int index)
Definition: objects-inl.h:2663
Map * GetTransition(int transition_index)
Definition: objects-inl.h:3707
compiler_hints
Definition: objects-inl.h:3939
static const int kFirstOffset
Definition: objects.h:7653
void set_null(int index)
Definition: objects-inl.h:1888
ByteArray * unchecked_relocation_info()
Definition: objects-inl.h:4670
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
Definition: objects.cc:3558
static const int kKindOffset
Definition: objects.h:7960
bool IsNull() const
Definition: v8.h:4490
const uint32_t kNotStringTag
Definition: objects.h:457
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:1864
static const int kParentOffset
Definition: objects.h:7705
String * GetKey(int descriptor_number)
Definition: objects-inl.h:2093
bool HasNonStrictArgumentsElements()
Definition: objects-inl.h:4834
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
Definition: objects-inl.h:5080
static const int kTransitionSize
Definition: transitions.h:163
const uint64_t kHoleNanInt64
Definition: v8globals.h:473
void set_is_pregenerated(bool value)
Definition: objects-inl.h:3227
MUST_USE_RESULT MaybeObject * AddTransition(String *key, Map *target, SimpleTransitionFlag flag)
Definition: objects-inl.h:3694
#define READ_SHORT_FIELD(p, offset)
Definition: objects-inl.h:958
#define FIELD_ADDR(p, offset)
Definition: objects-inl.h:870
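The FIELD_ADDR, READ_*_FIELD and WRITE_*_FIELD macros referenced in this index implement raw field access on tagged heap object pointers. A minimal sketch of the usual pattern, assuming a one-bit pointer tag; the SKETCH_* names, the tag constant, and the macro bodies are illustrative assumptions, not the definitions in objects-inl.h:
// Sketch only: tag value, names and bodies are assumptions for illustration.
#include <cstdint>
const int kSketchHeapObjectTag = 1;  // assumed low-bit tag on heap object pointers
// Compute the untagged address of a field at a byte offset inside an object.
#define SKETCH_FIELD_ADDR(p, offset) \
  (reinterpret_cast<uint8_t*>(p) + (offset) - kSketchHeapObjectTag)
// Read and write an int-sized field at that address.
#define SKETCH_READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(SKETCH_FIELD_ADDR(p, offset)))
#define SKETCH_WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(SKETCH_FIELD_ADDR(p, offset)) = (value))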
void set_opt_reenable_tries(int value)
Definition: objects-inl.h:4277
#define UNREACHABLE()
Definition: checks.h:50
Object * GetElementNoExceptionThrown(uint32_t index)
Definition: objects-inl.h:850
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static SeededNumberDictionary * cast(Object *obj)
Definition: objects.h:3236
void Append(Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2205
virtual void Validate(JSObject *obj)=0
void set_ic_total_count(int count)
Definition: objects-inl.h:5360
static const int kDescriptorLengthOffset
Definition: objects.h:2631
MUST_USE_RESULT MaybeObject * SetContent(FixedArrayBase *storage)
Definition: objects-inl.h:5291
const uint32_t kIsSymbolMask
Definition: objects.h:462
void set_unchecked(int index, Smi *value)
Definition: objects-inl.h:1910
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2821
static const int kExponentShift
Definition: objects.h:1357
bool IsStringObjectWithCharacterAt(uint32_t index)
Definition: objects-inl.h:1701
static const int kValueOffset
Definition: objects.h:1342
const int kFailureTagSize
Definition: v8globals.h:63
String * GetUnderlying()
Definition: objects-inl.h:2486
const uint32_t kHoleNanUpper32
Definition: v8globals.h:469
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
Definition: objects-inl.h:3548
const int kDoubleSize
Definition: globals.h:218
void set_undefined(int index)
Definition: objects-inl.h:1874
static SlicedString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * SetPrototypeTransitions(FixedArray *prototype_transitions)
Definition: objects-inl.h:3729
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:5791
int pre_allocated_property_fields()
Definition: objects-inl.h:2949
BOOL_GETTER(SharedFunctionInfo, compiler_hints, has_only_simple_this_property_assignments, kHasOnlySimpleThisPropertyAssignments)
static uint32_t SeededHash(uint32_t key, uint32_t seed)
Definition: objects-inl.h:5184
#define WRITE_BARRIER(heap, object, offset, value)
Definition: objects-inl.h:879
#define HAS_SMI_TAG(value)
Definition: v8globals.h:376
Context * native_context()
Definition: contexts.cc:58
void InitializeBody(int object_size)
Definition: objects-inl.h:1674
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
Definition: objects-inl.h:771
static const int kFirstOffset
Definition: objects.h:2633
int LinearSearch(T *array, String *name, int len, int valid_entries)
Definition: objects-inl.h:1993
bool IsAsciiRepresentationUnderneath()
Definition: objects-inl.h:302
static Failure * RetryAfterGC()
Definition: objects-inl.h:1040
void IteratePointers(ObjectVisitor *v, int start, int end)
Definition: objects-inl.h:1187
int SeqTwoByteStringSize(InstanceType instance_type)
Definition: objects-inl.h:2542
static const uchar kMaxNonSurrogateCharCode
Definition: unicode.h:133
static const int kNotFound
Definition: transitions.h:137
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1059
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2618
static Failure * cast(MaybeObject *object)
Definition: objects-inl.h:496
const uint32_t kIsIndirectStringMask
Definition: objects.h:481
void set_inlined_type_change_checksum(int checksum)
Definition: objects-inl.h:5408
#define READ_INT_FIELD(p, offset)
Definition: objects-inl.h:934
static const int kMinValue
Definition: objects.h:1048
bool ToArrayIndex(uint32_t *index)
Definition: objects-inl.h:1682
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
int get_int(int index)
Definition: objects-inl.h:2723
MUST_USE_RESULT MaybeObject * ResetElements()
Definition: objects-inl.h:1402
ElementsKind GetElementsKind()
Definition: objects-inl.h:4776
SharedFunctionInfo * unchecked_shared()
Definition: objects-inl.h:4389
static Object * RawUninitializedSentinel(Heap *heap)
Definition: objects-inl.h:5349
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:2329
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4827
const int kPointerSize
Definition: globals.h:220
static const int kIsNotArrayIndexMask
Definition: objects.h:7337
#define TYPE_CHECKER(type, instancetype)
Definition: objects-inl.h:71
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
Definition: v8memory.h:71
intptr_t OffsetFrom(T x)
Definition: utils.h:126
int GetFieldIndex(int descriptor_number)
Definition: objects-inl.h:2141
const int kHeapObjectTag
Definition: v8.h:4009
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2802
bool IsAligned(T value, U alignment)
Definition: utils.h:206
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
Definition: objects-inl.h:2995
unsigned safepoint_table_offset()
Definition: objects-inl.h:3334
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2411
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
Definition: objects-inl.h:2669
bool HasElement(uint32_t index)
Definition: objects-inl.h:5087
#define WRITE_SHORT_FIELD(p, offset, value)
Definition: objects-inl.h:961
const uint32_t kAsciiDataHintMask
Definition: objects.h:497
AllocationSpace allocation_space() const
Definition: objects-inl.h:1012
MUST_USE_RESULT MaybeObject * set_initial_map_and_cache_transitions(Map *value)
Definition: objects-inl.h:4415
bool IsTwoByteRepresentationUnderneath()
Definition: objects-inl.h:318
static FunctionTemplateInfo * cast(Object *obj)
named_property_handler
Definition: objects-inl.h:3856
static const int kFirstDeoptEntryIndex
Definition: objects.h:4019
static const int kPropertiesOffset
Definition: objects.h:2171
static const int kStorage1Offset
Definition: objects.h:6922
T RoundUp(T x, intptr_t m)
Definition: utils.h:150
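IsAligned, OffsetFrom and RoundUp are the alignment helpers used throughout these inline functions. A minimal sketch of the usual power-of-two versions; the Sketch* names and bodies are assumptions, not copied from utils.h:
#include <stdint.h>
// True when value is a multiple of a power-of-two alignment.
inline bool SketchIsAligned(intptr_t value, intptr_t alignment) {
  return (value & (alignment - 1)) == 0;
}
// Round value up to the next multiple of a power-of-two m.
inline intptr_t SketchRoundUp(intptr_t x, intptr_t m) {
  return (x + m - 1) & -m;
}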
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3491
static FixedDoubleArray * cast(Object *obj)
Object * FastPropertyAt(int index)
Definition: objects-inl.h:1566
bool IsTwoByteRepresentation()
Definition: objects-inl.h:296
uint16_t ExternalAsciiStringGet(int index)
Definition: objects-inl.h:2631
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:3559
bool is_inline_cache_stub()
Definition: objects-inl.h:3485
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset)
bool IsFastSmiElementsKind(ElementsKind kind)
static const int kMaxNonCodeHeapObjectSize
Definition: spaces.h:717
const uint32_t kShortExternalStringTag
Definition: objects.h:503
static int SmiValue(internal::Object *value)
Definition: v8.h:4113
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
int SeqAsciiStringSize(InstanceType instance_type)
Definition: objects-inl.h:2547
Object * FastPropertyAtPut(int index, Object *value)
Definition: objects-inl.h:1579
void set_kind(byte kind)
Definition: objects-inl.h:1463
static int GetIdentityHash(Handle< JSObject > obj)
Definition: objects.cc:3531
DEFINE_bool(code_comments, ...)
StringRepresentationTag
Definition: objects.h:475
HeapObject * UncheckedPrototypeTransitions()
Definition: objects-inl.h:3800
static int SizeFor(int length)
Definition: objects.h:2434
static const int kElementsOffset
Definition: objects.h:2172
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
Definition: objects-inl.h:1831
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2650
PropertyDetails GetDetails(int descriptor_number)
Definition: objects-inl.h:2129
Object ** GetFirstElementAddress()
Definition: objects-inl.h:1222
static uint32_t HashForObject(uint32_t key, Object *object)
Definition: objects-inl.h:5178
BuiltinFunctionId builtin_function_id()
Definition: objects-inl.h:4227
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:5313
const uint32_t kStringTag
Definition: objects.h:456
byte * relocation_start()
Definition: objects-inl.h:4675
InlineCacheState ic_state()
Definition: objects-inl.h:3168
static uint32_t HashForObject(String *key, Object *object)
Definition: objects-inl.h:5213
bool IsUndefined() const
Definition: v8.h:4472
void set_construction_count(int value)
Definition: objects-inl.h:4040
double get_scalar(int index)
Definition: objects-inl.h:1783
uint16_t ConsStringGet(int index)
Definition: objects.cc:6834
DescriptorLookupCache * descriptor_lookup_cache()
Definition: isolate.h:857
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1154
void set_check_type(CheckType value)
Definition: objects-inl.h:3374
void set_to_boolean_state(byte value)
Definition: objects-inl.h:3462
formal_parameter_count
Definition: objects-inl.h:4003
static int OffsetOfElementAt(int index)
Definition: objects.h:2356
void SetTransition(int transition_index, Map *target)
Definition: objects-inl.h:3702
void set(int index, uint8_t value)
Definition: objects-inl.h:2807
PropertyAttributes GetPropertyAttributeWithReceiver(JSReceiver *receiver, String *name)
Definition: objects.cc:3128
static int SizeFor(int length)
Definition: objects.h:2353
#define T(name, string, precedence)
Definition: token.cc:48
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
Definition: objects-inl.h:3533
static TransitionArray * cast(Object *obj)
Context * context()
Definition: isolate.h:520
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void SetCell(int index, JSGlobalPropertyCell *cell)
Definition: objects-inl.h:5329
static ElementsAccessor * ForKind(ElementsKind elements_kind)
Definition: elements.h:134
static SeqTwoByteString * cast(Object *obj)
bool HasTransitionArray()
Definition: objects-inl.h:3675
const int kElementsKindCount
Definition: elements-kind.h:76
void SetDataAt(int index, Object *value)
Definition: objects-inl.h:4757
static bool IsMatch(Object *key, Object *other)
Definition: objects-inl.h:5224
static const int kHeaderSize
Definition: objects.h:2296
bool HasElementWithHandler(uint32_t index)
Definition: objects.cc:278
void set(int index, double value)
Definition: objects-inl.h:1808
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3081
start_position_and_type
Definition: objects-inl.h:3932
SimpleTransitionFlag
Definition: objects.h:184
static InlineCacheState ExtractICStateFromFlags(Flags flags)
Definition: objects-inl.h:3528
bool HasProperty(String *name)
Definition: objects-inl.h:5047
static const int kSize
Definition: objects.h:6386
bool has_deoptimization_support()
Definition: objects-inl.h:3247
static Kind ExtractKindFromFlags(Flags flags)
Definition: objects-inl.h:3523
static const int kMapOffset
Definition: objects.h:1261
bool has_named_interceptor()
Definition: objects.h:4734
static int ExtractArgumentsCountFromFlags(Flags flags)
Definition: objects-inl.h:3543
bool HasPrototypeTransitions()
Definition: objects-inl.h:3743
bool is_the_hole(int index)
Definition: objects-inl.h:1743
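is_the_hole, kHoleNanInt64 and kHoleNanUpper32 exist because FixedDoubleArray marks absent elements with one reserved NaN bit pattern. A minimal sketch of the idea; the marker value and the Sketch* names are assumptions chosen for the example, not V8's constants:
#include <cstdint>
#include <cstring>
// Reserved bit pattern standing in for "no element here" (arbitrary NaN payload).
const uint64_t kSketchHoleNanInt64 = 0x7FF7FFFFFFFFFFFFULL;
inline uint64_t SketchBitsOf(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // reinterpret without aliasing issues
  return bits;
}
// A stored double is "the hole" only if it carries the exact marker bits;
// ordinary values, and NaNs canonicalized before storing (compare
// canonical_not_the_hole_nan_as_double in this index), never do.
inline bool SketchIsTheHole(double stored) {
  return SketchBitsOf(stored) == kSketchHoleNanInt64;
}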
void set_instance_type(InstanceType value)
Definition: objects-inl.h:3014
const uint32_t kIsNotStringMask
Definition: objects.h:455
bool IsOutOfMemoryException() const
Definition: objects-inl.h:1007
static HeapNumber * cast(Object *obj)
static MUST_USE_RESULT MaybeObject * NewWith(SimpleTransitionFlag flag, String *key, Map *target, Object *back_pointer)
Definition: transitions.cc:73
int32_t get_scalar(int index)
Definition: objects-inl.h:2852
bool CanHaveMoreTransitions()
Definition: objects-inl.h:3686
byte get(int index)
Definition: objects-inl.h:2711
static StringDictionary * cast(Object *obj)
Definition: objects.h:3182
void set_value(double value)
Definition: objects-inl.h:1203
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:176
static const int kLengthOffset
Definition: objects.h:2295
static double nan_value()
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2859
const int kSpaceTagMask
Definition: v8globals.h:189
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:286
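ComputeIntegerHash and the SeededHash helpers mix numeric keys with a per-heap seed before using them as dictionary indices (compare SeededNumberDictionary in this index). A sketch of a typical seeded bit-mixing hash; the exact steps are an assumption, not necessarily the ones in utils.h:
#include <stdint.h>
inline uint32_t SketchComputeIntegerHash(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;    // fold the per-heap seed into the key
  hash = ~hash + (hash << 15);   // successive shifts and adds spread every
  hash = hash ^ (hash >> 12);    // input bit across the whole word so nearby
  hash = hash + (hash << 2);     // keys land in unrelated buckets
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash;
}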
#define STRUCT_LIST(V)
Definition: objects.h:448
AccessorDescriptor * GetCallbacks(int descriptor_number)
Definition: objects-inl.h:2157
V8EXPORT bool IsNumber() const
Definition: api.cc:2183
ExtraICState extra_ic_state()
Definition: objects-inl.h:3181
static int SizeFor(int length)
Definition: objects.h:7548
const intptr_t kObjectAlignment
Definition: v8globals.h:44
void SetInternalField(int index, Object *value)
Definition: objects-inl.h:1542
PropertyType GetType(int descriptor_number)
Definition: objects-inl.h:2136
static JSGlobalPropertyCell * cast(Object *obj)
name_should_print_as_anonymous
Definition: objects-inl.h:4115
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:240
IncrementalMarking * incremental_marking()
Definition: heap.h:1553
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:1799
bool has_indexed_interceptor()
Definition: objects.h:4743
ElementsKind GetInitialFastElementsKind()
static const uint32_t kHashBitMask
Definition: objects.h:7345
bool HasPropertyWithHandler(String *name)
Definition: objects.cc:2522
uint16_t uc16
Definition: globals.h:259
void SetNumberOfDescriptors(int number_of_descriptors)
Definition: objects-inl.h:1946
Object * GetBackPointer()
Definition: objects-inl.h:3659
void AddCharacterNoIndex(uint32_t c)
Definition: objects-inl.h:4995
static const uint32_t kSignMask
Definition: objects.h:1351
void set_bit_field(byte value)
Definition: objects-inl.h:3034
static int SizeFor(int length)
Definition: objects.h:7600
static const int kSize
Definition: objects.h:6543
static const int kMaxNumberOfDescriptors
Definition: objects.h:2665
const int kSmiShiftSize
Definition: v8.h:4060
static JSValue * cast(Object *obj)
Definition: objects-inl.h:4600
const int kSmiTagSize
Definition: v8.h:4015
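kSmiTag, kSmiTagSize, kSmiShiftSize, HAS_SMI_TAG and Smi::cast in this index all deal with small-integer (smi) tagging: the low pointer bit tells smis apart from heap objects. A minimal sketch of the 32-bit scheme; the constants and helper names are assumptions for illustration:
#include <stdint.h>
const intptr_t kSketchSmiTag = 0;       // assumed: smis carry a 0 in the low bit
const intptr_t kSketchSmiTagSize = 1;
const intptr_t kSketchSmiTagMask = (1 << kSketchSmiTagSize) - 1;
// Encode a 31-bit integer by shifting it past the tag bit.
inline intptr_t SketchSmiFromInt(int value) {
  return (static_cast<intptr_t>(value) << kSketchSmiTagSize) | kSketchSmiTag;
}
// Recover the integer with an arithmetic (sign-extending) right shift.
inline int SketchSmiValue(intptr_t tagged) {
  return static_cast<int>(tagged >> kSketchSmiTagSize);
}
// A value is a smi exactly when its tag bits match kSketchSmiTag.
inline bool SketchHasSmiTag(intptr_t tagged) {
  return (tagged & kSketchSmiTagMask) == kSketchSmiTag;
}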
static const int kHeaderSize
Definition: objects.h:4549
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:5013
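HashSequentialString, StringHasher, AddCharacterNoIndex and kZeroHash in this index compute string hash fields incrementally. A sketch of a one-at-a-time style hasher with a seed and a nonzero fixup; the mixing steps and the sentinel value are assumptions:
#include <stdint.h>
// Mix one UTF-16 code unit into the running hash.
inline uint32_t SketchAddCharacter(uint32_t running, uint16_t c) {
  running += c;
  running += running << 10;
  running ^= running >> 6;
  return running;
}
inline uint32_t SketchHashSequentialString(const uint16_t* chars, int length,
                                           uint32_t seed) {
  uint32_t hash = seed;
  for (int i = 0; i < length; i++) hash = SketchAddCharacter(hash, chars[i]);
  hash += hash << 3;    // finalization pass
  hash ^= hash >> 11;
  hash += hash << 15;
  // Hash fields reserve 0 for "not yet computed", so remap 0 to a sentinel.
  return hash == 0 ? 27 : hash;
}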
FunctionTemplateInfo * get_api_func_data()
Definition: objects-inl.h:4216
hidden_prototype
Definition: objects-inl.h:3923
void set_back_pointer_storage(Object *back_pointer, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void EnsureSize(int minimum_size_of_backing_fixed_array)
Definition: objects-inl.h:5260
void set(int index, double value)
Definition: objects-inl.h:2921
bool is_undetectable()
Definition: objects.h:4757
#define WRITE_FIELD(p, offset, value)
Definition: objects-inl.h:876
static const int kFullStringRepresentationMask
Definition: v8.h:4082
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:149
MUST_USE_RESULT MaybeObject * CopyAsElementsKind(ElementsKind kind, TransitionFlag flag)
Definition: objects.cc:5058
void set_major_key(int value)
Definition: objects-inl.h:3209
int Search(T *array, String *name, int valid_entries)
Definition: objects-inl.h:2016
void Set(int index, uint16_t value)
Definition: objects-inl.h:2470
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:1850
#define HEAP
Definition: isolate.h:1433
V8EXPORT bool IsFalse() const
Definition: api.cc:2149
void set_is_access_check_needed(bool access_check_needed)
Definition: objects-inl.h:3073
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
Definition: objects-inl.h:859
static const int kSize
Definition: objects.h:6477
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
bool is_dictionary_map()
Definition: objects-inl.h:3128
static const byte kNull
Definition: objects.h:7967
void set_owns_descriptors(bool is_shared)
Definition: objects-inl.h:3143
const int kShortSize
Definition: globals.h:216
instance_class_name
Definition: objects-inl.h:3912
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2752
InstanceType instance_type()
Definition: objects-inl.h:3009
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
Definition: objects.h:2163
static bool ShouldZapGarbage()
Definition: heap.h:1287
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1171
Counters * counters()
Definition: isolate.h:819
void set(int index, byte value)
Definition: objects-inl.h:2717
static double canonical_not_the_hole_nan_as_double()
Definition: objects-inl.h:1776
#define INT_ACCESSORS(holder, name, offset)
Definition: objects-inl.h:85
const int kSmiTag
Definition: v8.h:4014
bool TooManyFastProperties(int properties, StoreFromKeyed store_mode)
Definition: objects-inl.h:1655
static FixedArray * cast(Object *obj)
void AppendDescriptor(Descriptor *desc, const DescriptorArray::WhitenessWitness &)
Definition: objects-inl.h:3649
StringHasher(int length, uint32_t seed)
Definition: objects-inl.h:4930
static const int kHeaderSize
Definition: objects.h:2173
void set(int index, int8_t value)
Definition: objects-inl.h:2788
static Smi * set(Smi *smi, int bit_position, bool v)
Definition: objects.h:8948
void SeqAsciiStringSet(int index, uint16_t value)
Definition: objects-inl.h:2503
bool IsCompatibleReceiver(Object *receiver)
Definition: objects-inl.h:5135
static HashTable * cast(Object *obj)
Definition: objects-inl.h:2394
void set_is_extensible(bool value)
Definition: objects-inl.h:3087
ElementsKind elements_kind()
Definition: objects.h:4781
void set_is_shared(bool value)
Definition: objects-inl.h:3113
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
Definition: objects.cc:250
const int kFailureTag
Definition: v8globals.h:62
void set_compare_operation(byte value)
Definition: objects-inl.h:3447
void set_attached_to_shared_function_info(bool value)
Definition: objects-inl.h:3100
void set_stack_slots(unsigned slots)
Definition: objects-inl.h:3325
double FastI2D(int x)
Definition: conversions.h:76
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:232
const uint32_t kIsIndirectStringTag
Definition: objects.h:482
void SetEntry(int entry, Object *key, Object *value)
Definition: objects-inl.h:5143
Object * GetCallbacksObject(int descriptor_number)
Definition: objects-inl.h:2151
V8 runtime flag help text (DEFINE_bool and related declarations)
Definition: flags.cc:301
static const int kStringEncodingMask
Definition: v8.h:4083
void set_instance_size(int value)
Definition: objects-inl.h:2987
Object * get(int index)
Definition: objects-inl.h:1737
JSFunction * unchecked_constructor()
Definition: objects-inl.h:3133
void set(int index, int32_t value)
Definition: objects-inl.h:2864
bool IsFastHoleyElementsKind(ElementsKind kind)
#define BUILTIN(name)
Definition: builtins.cc:142
static uint32_t Hash(String *key)
Definition: objects-inl.h:5208
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
Definition: objects-inl.h:4548
static const int kSize
Definition: objects.h:6191
ElementsAccessor * GetElementsAccessor()
Definition: objects-inl.h:4799
bool IsInstanceOf(FunctionTemplateInfo *type)
Definition: objects-inl.h:137
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2436
void set_bit_field2(byte value)
Definition: objects-inl.h:3044
static MUST_USE_RESULT MaybeObject * AsObject(uint32_t key)
Definition: objects-inl.h:5195
void set_finger_index(int finger_index)
Definition: objects-inl.h:2706
void set_map_word(MapWord map_word)
Definition: objects-inl.h:1164
void set_binary_op_result_type(byte value)
Definition: objects-inl.h:3417
bool has_debug_break_slots()
Definition: objects-inl.h:3262
static const int kSize
Definition: objects.h:8238
void set(int index, uint8_t value)
Definition: objects-inl.h:2757
static const byte kNotBooleanMask
Definition: objects.h:7965
int GetSortedKeyIndex(int descriptor_number)
Definition: objects-inl.h:2099
static const int kExternalTwoByteRepresentationTag
Definition: v8.h:4084
const uint32_t kSymbolTag
Definition: objects.h:464
bool HasLocalProperty(String *name)
Definition: objects-inl.h:5055
const int kFailureTypeTagMask
Definition: objects.h:1082
static const byte kFalse
Definition: objects.h:7963
Type type() const
Definition: objects-inl.h:997
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2897
static Flags RemoveTypeFromFlags(Flags flags)
Definition: objects-inl.h:3553
void set_visitor_id(int visitor_id)
Definition: objects-inl.h:2933
static bool HasHeapObjectTag(internal::Object *value)
Definition: v8.h:4104
const uint32_t kAsciiStringTag
Definition: objects.h:470
MUST_USE_RESULT MaybeObject * set_elements_transition_map(Map *transitioned_map)
Definition: objects-inl.h:3712
#define ACCESSORS_TO_SMI(holder, name, offset)
Definition: objects-inl.h:99
String * GetSortedKey(int descriptor_number)
Definition: objects-inl.h:2104
T Min(T a, T b)
Definition: utils.h:229
void set_property_attributes(PropertyAttributes attributes)
Definition: objects-inl.h:5130
signed short int16_t
Definition: unicode.cc:45
static const int kSize
Definition: objects.h:6312
void set_code(Code *code)
Definition: objects-inl.h:4351
MUST_USE_RESULT MaybeObject * GetHash(CreationFlag flag)
Definition: objects.cc:756
static ConsString * cast(Object *obj)
void set_safepoint_table_offset(unsigned offset)
Definition: objects-inl.h:3341
V8 runtime flag help and usage text (DEFINE_* declarations)
Definition: flags.cc:495
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1731
bool is_compiled_optimizable()
Definition: objects-inl.h:3277
void set_flags(Flags flags)
Definition: objects-inl.h:3153
static const int kMaxValue
Definition: objects.h:1050
static const int kCodeCacheOffset
Definition: objects.h:5136
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2783
#define WRITE_DOUBLE_FIELD(p, offset, value)
Definition: objects-inl.h:914
static const int kNotFound
Definition: objects.h:2619
void set_non_instance_prototype(bool value)
Definition: objects-inl.h:3049
const uc32 kMaxAsciiCharCode
Definition: globals.h:263
uses_arguments
Definition: objects-inl.h:3947
uint16_t SeqTwoByteStringGet(int index)
Definition: objects-inl.h:2530
Object ** GetValueSlot(int descriptor_number)
Definition: objects-inl.h:2115
StringDictionary * property_dictionary()
Definition: objects-inl.h:4899
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
Definition: objects-inl.h:5188
const int kCharSize
Definition: globals.h:215
int Lookup(Map *source, String *name)
Definition: heap.h:2402
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
MUST_USE_RESULT MaybeObject * CopySize(int new_length)
Definition: objects.cc:5989
static const byte kTrue
Definition: objects.h:7964
static const int kExponentOffset
Definition: objects.h:1348
void SetSortedKey(int pointer, int descriptor_number)
Definition: objects-inl.h:2109
void InitializeDescriptors(DescriptorArray *descriptors)
Definition: objects-inl.h:3608
FixedArray * GetPrototypeTransitions()
Definition: objects-inl.h:3720
void set_allow_osr_at_loop_nesting_level(int level)
Definition: objects-inl.h:3298
static void AddFastPropertyUsingMap(Handle< JSObject > object, Handle< Map > map)
Definition: objects.cc:2797
static JSObject * cast(Object *obj)
uint32_t RoundUpToPowerOf2(uint32_t x)
Definition: utils.h:186
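RoundUpToPowerOf2 is typically used when sizing hash tables and backing stores (compare ComputeCapacity in this index). A sketch of the common bit-smearing implementation, assumed here rather than copied from utils.h:
#include <stdint.h>
inline uint32_t SketchRoundUpToPowerOf2(uint32_t x) {
  x = x - 1;       // exact powers of two map to themselves (x == 0 yields 0)
  x |= x >> 1;     // smear the highest set bit into every lower position...
  x |= x >> 2;
  x |= x >> 4;
  x |= x >> 8;
  x |= x >> 16;
  return x + 1;    // ...then add one to reach the next power of two
}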
access_check_info
Definition: objects-inl.h:3866
int64_t get_representation(int index)
Definition: objects-inl.h:1792
bool matches_inlined_type_change_checksum(int checksum)
Definition: objects-inl.h:5425
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
Definition: objects-inl.h:4528
PropertyAttributes GetLocalPropertyAttribute(String *name)
Definition: objects.cc:3179
int FastD2I(double x)
Definition: conversions.h:69
static Flags ComputeMonomorphicFlags(Kind kind, StubType type, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, int argc=-1)
Definition: objects-inl.h:3514
void set_initial_map(Map *value)
Definition: objects-inl.h:4410
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
Definition: objects-inl.h:3477
MUST_USE_RESULT MaybeObject * EnsureCanContainElements(Object **elements, uint32_t count, EnsureElementsMode mode)
Definition: objects-inl.h:1268
static const int kFirstIndex
Definition: objects.h:2623
void set_unused_property_fields(int value)
Definition: objects-inl.h:3024
const uint32_t kStringEncodingMask
Definition: objects.h:468
void set_stack_check_table_offset(unsigned offset)
Definition: objects-inl.h:3358
static int ComputeCapacity(int at_least_space_for)
Definition: objects-inl.h:2268
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
Definition: objects-inl.h:1625
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2557
flag
Definition: objects-inl.h:3923
static JSFunction * cast(Object *obj)