v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
objects-inl.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 //
28 // Review notes:
29 //
30 // - The use of macros in these inline functions may seem superfluous
31 // but it is absolutely needed to make sure gcc generates optimal
32 // code. gcc is not happy when attempting to inline too deep.
33 //
34 
35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_
37 
38 #include "elements.h"
39 #include "objects.h"
40 #include "contexts.h"
41 #include "conversions-inl.h"
42 #include "heap.h"
43 #include "isolate.h"
44 #include "property.h"
45 #include "spaces.h"
46 #include "store-buffer.h"
47 #include "v8memory.h"
48 #include "factory.h"
49 #include "incremental-marking.h"
50 
51 namespace v8 {
52 namespace internal {
53 
// Reconstruct PropertyDetails from its Smi-encoded form (inverse of AsSmi()).
54 PropertyDetails::PropertyDetails(Smi* smi) {
55  value_ = smi->value();
56 }
57 
58 
// Encode these details as a Smi so they can be stored directly in a
// descriptor array or dictionary entry.
59 Smi* PropertyDetails::AsSmi() {
60  return Smi::FromInt(value_);
61 }
62 
63 
// Return a copy of these details with the DeletedField bit set, marking the
// owning entry as deleted without physically removing it.
64 PropertyDetails PropertyDetails::AsDeleted() {
65  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
66  return PropertyDetails(smi);
67 }
68 
69 
// Expands to an Object::Is##type() predicate that compares the heap
// object's instance type against a single expected instance type.
70 #define TYPE_CHECKER(type, instancetype) \
71  bool Object::Is##type() { \
72  return Object::IsHeapObject() && \
73  HeapObject::cast(this)->map()->instance_type() == instancetype; \
74  }
75 
76 
// Expands to a checked type::cast(Object*) helper; the cast is verified
// only in debug builds (ASSERT), a plain reinterpret_cast otherwise.
77 #define CAST_ACCESSOR(type) \
78  type* type::cast(Object* object) { \
79  ASSERT(object->Is##type()); \
80  return reinterpret_cast<type*>(object); \
81  }
82 
83 
// Getter/setter pair for a raw (untagged) int field at a fixed offset.
84 #define INT_ACCESSORS(holder, name, offset) \
85  int holder::name() { return READ_INT_FIELD(this, offset); } \
86  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
87 
88 
// Getter/setter pair for a tagged pointer field; the setter applies the
// (conditional) write barrier required by incremental marking and the
// store buffer.
89 #define ACCESSORS(holder, name, type, offset) \
90  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
91  void holder::set_##name(type* value, WriteBarrierMode mode) { \
92  WRITE_FIELD(this, offset, value); \
93  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
94  }
95 
96 
97 // Getter that returns a tagged Smi and setter that writes a tagged Smi.
// No write barrier is emitted: Smis are immediates, not heap pointers.
98 #define ACCESSORS_TO_SMI(holder, name, offset) \
99  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
100  void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
101  WRITE_FIELD(this, offset, value); \
102  }
103 
104 
105 // Getter that returns a Smi as an int and writes an int as a Smi.
// Smi stores need no write barrier, so none is emitted here.
106 #define SMI_ACCESSORS(holder, name, offset) \
107  int holder::name() { \
108  Object* value = READ_FIELD(this, offset); \
109  return Smi::cast(value)->value(); \
110  } \
111  void holder::set_##name(int value) { \
112  WRITE_FIELD(this, offset, Smi::FromInt(value)); \
113  }
114 
115 
// Read-only accessor for a single bit ("offset") inside another field of
// the holder; BOOL_ACCESSORS below adds the matching setter.
116 #define BOOL_GETTER(holder, field, name, offset) \
117  bool holder::name() { \
118  return BooleanBit::get(field(), offset); \
119  } \
120 
121 
// Getter/setter pair for a single bit stored inside another field of the
// holder ("field" names the backing accessor, "offset" the bit position).
122 #define BOOL_ACCESSORS(holder, field, name, offset) \
123  bool holder::name() { \
124  return BooleanBit::get(field(), offset); \
125  } \
126  void holder::set_##name(bool value) { \
127  set_##field(BooleanBit::set(field(), offset, value)); \
128  }
129 
130 
132  return IsFixedArray() || IsFixedDoubleArray();
133 }
134 
135 
137  // There is a constraint on the object; check.
138  if (!this->IsJSObject()) return false;
139  // Fetch the constructor function of the object.
140  Object* cons_obj = JSObject::cast(this)->map()->constructor();
141  if (!cons_obj->IsJSFunction()) return false;
142  JSFunction* fun = JSFunction::cast(cons_obj);
143  // Iterate through the chain of inheriting function templates to
144  // see if the required one occurs.
145  for (Object* type = fun->shared()->function_data();
146  type->IsFunctionTemplateInfo();
147  type = FunctionTemplateInfo::cast(type)->parent_template()) {
148  if (type == expected) return true;
149  }
150  // Didn't find the required type in the inheritance chain.
151  return false;
152 }
153 
154 
// An object pointer is a Smi when its low tag bits carry the Smi tag.
155 bool Object::IsSmi() {
156  return HAS_SMI_TAG(this);
157 }
158 
159 
// Heap objects are recognized by their pointer tag; this delegates to the
// public-API helper in v8.h's Internals.
160 bool Object::IsHeapObject() {
161  return Internals::HasHeapObjectTag(this);
162 }
163 
164 
166  ASSERT(!this->IsFailure());
167  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
168 }
169 
170 
172 
173 
174 bool Object::IsString() {
175  return Object::IsHeapObject()
177 }
178 
179 
180 bool Object::IsSpecObject() {
181  return Object::IsHeapObject()
183 }
184 
185 
186 bool Object::IsSpecFunction() {
187  if (!Object::IsHeapObject()) return false;
189  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
190 }
191 
192 
193 bool Object::IsSymbol() {
194  if (!this->IsHeapObject()) return false;
195  uint32_t type = HeapObject::cast(this)->map()->instance_type();
196  // Because the symbol tag is non-zero and no non-string types have the
197  // symbol bit set we can test for symbols with a very simple test
198  // operation.
201  return (type & kIsSymbolMask) != 0;
202 }
203 
204 
205 bool Object::IsConsString() {
206  if (!IsString()) return false;
207  return StringShape(String::cast(this)).IsCons();
208 }
209 
210 
211 bool Object::IsSlicedString() {
212  if (!IsString()) return false;
213  return StringShape(String::cast(this)).IsSliced();
214 }
215 
216 
217 bool Object::IsSeqString() {
218  if (!IsString()) return false;
219  return StringShape(String::cast(this)).IsSequential();
220 }
221 
222 
223 bool Object::IsSeqAsciiString() {
224  if (!IsString()) return false;
225  return StringShape(String::cast(this)).IsSequential() &&
227 }
228 
229 
230 bool Object::IsSeqTwoByteString() {
231  if (!IsString()) return false;
232  return StringShape(String::cast(this)).IsSequential() &&
234 }
235 
236 
237 bool Object::IsExternalString() {
238  if (!IsString()) return false;
239  return StringShape(String::cast(this)).IsExternal();
240 }
241 
242 
243 bool Object::IsExternalAsciiString() {
244  if (!IsString()) return false;
245  return StringShape(String::cast(this)).IsExternal() &&
247 }
248 
249 
250 bool Object::IsExternalTwoByteString() {
251  if (!IsString()) return false;
252  return StringShape(String::cast(this)).IsExternal() &&
254 }
255 
257  // Dictionary is covered under FixedArray.
258  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
259 }
260 
261 StringShape::StringShape(String* str)
262  : type_(str->map()->instance_type()) {
263  set_valid();
265 }
266 
267 
268 StringShape::StringShape(Map* map)
269  : type_(map->instance_type()) {
270  set_valid();
272 }
273 
274 
275 StringShape::StringShape(InstanceType t)
276  : type_(static_cast<uint32_t>(t)) {
277  set_valid();
279 }
280 
281 
282 bool StringShape::IsSymbol() {
283  ASSERT(valid());
285  return (type_ & kIsSymbolMask) != 0;
286 }
287 
288 
290  uint32_t type = map()->instance_type();
291  return (type & kStringEncodingMask) == kAsciiStringTag;
292 }
293 
294 
296  uint32_t type = map()->instance_type();
297  return (type & kStringEncodingMask) == kTwoByteStringTag;
298 }
299 
300 
302  uint32_t type = map()->instance_type();
305  ASSERT(IsFlat());
306  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
307  case kAsciiStringTag:
308  return true;
309  case kTwoByteStringTag:
310  return false;
311  default: // Cons or sliced string. Need to go deeper.
313  }
314 }
315 
316 
318  uint32_t type = map()->instance_type();
321  ASSERT(IsFlat());
322  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
323  case kAsciiStringTag:
324  return false;
325  case kTwoByteStringTag:
326  return true;
327  default: // Cons or sliced string. Need to go deeper.
329  }
330 }
331 
332 
334  uint32_t type = map()->instance_type();
335  return (type & kStringEncodingMask) == kAsciiStringTag ||
337 }
338 
339 
340 bool StringShape::IsCons() {
342 }
343 
344 
345 bool StringShape::IsSliced() {
347 }
348 
349 
350 bool StringShape::IsIndirect() {
352 }
353 
354 
355 bool StringShape::IsExternal() {
357 }
358 
359 
360 bool StringShape::IsSequential() {
362 }
363 
364 
// Extract just the representation bits of the cached instance type.
365 StringRepresentationTag StringShape::representation_tag() {
366  uint32_t tag = (type_ & kStringRepresentationMask);
367  return static_cast<StringRepresentationTag>(tag);
368 }
369 
370 
// Extract just the encoding bits (ASCII vs. two-byte) of the instance type.
371 uint32_t StringShape::encoding_tag() {
372  return type_ & kStringEncodingMask;
373 }
374 
375 
376 uint32_t StringShape::full_representation_tag() {
378 }
379 
380 
383 
384 
385 bool StringShape::IsSequentialAscii() {
386  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
387 }
388 
389 
390 bool StringShape::IsSequentialTwoByte() {
391  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
392 }
393 
394 
395 bool StringShape::IsExternalAscii() {
396  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
397 }
398 
399 
400 bool StringShape::IsExternalTwoByte() {
401  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
402 }
403 
404 
407 
408 
410  ASSERT(0 <= index && index <= length_);
411  if (is_ascii_) {
412  return static_cast<const byte*>(start_)[index];
413  } else {
414  return static_cast<const uc16*>(start_)[index];
415  }
416 }
417 
418 
// Numbers are either immediate Smis or boxed HeapNumbers.
419 bool Object::IsNumber() {
420  return IsSmi() || IsHeapNumber();
421 }
422 
423 
424 TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
425 TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
426 
427 
428 bool Object::IsFiller() {
429  if (!Object::IsHeapObject()) return false;
430  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
431  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
432 }
433 
434 
436 
437 
438 bool Object::IsExternalArray() {
439  if (!Object::IsHeapObject())
440  return false;
441  InstanceType instance_type =
442  HeapObject::cast(this)->map()->instance_type();
443  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
444  instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
445 }
446 
447 
448 TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
449 TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
450 TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
451 TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
452 TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
453 TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
454 TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
455 TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
456 
457 
// A MaybeObject is a failure when its pointer carries the failure tag.
458 bool MaybeObject::IsFailure() {
459  return HAS_FAILURE_TAG(this);
460 }
461 
462 
463 bool MaybeObject::IsRetryAfterGC() {
464  return HAS_FAILURE_TAG(this)
466 }
467 
468 
469 bool MaybeObject::IsOutOfMemory() {
470  return HAS_FAILURE_TAG(this)
472 }
473 
474 
// True only for the singleton Failure::Exception() sentinel.
475 bool MaybeObject::IsException() {
476  return this == Failure::Exception();
477 }
478 
479 
// Non-failure values are unwrapped and tested for the hole sentinel.
480 bool MaybeObject::IsTheHole() {
481  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
482 }
483 
484 
// Checked downcast: the argument must carry the failure tag (debug-only
// ASSERT; a plain reinterpret_cast in release builds).
485 Failure* Failure::cast(MaybeObject* obj) {
486  ASSERT(HAS_FAILURE_TAG(obj));
487  return reinterpret_cast<Failure*>(obj);
488 }
489 
490 
491 bool Object::IsJSReceiver() {
493  return IsHeapObject() &&
495 }
496 
497 
498 bool Object::IsJSObject() {
500  return IsHeapObject() &&
502 }
503 
504 
505 bool Object::IsJSProxy() {
506  if (!Object::IsHeapObject()) return false;
507  InstanceType type = HeapObject::cast(this)->map()->instance_type();
508  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
509 }
510 
511 
512 TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
515 TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
516 TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
518 TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
519 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
520 
521 
522 bool Object::IsDescriptorArray() {
523  return IsFixedArray();
524 }
525 
526 
527 bool Object::IsDeoptimizationInputData() {
528  // Must be a fixed array.
529  if (!IsFixedArray()) return false;
530 
531  // There's no sure way to detect the difference between a fixed array and
532  // a deoptimization data array. Since this is used for asserts we can
533  // check that the length is zero or else the fixed size plus a multiple of
534  // the entry size.
535  int length = FixedArray::cast(this)->length();
536  if (length == 0) return true;
537 
539  return length >= 0 &&
541 }
542 
543 
544 bool Object::IsDeoptimizationOutputData() {
545  if (!IsFixedArray()) return false;
546  // There's actually no way to see the difference between a fixed array and
547  // a deoptimization data array. Since this is used for asserts we can check
548  // that the length is plausible though.
549  if (FixedArray::cast(this)->length() % 2 != 0) return false;
550  return true;
551 }
552 
553 
554 bool Object::IsTypeFeedbackCells() {
555  if (!IsFixedArray()) return false;
556  // There's actually no way to see the difference between a fixed array and
557  // a cache cells array. Since this is used for asserts we can check that
558  // the length is plausible though.
559  if (FixedArray::cast(this)->length() % 2 != 0) return false;
560  return true;
561 }
562 
563 
564 bool Object::IsContext() {
565  if (Object::IsHeapObject()) {
566  Map* map = HeapObject::cast(this)->map();
567  Heap* heap = map->GetHeap();
568  return (map == heap->function_context_map() ||
569  map == heap->catch_context_map() ||
570  map == heap->with_context_map() ||
571  map == heap->global_context_map() ||
572  map == heap->block_context_map() ||
573  map == heap->module_context_map());
574  }
575  return false;
576 }
577 
578 
// A global context is recognized by its dedicated map.
579 bool Object::IsGlobalContext() {
580  return Object::IsHeapObject() &&
581  HeapObject::cast(this)->map() ==
582  HeapObject::cast(this)->GetHeap()->global_context_map();
583 }
584 
585 
586 bool Object::IsModuleContext() {
587  return Object::IsHeapObject() &&
588  HeapObject::cast(this)->map() ==
589  HeapObject::cast(this)->GetHeap()->module_context_map();
590 }
591 
592 
593 bool Object::IsScopeInfo() {
594  return Object::IsHeapObject() &&
595  HeapObject::cast(this)->map() ==
596  HeapObject::cast(this)->GetHeap()->scope_info_map();
597 }
598 
599 
600 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
601 
602 
603 template <> inline bool Is<JSFunction>(Object* obj) {
604  return obj->IsJSFunction();
605 }
606 
607 
608 TYPE_CHECKER(Code, CODE_TYPE)
609 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
610 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
611 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
612 TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
613 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
614 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
615 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
616 
617 
618 bool Object::IsStringWrapper() {
619  return IsJSValue() && JSValue::cast(this)->value()->IsString();
620 }
621 
622 
623 TYPE_CHECKER(Foreign, FOREIGN_TYPE)
624 
625 
626 bool Object::IsBoolean() {
627  return IsOddball() &&
628  ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
629 }
630 
631 
632 TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
633 TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
634 
635 
636 template <> inline bool Is<JSArray>(Object* obj) {
637  return obj->IsJSArray();
638 }
639 
640 
// Hash tables are recognized solely by the shared hash-table map.
641 bool Object::IsHashTable() {
642  return Object::IsHeapObject() &&
643  HeapObject::cast(this)->map() ==
644  HeapObject::cast(this)->GetHeap()->hash_table_map();
645 }
646 
647 
// Any hash table other than the heap's symbol table counts as a dictionary.
648 bool Object::IsDictionary() {
649  return IsHashTable() &&
650  this != HeapObject::cast(this)->GetHeap()->symbol_table();
651 }
652 
653 
// The symbol table is the one hash table identical to the heap's
// raw_unchecked_symbol_table().
654 bool Object::IsSymbolTable() {
655  return IsHashTable() && this ==
656  HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
657 }
658 
659 
660 bool Object::IsJSFunctionResultCache() {
661  if (!IsFixedArray()) return false;
662  FixedArray* self = FixedArray::cast(this);
663  int length = self->length();
664  if (length < JSFunctionResultCache::kEntriesIndex) return false;
667  return false;
668  }
669 #ifdef DEBUG
670  if (FLAG_verify_heap) {
671  reinterpret_cast<JSFunctionResultCache*>(this)->
672  JSFunctionResultCacheVerify();
673  }
674 #endif
675  return true;
676 }
677 
678 
// Heuristic check (used by asserts): a normalized map cache is a fixed
// array of exactly NormalizedMapCache::kEntries elements; in debug builds
// with --verify-heap the contents are verified as well.
679 bool Object::IsNormalizedMapCache() {
680  if (!IsFixedArray()) return false;
681  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
682  return false;
683  }
684 #ifdef DEBUG
685  if (FLAG_verify_heap) {
686  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
687  }
688 #endif
689  return true;
690 }
691 
692 
693 bool Object::IsCompilationCacheTable() {
694  return IsHashTable();
695 }
696 
697 
698 bool Object::IsCodeCacheHashTable() {
699  return IsHashTable();
700 }
701 
702 
703 bool Object::IsPolymorphicCodeCacheHashTable() {
704  return IsHashTable();
705 }
706 
707 
708 bool Object::IsMapCache() {
709  return IsHashTable();
710 }
711 
712 
713 bool Object::IsPrimitive() {
714  return IsOddball() || IsNumber() || IsString();
715 }
716 
717 
718 bool Object::IsJSGlobalProxy() {
719  bool result = IsHeapObject() &&
720  (HeapObject::cast(this)->map()->instance_type() ==
722  ASSERT(!result || IsAccessCheckNeeded());
723  return result;
724 }
725 
726 
727 bool Object::IsGlobalObject() {
728  if (!IsHeapObject()) return false;
729 
730  InstanceType type = HeapObject::cast(this)->map()->instance_type();
731  return type == JS_GLOBAL_OBJECT_TYPE ||
732  type == JS_BUILTINS_OBJECT_TYPE;
733 }
734 
735 
736 TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
737 TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
738 
739 
740 bool Object::IsUndetectableObject() {
741  return IsHeapObject()
742  && HeapObject::cast(this)->map()->is_undetectable();
743 }
744 
745 
746 bool Object::IsAccessCheckNeeded() {
747  return IsHeapObject()
749 }
750 
751 
753  if (!IsHeapObject()) return false;
754  switch (HeapObject::cast(this)->map()->instance_type()) {
755 #define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
757 #undef MAKE_STRUCT_CASE
758  default: return false;
759  }
760 }
761 
762 
// Expands to an Object::Is##Name() predicate for a struct instance type;
// intended to be instantiated for each entry of STRUCT_LIST.
763 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
764  bool Object::Is##Name() { \
765  return Object::IsHeapObject() \
766  && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
767  }
769 #undef MAKE_STRUCT_PREDICATE
770 
771 
772 bool Object::IsUndefined() {
773  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
774 }
775 
776 
777 bool Object::IsNull() {
778  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
779 }
780 
781 
782 bool Object::IsTheHole() {
783  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
784 }
785 
786 
787 bool Object::IsTrue() {
788  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
789 }
790 
791 
792 bool Object::IsFalse() {
793  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
794 }
795 
796 
798  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
799 }
800 
801 
// Return this number's value as a double, whether it is stored as an
// immediate Smi or as a boxed HeapNumber. Caller must ensure IsNumber().
802 double Object::Number() {
803  ASSERT(IsNumber());
804  return IsSmi()
805  ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
806  : reinterpret_cast<HeapNumber*>(this)->value();
807 }
808 
809 
811  return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
812 }
813 
814 
815 MaybeObject* Object::ToSmi() {
816  if (IsSmi()) return this;
817  if (IsHeapNumber()) {
818  double value = HeapNumber::cast(this)->value();
819  int int_value = FastD2I(value);
820  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
821  return Smi::FromInt(int_value);
822  }
823  }
824  return Failure::Exception();
825 }
826 
827 
829  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
830 }
831 
832 
// Fetch element |index| with this object as the receiver. May run getters
// and therefore allocate (hence the allocation-allowed assertion).
833 MaybeObject* Object::GetElement(uint32_t index) {
834  // GetElement can trigger a getter which can cause allocation.
835  // This was not always the case. This ASSERT is here to catch
836  // leftover incorrect uses.
837  ASSERT(HEAP->IsAllocationAllowed());
838  return GetElementWithReceiver(this, index);
839 }
840 
841 
843  MaybeObject* maybe = GetElementWithReceiver(this, index);
844  ASSERT(!maybe->IsFailure());
845  Object* result = NULL; // Initialization to please compiler.
846  maybe->ToObject(&result);
847  return result;
848 }
849 
850 
851 MaybeObject* Object::GetProperty(String* key) {
852  PropertyAttributes attributes;
853  return GetPropertyWithReceiver(this, key, &attributes);
854 }
855 
856 
857 MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
858  return GetPropertyWithReceiver(this, key, attributes);
859 }
860 
861 
// Raw field access. FIELD_ADDR removes the heap-object tag to get the real
// address of a field at |offset| inside object |p|.
862 #define FIELD_ADDR(p, offset) \
863  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
864 
// Read a tagged pointer field. No barrier needed for reads.
865 #define READ_FIELD(p, offset) \
866  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
867 
// Write a tagged pointer field WITHOUT a write barrier; callers must pair
// this with WRITE_BARRIER / CONDITIONAL_WRITE_BARRIER when storing heap
// pointers.
868 #define WRITE_FIELD(p, offset, value) \
869  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
870 
// Unconditional write barrier: records the write for incremental marking,
// and for the store buffer when the value is in new space.
871 #define WRITE_BARRIER(heap, object, offset, value) \
872  heap->incremental_marking()->RecordWrite( \
873  object, HeapObject::RawField(object, offset), value); \
874  if (heap->InNewSpace(value)) { \
875  heap->RecordWrite(object->address(), offset); \
876  }
877 
// Same as WRITE_BARRIER but skipped entirely unless the caller requested
// UPDATE_WRITE_BARRIER mode.
878 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
879  if (mode == UPDATE_WRITE_BARRIER) { \
880  heap->incremental_marking()->RecordWrite( \
881  object, HeapObject::RawField(object, offset), value); \
882  if (heap->InNewSpace(value)) { \
883  heap->RecordWrite(object->address(), offset); \
884  } \
885  }
886 
887 #ifndef V8_TARGET_ARCH_MIPS
888  #define READ_DOUBLE_FIELD(p, offset) \
889  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
890 #else // V8_TARGET_ARCH_MIPS
891  // Prevent gcc from using load-double (mips ldc1) on (possibly)
892  // non-64-bit aligned HeapNumber::value.
893  static inline double read_double_field(void* p, int offset) {
894  union conversion {
895  double d;
896  uint32_t u[2];
897  } c;
898  c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
899  c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
900  return c.d;
901  }
902  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
903 #endif // V8_TARGET_ARCH_MIPS
904 
905 #ifndef V8_TARGET_ARCH_MIPS
906  #define WRITE_DOUBLE_FIELD(p, offset, value) \
907  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
908 #else // V8_TARGET_ARCH_MIPS
909  // Prevent gcc from using store-double (mips sdc1) on (possibly)
910  // non-64-bit aligned HeapNumber::value.
911  static inline void write_double_field(void* p, int offset,
912  double value) {
913  union conversion {
914  double d;
915  uint32_t u[2];
916  } c;
917  c.d = value;
918  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
919  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
920  }
921  #define WRITE_DOUBLE_FIELD(p, offset, value) \
922  write_double_field(p, offset, value)
923 #endif // V8_TARGET_ARCH_MIPS
924 
925 
926 #define READ_INT_FIELD(p, offset) \
927  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))
928 
929 #define WRITE_INT_FIELD(p, offset, value) \
930  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
931 
932 #define READ_INTPTR_FIELD(p, offset) \
933  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))
934 
935 #define WRITE_INTPTR_FIELD(p, offset, value) \
936  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
937 
938 #define READ_UINT32_FIELD(p, offset) \
939  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))
940 
941 #define WRITE_UINT32_FIELD(p, offset, value) \
942  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
943 
944 #define READ_INT64_FIELD(p, offset) \
945  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
946 
947 #define WRITE_INT64_FIELD(p, offset, value) \
948  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
949 
950 #define READ_SHORT_FIELD(p, offset) \
951  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
952 
953 #define WRITE_SHORT_FIELD(p, offset, value) \
954  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
955 
956 #define READ_BYTE_FIELD(p, offset) \
957  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))
958 
959 #define WRITE_BYTE_FIELD(p, offset, value) \
960  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
961 
962 
963 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
964  return &READ_FIELD(obj, byte_offset);
965 }
966 
967 
// Extract the untagged integer payload (delegates to the public-API helper).
968 int Smi::value() {
969  return Internals::SmiValue(this);
970 }
971 
972 
// Tag an int as a Smi: shift the value into the payload bits and OR in the
// Smi tag. The value must already be in Smi range (debug-asserted).
973 Smi* Smi::FromInt(int value) {
974  ASSERT(Smi::IsValid(value));
975  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
976  intptr_t tagged_value =
977  (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
978  return reinterpret_cast<Smi*>(tagged_value);
979 }
980 
981 
982 Smi* Smi::FromIntptr(intptr_t value) {
983  ASSERT(Smi::IsValid(value));
984  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
985  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
986 }
987 
988 
990  return static_cast<Type>(value() & kFailureTypeTagMask);
991 }
992 
993 
995  return type() == INTERNAL_ERROR;
996 }
997 
998 
1000  return type() == OUT_OF_MEMORY_EXCEPTION;
1001 }
1002 
1003 
1006  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
1007  & kSpaceTagMask);
1008 }
1009 
1010 
1012  return Construct(INTERNAL_ERROR);
1013 }
1014 
1015 
1017  return Construct(EXCEPTION);
1018 }
1019 
1020 
1022  return Construct(OUT_OF_MEMORY_EXCEPTION);
1023 }
1024 
1025 
1026 intptr_t Failure::value() const {
1027  return static_cast<intptr_t>(
1028  reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
1029 }
1030 
1031 
1033  return RetryAfterGC(NEW_SPACE);
1034 }
1035 
1036 
1038  ASSERT((space & ~kSpaceTagMask) == 0);
1039  return Construct(RETRY_AFTER_GC, space);
1040 }
1041 
1042 
// Pack (type, value) into a tagged Failure pointer: the value sits above
// the type tag, with the failure tag in the lowest bits. The ASSERT checks
// that shifting in the failure tag loses no information.
1043 Failure* Failure::Construct(Type type, intptr_t value) {
1044  uintptr_t info =
1045  (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
1046  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
1047  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
1048 }
1049 
1050 
// Whether |value| fits in a Smi on the current architecture. The debug-only
// range computation cross-checks the fast bit-trick result.
1051 bool Smi::IsValid(intptr_t value) {
1052 #ifdef DEBUG
1053  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
1054 #endif
1055 
1056 #ifdef V8_TARGET_ARCH_X64
1057  // To be representable as a long smi, the value must be a 32-bit integer.
1058  bool result = (value == static_cast<int32_t>(value));
1059 #else
1060  // To be representable as an tagged small integer, the two
1061  // most-significant bits of 'value' must be either 00 or 11 due to
1062  // sign-extension. To check this we add 01 to the two
1063  // most-significant bits, and check if the most-significant bit is 0
1064  //
1065  // CAUTION: The original code below:
1066  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
1067  // may lead to incorrect results according to the C language spec, and
1068  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
1069  // compiler may produce undefined results in case of signed integer
1070  // overflow. The computation must be done w/ unsigned ints.
1071  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
1072 #endif
1073  ASSERT(result == in_range);
1074  return result;
1075 }
1076 
1077 
1078 MapWord MapWord::FromMap(Map* map) {
1079  return MapWord(reinterpret_cast<uintptr_t>(map));
1080 }
1081 
1082 
1083 Map* MapWord::ToMap() {
1084  return reinterpret_cast<Map*>(value_);
1085 }
1086 
1087 
1088 bool MapWord::IsForwardingAddress() {
1089  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
1090 }
1091 
1092 
// Encode a destination object as a forwarding address stored in a map
// word: stripping the heap-object tag leaves a Smi-tagged word, which is
// how IsForwardingAddress() recognizes it.
1093 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1094  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1095  return MapWord(reinterpret_cast<uintptr_t>(raw));
1096 }
1097 
1098 
// Decode the forwarding address previously stored by FromForwardingAddress.
1099 HeapObject* MapWord::ToForwardingAddress() {
1100  ASSERT(IsForwardingAddress());
1101  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1102 }
1103 
1104 
1105 #ifdef DEBUG
1106 void HeapObject::VerifyObjectField(int offset) {
1107  VerifyPointer(READ_FIELD(this, offset));
1108 }
1109 
1110 void HeapObject::VerifySmiField(int offset) {
1111  ASSERT(READ_FIELD(this, offset)->IsSmi());
1112 }
1113 #endif
1114 
1115 
1117  Heap* heap =
1118  MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1119  ASSERT(heap != NULL);
1120  ASSERT(heap->isolate() == Isolate::Current());
1121  return heap;
1122 }
1123 
1124 
1126  return GetHeap()->isolate();
1127 }
1128 
1129 
1131  return map_word().ToMap();
1132 }
1133 
1134 
// Install a new map, notifying the incremental marker so the map-pointer
// write is tracked.
1135 void HeapObject::set_map(Map* value) {
1136  set_map_word(MapWord::FromMap(value));
1137  if (value != NULL) {
1138  // TODO(1600) We are passing NULL as a slot because maps can never be on
1139  // evacuation candidate.
1140  value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1141  }
1142 }
1143 
1144 
1145 // Unsafe accessor omitting write barrier.
1147  set_map_word(MapWord::FromMap(value));
1148 }
1149 
1150 
1152  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1153 }
1154 
1155 
// Store the raw map word (map pointer or forwarding address) directly.
1156 void HeapObject::set_map_word(MapWord map_word) {
1157  // WRITE_FIELD does not invoke write barrier, but there is no need
1158  // here.
1159  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1160 }
1161 
1162 
1164  ASSERT_TAG_ALIGNED(address);
1165  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1166 }
1167 
1168 
1170  return reinterpret_cast<Address>(this) - kHeapObjectTag;
1171 }
1172 
1173 
1175  return SizeFromMap(map());
1176 }
1177 
1178 
1179 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
1180  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1181  reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1182 }
1183 
1184 
1185 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1186  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1187 }
1188 
1189 
1191  return READ_DOUBLE_FIELD(this, kValueOffset);
1192 }
1193 
1194 
// Stores the unboxed double payload of this HeapNumber (no write
// barrier needed: the payload is not a tagged pointer).
void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}
1198 
1199 
1201  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
1203 }
1204 
1205 
1207  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
1208 }
1209 
1210 
1211 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
1212 
1213 
// Returns the address of element 0, i.e. the start of the payload
// right after the FixedArray header.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
1217 
1218 
1220  Object* the_hole = GetHeap()->the_hole_value();
1221  Object** current = GetFirstElementAddress();
1222  for (int i = 0; i < length(); ++i) {
1223  Object* candidate = *current++;
1224  if (!candidate->IsSmi() && candidate != the_hole) return false;
1225  }
1226  return true;
1227 }
1228 
1229 
// Returns the elements backing store. The field always holds a
// FixedArrayBase subtype, so the unchecked static_cast is safe.
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
1234 
1235 
1237 #if DEBUG
1239  ElementsAccessor* accessor = GetElementsAccessor();
1240  accessor->Validate(this);
1241  }
1242 #endif
1243 }
1244 
1245 
1247  ValidateElements();
1248  ElementsKind elements_kind = map()->elements_kind();
1249  if (!IsFastObjectElementsKind(elements_kind)) {
1250  if (IsFastHoleyElementsKind(elements_kind)) {
1252  } else {
1254  }
1255  }
1256  return this;
1257 }
1258 
1259 
1261  uint32_t count,
1262  EnsureElementsMode mode) {
1263  ElementsKind current_kind = map()->elements_kind();
1264  ElementsKind target_kind = current_kind;
1266  bool is_holey = IsFastHoleyElementsKind(current_kind);
1267  if (current_kind == FAST_HOLEY_ELEMENTS) return this;
1268  Heap* heap = GetHeap();
1269  Object* the_hole = heap->the_hole_value();
1270  for (uint32_t i = 0; i < count; ++i) {
1271  Object* current = *objects++;
1272  if (current == the_hole) {
1273  is_holey = true;
1274  target_kind = GetHoleyElementsKind(target_kind);
1275  } else if (!current->IsSmi()) {
1276  if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
1277  if (IsFastSmiElementsKind(target_kind)) {
1278  if (is_holey) {
1279  target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
1280  } else {
1281  target_kind = FAST_DOUBLE_ELEMENTS;
1282  }
1283  }
1284  } else if (is_holey) {
1285  target_kind = FAST_HOLEY_ELEMENTS;
1286  break;
1287  } else {
1288  target_kind = FAST_ELEMENTS;
1289  }
1290  }
1291  }
1292 
1293  if (target_kind != current_kind) {
1294  return TransitionElementsKind(target_kind);
1295  }
1296  return this;
1297 }
1298 
1299 
1301  uint32_t length,
1302  EnsureElementsMode mode) {
1303  if (elements->map() != GetHeap()->fixed_double_array_map()) {
1304  ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1305  elements->map() == GetHeap()->fixed_cow_array_map());
1306  if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1308  }
1309  Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1310  return EnsureCanContainElements(objects, length, mode);
1311  }
1312 
1316  } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
1317  FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
1318  for (uint32_t i = 0; i < length; ++i) {
1319  if (double_array->is_the_hole(i)) {
1321  }
1322  }
1324  }
1325 
1326  return this;
1327 }
1328 
1329 
1331  ElementsKind to_kind) {
1332  Map* current_map = map();
1333  ElementsKind from_kind = current_map->elements_kind();
1334  if (from_kind == to_kind) return current_map;
1335 
1336  Context* global_context = isolate->context()->global_context();
1337  Object* maybe_array_maps = global_context->js_array_maps();
1338  if (maybe_array_maps->IsFixedArray()) {
1339  FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1340  if (array_maps->get(from_kind) == current_map) {
1341  Object* maybe_transitioned_map = array_maps->get(to_kind);
1342  if (maybe_transitioned_map->IsMap()) {
1343  return Map::cast(maybe_transitioned_map);
1344  }
1345  }
1346  }
1347 
1348  return GetElementsTransitionMapSlow(to_kind);
1349 }
1350 
1351 
1353  FixedArrayBase* value,
1354  WriteBarrierMode mode) {
1355  ASSERT(value->HasValidElements());
1356  if (new_map != NULL) {
1357  if (mode == UPDATE_WRITE_BARRIER) {
1358  set_map(new_map);
1359  } else {
1360  ASSERT(mode == SKIP_WRITE_BARRIER);
1361  set_map_no_write_barrier(new_map);
1362  }
1363  }
1364  ASSERT((map()->has_fast_smi_or_object_elements() ||
1365  (value == GetHeap()->empty_fixed_array())) ==
1366  (value->map() == GetHeap()->fixed_array_map() ||
1367  value->map() == GetHeap()->fixed_cow_array_map()));
1368  ASSERT((value == GetHeap()->empty_fixed_array()) ||
1369  (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1370  WRITE_FIELD(this, kElementsOffset, value);
1371  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1372 }
1373 
1374 
1375 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1376  set_map_and_elements(NULL, value, mode);
1377 }
1378 
1379 
// Resets the properties backing store to the canonical empty fixed
// array. No write barrier: the empty array is asserted to live outside
// new space.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}
1384 
1385 
1387  ASSERT(map()->has_fast_smi_or_object_elements() ||
1388  map()->has_fast_double_elements());
1389  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1390  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1391 }
1392 
1393 
1394 MaybeObject* JSObject::ResetElements() {
1395  Object* obj;
1396  ElementsKind elements_kind = GetInitialFastElementsKind();
1397  if (!FLAG_smi_only_arrays) {
1398  elements_kind = FastSmiToObjectElementsKind(elements_kind);
1399  }
1400  MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
1401  elements_kind);
1402  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1403  set_map(Map::cast(obj));
1405  return this;
1406 }
1407 
1408 
1409 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1410 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1411 
1412 
// Returns the oddball kind tag, stored as a Smi in the kind field.
byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}
1416 
1417 
1419  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1420 }
1421 
1422 
// Reads the cell's current value.
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


// Writes the cell's value. The WriteBarrierMode argument is ignored by
// design — see the comment below.
void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1433 
1434 
1436  InstanceType type = map()->instance_type();
1437  // Check for the most common kind of JavaScript object before
1438  // falling into the generic switch. This speeds up the internal
1439  // field operations considerably on average.
1440  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1441  switch (type) {
1442  case JS_MODULE_TYPE:
1443  return JSModule::kSize;
1444  case JS_GLOBAL_PROXY_TYPE:
1445  return JSGlobalProxy::kSize;
1446  case JS_GLOBAL_OBJECT_TYPE:
1447  return JSGlobalObject::kSize;
1449  return JSBuiltinsObject::kSize;
1450  case JS_FUNCTION_TYPE:
1451  return JSFunction::kSize;
1452  case JS_VALUE_TYPE:
1453  return JSValue::kSize;
1454  case JS_DATE_TYPE:
1455  return JSDate::kSize;
1456  case JS_ARRAY_TYPE:
1457  return JSArray::kSize;
1458  case JS_WEAK_MAP_TYPE:
1459  return JSWeakMap::kSize;
1460  case JS_REGEXP_TYPE:
1461  return JSRegExp::kSize;
1463  return JSObject::kHeaderSize;
1465  return JSMessageObject::kSize;
1466  default:
1467  UNREACHABLE();
1468  return 0;
1469  }
1470 }
1471 
1472 
1475  // Make sure to adjust for the number of in-object properties. These
1476  // properties do contribute to the size, but are not internal fields.
1477  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
1478  map()->inobject_properties();
1479 }
1480 
1481 
1483  ASSERT(index < GetInternalFieldCount() && index >= 0);
1484  return GetHeaderSize() + (kPointerSize * index);
1485 }
1486 
1487 
1489  ASSERT(index < GetInternalFieldCount() && index >= 0);
1490  // Internal objects do follow immediately after the header, whereas in-object
1491  // properties are at the end of the object. Therefore there is no need
1492  // to adjust the index here.
1493  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
1494 }
1495 
1496 
// Stores an arbitrary object in internal field |index|, with a write
// barrier because |value| may be a heap pointer.
void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: no write barrier needed since Smis are not heap
// pointers.
void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
1516 
1517 
1518 // Access fast-case object properties at index. The use of these routines
1519 // is needed to correctly distinguish between properties stored in-object and
1520 // properties stored in the properties array.
1522  // Adjust for the number of properties stored in the object.
1523  index -= map()->inobject_properties();
1524  if (index < 0) {
1525  int offset = map()->instance_size() + (index * kPointerSize);
1526  return READ_FIELD(this, offset);
1527  } else {
1528  ASSERT(index < properties()->length());
1529  return properties()->get(index);
1530  }
1531 }
1532 
1533 
1535  // Adjust for the number of properties stored in the object.
1536  index -= map()->inobject_properties();
1537  if (index < 0) {
1538  int offset = map()->instance_size() + (index * kPointerSize);
1539  WRITE_FIELD(this, offset, value);
1540  WRITE_BARRIER(GetHeap(), this, offset, value);
1541  } else {
1542  ASSERT(index < properties()->length());
1543  properties()->set(index, value);
1544  }
1545  return value;
1546 }
1547 
1548 
1550  // Adjust for the number of properties stored in the object.
1551  index -= map()->inobject_properties();
1552  ASSERT(index < 0);
1553  return map()->instance_size() + (index * kPointerSize);
1554 }
1555 
1556 
1558  // Adjust for the number of properties stored in the object.
1559  index -= map()->inobject_properties();
1560  ASSERT(index < 0);
1561  int offset = map()->instance_size() + (index * kPointerSize);
1562  return READ_FIELD(this, offset);
1563 }
1564 
1565 
1567  Object* value,
1568  WriteBarrierMode mode) {
1569  // Adjust for the number of properties stored in the object.
1570  index -= map()->inobject_properties();
1571  ASSERT(index < 0);
1572  int offset = map()->instance_size() + (index * kPointerSize);
1573  WRITE_FIELD(this, offset, value);
1574  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1575  return value;
1576 }
1577 
1578 
1579 
1581  Object* pre_allocated_value,
1582  Object* filler_value) {
1583  ASSERT(!filler_value->IsHeapObject() ||
1584  !GetHeap()->InNewSpace(filler_value));
1585  ASSERT(!pre_allocated_value->IsHeapObject() ||
1586  !GetHeap()->InNewSpace(pre_allocated_value));
1587  int size = map->instance_size();
1588  int offset = kHeaderSize;
1589  if (filler_value != pre_allocated_value) {
1590  int pre_allocated = map->pre_allocated_property_fields();
1591  ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
1592  for (int i = 0; i < pre_allocated; i++) {
1593  WRITE_FIELD(this, offset, pre_allocated_value);
1594  offset += kPointerSize;
1595  }
1596  }
1597  while (offset < size) {
1598  WRITE_FIELD(this, offset, filler_value);
1599  offset += kPointerSize;
1600  }
1601 }
1602 
1603 
1605  return !properties()->IsDictionary();
1606 }
1607 
1608 
1610  JSObject::StoreFromKeyed store_mode) {
1611  // Allow extra fast properties if the object has more than
1612  // kFastPropertiesSoftLimit in-object properties. When this is the case,
1613  // it is very unlikely that the object is being used as a dictionary
1614  // and there is a good chance that allowing more map transitions
1615  // will be worth it.
1616  int inobject = map()->inobject_properties();
1617 
1618  int limit;
1619  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ||
1620  map()->used_for_prototype()) {
1621  limit = Max(inobject, kMaxFastProperties);
1622  } else {
1623  limit = Max(inobject, kFastPropertiesSoftLimit);
1624  }
1625  return properties > limit;
1626 }
1627 
1628 
1629 void Struct::InitializeBody(int object_size) {
1630  Object* value = GetHeap()->undefined_value();
1631  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1632  WRITE_FIELD(this, offset, value);
1633  }
1634 }
1635 
1636 
1637 bool Object::ToArrayIndex(uint32_t* index) {
1638  if (IsSmi()) {
1639  int value = Smi::cast(this)->value();
1640  if (value < 0) return false;
1641  *index = value;
1642  return true;
1643  }
1644  if (IsHeapNumber()) {
1645  double value = HeapNumber::cast(this)->value();
1646  uint32_t uint_value = static_cast<uint32_t>(value);
1647  if (value == static_cast<double>(uint_value)) {
1648  *index = uint_value;
1649  return true;
1650  }
1651  }
1652  return false;
1653 }
1654 
1655 
1657  if (!this->IsJSValue()) return false;
1658 
1659  JSValue* js_value = JSValue::cast(this);
1660  if (!js_value->value()->IsString()) return false;
1661 
1662  String* str = String::cast(js_value->value());
1663  if (index >= (uint32_t)str->length()) return false;
1664 
1665  return true;
1666 }
1667 
1668 
1670  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
1671  return reinterpret_cast<FixedArrayBase*>(object);
1672 }
1673 
1674 
1676  ASSERT(index >= 0 && index < this->length());
1677  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1678 }
1679 
1680 
// True when element |index| holds the hole sentinel.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}
1684 
1685 
// Stores a Smi at |index|. Smis are not heap pointers, so no write
// barrier is required. Copy-on-write arrays must not be mutated.
void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// Stores an arbitrary object at |index| with the full write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
1702 
1703 
// True when |value| is the specific NaN bit pattern used to encode the
// hole in double arrays. Compared bitwise, since NaN != NaN as doubles.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}
1707 
1708 
1710  return BitCast<double, uint64_t>(kHoleNanInt64);
1711 }
1712 
1713 
1715  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
1716  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
1717  return OS::nan_value();
1718 }
1719 
1720 
// Reads the unboxed double at |index|. Must not be called on a hole —
// callers check is_the_hole() first (see get() below in the original).
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}
1729 
1731  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1732  map() != HEAP->fixed_array_map());
1733  ASSERT(index >= 0 && index < this->length());
1734  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
1735 }
1736 
1737 MaybeObject* FixedDoubleArray::get(int index) {
1738  if (is_the_hole(index)) {
1739  return GetHeap()->the_hole_value();
1740  } else {
1741  return GetHeap()->NumberFromDouble(get_scalar(index));
1742  }
1743 }
1744 
1745 
// Stores |value| at |index|. Arbitrary NaNs are canonicalized first so
// that no user value can alias the hole NaN bit pattern.
void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}
1753 
1754 
1756  ASSERT(map() != HEAP->fixed_cow_array_map() &&
1757  map() != HEAP->fixed_array_map());
1758  int offset = kHeaderSize + index * kDoubleSize;
1759  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
1760 }
1761 
1762 
1764  int offset = kHeaderSize + index * kDoubleSize;
1765  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
1766 }
1767 
1768 
1770  Heap* heap = GetHeap();
1771  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1772  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1773  return UPDATE_WRITE_BARRIER;
1774 }
1775 
1776 
// Stores |value| at |index| using the caller-supplied write barrier
// mode (SKIP_WRITE_BARRIER only when the caller can prove it is safe).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}
1786 
1787 
1789  int index,
1790  Object* value) {
1791  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1792  ASSERT(index >= 0 && index < array->length());
1793  int offset = kHeaderSize + index * kPointerSize;
1794  WRITE_FIELD(array, offset, value);
1795  Heap* heap = array->GetHeap();
1796  if (heap->InNewSpace(value)) {
1797  heap->RecordWrite(array->address(), offset);
1798  }
1799 }
1800 
1801 
1803  int index,
1804  Object* value) {
1805  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1806  ASSERT(index >= 0 && index < array->length());
1807  ASSERT(!HEAP->InNewSpace(value));
1808  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1809 }
1810 
1811 
// Convenience overload resolving the heap from this array.
void FixedArray::set_undefined(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  set_undefined(GetHeap(), index);
}


// Stores the undefined sentinel at |index|. No write barrier: the
// undefined value is asserted to live outside new space.
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}
1824 
1825 
// Convenience overload resolving the heap from this array.
void FixedArray::set_null(int index) {
  set_null(GetHeap(), index);
}


// Stores the null sentinel at |index|. No write barrier: null is
// asserted to live outside new space.
void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
1836 
1837 
1838 void FixedArray::set_the_hole(int index) {
1839  ASSERT(map() != HEAP->fixed_cow_array_map());
1840  ASSERT(index >= 0 && index < this->length());
1841  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1842  WRITE_FIELD(this,
1843  kHeaderSize + index * kPointerSize,
1844  GetHeap()->the_hole_value());
1845 }
1846 
1847 
// Stores a Smi at |index| without the COW-map or bounds checks of the
// public setters. Smis need no write barrier.
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}
1853 
1854 
1856  int index,
1857  Object* value,
1858  WriteBarrierMode mode) {
1859  int offset = kHeaderSize + index * kPointerSize;
1860  WRITE_FIELD(this, offset, value);
1861  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1862 }
1863 
1864 
1865 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1866  ASSERT(index >= 0 && index < this->length());
1867  ASSERT(!HEAP->InNewSpace(heap->null_value()));
1868  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1869 }
1870 
1871 
1873  return HeapObject::RawField(this, kHeaderSize);
1874 }
1875 
1876 
1878  ASSERT(this->IsSmi() ||
1879  this->MayContainTransitions() ||
1880  this == HEAP->empty_descriptor_array());
1881  return this->IsSmi() || length() < kFirstIndex;
1882 }
1883 
1884 
1886  return length() >= kTransitionsIndex;
1887 }
1888 
1889 
1891  Object* storage = READ_FIELD(this, kBitField3StorageOffset);
1892  return Smi::cast(storage)->value();
1893 }
1894 
1896  ASSERT(this->MayContainTransitions());
1898 }
1899 
1900 
// Swaps two elements of |array| using the no-incremental-barrier
// setter (valid only under a whiteness witness / no-marking scope).
void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
                                                    int first,
                                                    int second) {
  Object* tmp = array->get(first);
  NoIncrementalWriteBarrierSet(array, first, array->get(second));
  NoIncrementalWriteBarrierSet(array, second, tmp);
}
1908 
1909 
1910 int DescriptorArray::Search(String* name) {
1911  SLOW_ASSERT(IsSortedNoDuplicates());
1912 
1913  // Check for empty descriptor array.
1914  int nof = number_of_descriptors();
1915  if (nof == 0) return kNotFound;
1916 
1917  // Fast case: do linear search for small arrays.
1918  const int kMaxElementsForLinearSearch = 8;
1919  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1920  return LinearSearch(EXPECT_SORTED, name, nof);
1921  }
1922 
1923  // Slow case: perform binary search.
1924  return BinarySearch(name, 0, nof - 1);
1925 }
1926 
1927 
1928 int DescriptorArray::SearchWithCache(String* name) {
1929  int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1930  if (number == DescriptorLookupCache::kAbsent) {
1931  number = Search(name);
1932  GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1933  }
1934  return number;
1935 }
1936 
1937 
1938 Map* DescriptorArray::elements_transition_map() {
1939  if (!this->MayContainTransitions()) {
1940  return NULL;
1941  }
1942  Object* transition_map = get(kTransitionsIndex);
1943  if (transition_map == Smi::FromInt(0)) {
1944  return NULL;
1945  } else {
1946  return Map::cast(transition_map);
1947  }
1948 }
1949 
1950 
1951 void DescriptorArray::set_elements_transition_map(
1952  Map* transition_map, WriteBarrierMode mode) {
1953  ASSERT(this->length() > kTransitionsIndex);
1954  Heap* heap = GetHeap();
1955  WRITE_FIELD(this, kTransitionsOffset, transition_map);
1957  heap, this, kTransitionsOffset, transition_map, mode);
1959 }
1960 
1961 
// Returns the in-object address of descriptor |descriptor_number|'s key.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToKeyIndex(descriptor_number)));
}


// Returns the key (a String) of descriptor |descriptor_number|.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


// Returns the in-object address of descriptor |descriptor_number|'s value.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToValueIndex(descriptor_number)));
}


// Returns the value of descriptor |descriptor_number|.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


// Overwrites the descriptor's value with null, bypassing checks and the
// write barrier (null is in old space).
void DescriptorArray::SetNullValueUnchecked(int descriptor_number, Heap* heap) {
  ASSERT(descriptor_number < number_of_descriptors());
  set_null_unchecked(heap, ToValueIndex(descriptor_number));
}
1994 
1995 
// Decodes the Smi-encoded PropertyDetails of the descriptor.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


// Stores raw Smi-encoded details, bypassing the COW/bounds checks.
void DescriptorArray::SetDetailsUnchecked(int descriptor_number, Smi* value) {
  ASSERT(descriptor_number < number_of_descriptors());
  set_unchecked(ToDetailsIndex(descriptor_number), value);
}


// Returns the property type encoded in the descriptor's details.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


// For FIELD descriptors: the field index encoded in the value slot.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}
2017 
2018 
2020  return JSFunction::cast(GetValue(descriptor_number));
2021 }
2022 
2023 
2025  ASSERT(GetType(descriptor_number) == CALLBACKS);
2026  return GetValue(descriptor_number);
2027 }
2028 
2029 
2031  ASSERT(GetType(descriptor_number) == CALLBACKS);
2032  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
2033  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
2034 }
2035 
2036 
// True when the descriptor describes a real property (as opposed to a
// transition or null descriptor); delegates to IsPropertyDescriptor.
bool DescriptorArray::IsProperty(int descriptor_number) {
  Entry entry(this, descriptor_number);
  return IsPropertyDescriptor(&entry);
}
2041 
2042 
// True when the descriptor carries only transition information and no
// property payload: map/constant transitions, or a CALLBACKS entry
// whose AccessorPair holds maps for both getter and setter.
bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
  switch (GetType(descriptor_number)) {
    case MAP_TRANSITION:
    case CONSTANT_TRANSITION:
      return true;
    case CALLBACKS: {
      Object* value = GetValue(descriptor_number);
      if (!value->IsAccessorPair()) return false;
      AccessorPair* accessors = AccessorPair::cast(value);
      // Both slots holding maps means the pair encodes transitions only.
      return accessors->getter()->IsMap() && accessors->setter()->IsMap();
    }
    case NORMAL:
    case FIELD:
    case CONSTANT_FUNCTION:
    case HANDLER:
    case INTERCEPTOR:
    case NULL_DESCRIPTOR:
      return false;
  }
  UNREACHABLE(); // Keep the compiler happy.
  return false;
}
2065 
2066 
// True when the descriptor is the NULL_DESCRIPTOR placeholder.
bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


// Copies key, value and details of the descriptor into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}
2077 
2078 
2079 void DescriptorArray::Set(int descriptor_number,
2080  Descriptor* desc,
2081  const WhitenessWitness&) {
2082  // Range check.
2083  ASSERT(descriptor_number < number_of_descriptors());
2084 
2086  ToKeyIndex(descriptor_number),
2087  desc->GetKey());
2089  ToValueIndex(descriptor_number),
2090  desc->GetValue());
2092  ToDetailsIndex(descriptor_number),
2093  desc->GetDetails().AsSmi());
2094 }
2095 
2096 
// Swaps two whole descriptors (key, value and details triples) using
// the no-incremental-barrier swap; valid only while marking is off.
void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
    int first, int second) {
  NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
  NoIncrementalWriteBarrierSwap(this,
                                ToValueIndex(first),
                                ToValueIndex(second));
  NoIncrementalWriteBarrierSwap(this,
                                ToDetailsIndex(first),
                                ToDetailsIndex(second));
}
2107 
2108 
2110  : marking_(array->GetHeap()->incremental_marking()) {
2111  marking_->EnterNoMarkingScope();
2112  if (array->number_of_descriptors() > 0) {
2113  ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
2114  }
2115 }
2116 
2117 
2119  marking_->LeaveNoMarkingScope();
2120 }
2121 
2122 
2123 template<typename Shape, typename Key>
2124 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
2125  const int kMinCapacity = 32;
2126  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
2127  if (capacity < kMinCapacity) {
2128  capacity = kMinCapacity; // Guarantee min capacity.
2129  }
2130  return capacity;
2131 }
2132 
2133 
2134 template<typename Shape, typename Key>
2136  return FindEntry(GetIsolate(), key);
2137 }
2138 
2139 
2140 // Find entry for key otherwise return kNotFound.
2141 template<typename Shape, typename Key>
2143  uint32_t capacity = Capacity();
2144  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
2145  uint32_t count = 1;
2146  // EnsureCapacity will guarantee the hash table is never full.
2147  while (true) {
2148  Object* element = KeyAt(entry);
2149  // Empty entry.
2150  if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
2151  if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
2152  Shape::IsMatch(key, element)) return entry;
2153  entry = NextProbe(entry, count++, capacity);
2154  }
2155  return kNotFound;
2156 }
2157 
2158 
2160  Object* max_index_object = get(kMaxNumberKeyIndex);
2161  if (!max_index_object->IsSmi()) return false;
2162  return 0 !=
2163  (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
2164 }
2165 
2167  ASSERT(!requires_slow_elements());
2168  Object* max_index_object = get(kMaxNumberKeyIndex);
2169  if (!max_index_object->IsSmi()) return 0;
2170  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
2171  return value >> kRequiresSlowElementsTagSize;
2172 }
2173 
2175  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
2176 }
2177 
2178 
2179 // ------------------------------------
2180 // Cast operations
2181 
2182 
2211 CAST_ACCESSOR(Oddball)
2212 CAST_ACCESSOR(JSGlobalPropertyCell)
2213 CAST_ACCESSOR(SharedFunctionInfo)
2219 CAST_ACCESSOR(JSBuiltinsObject)
2242 
2243 
2244 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2246 #undef MAKE_STRUCT_CAST
2247 
2248 
2249 template <typename Shape, typename Key>
2251  ASSERT(obj->IsHashTable());
2252  return reinterpret_cast<HashTable*>(obj);
2253 }
2254 
2255 
2257 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2258 
2260 
2261 
// Reads the raw 32-bit hash field (hash value plus flag bits).
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


// Writes the raw 32-bit hash field. On 64-bit hosts the adjacent
// padding word is zeroed so the full pointer-sized slot is defined.
void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
2273 
2274 
// Content equality. Fast paths: identity, and two distinct symbols
// (symbols are unique, so distinct symbols can never be equal).
// Everything else falls through to the slow character comparison.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
2282 
2283 
// Attempts to flatten this string. Non-cons strings are already flat;
// a flat cons returns its first component; otherwise the slow path may
// allocate (hence MaybeObject).
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}
2290 
2291 
2293  MaybeObject* flat = TryFlatten(pretenure);
2294  Object* successfully_flattened;
2295  if (!flat->ToObject(&successfully_flattened)) return this;
2296  return String::cast(successfully_flattened);
2297 }
2298 
2299 
2300 uint16_t String::Get(int index) {
2301  ASSERT(index >= 0 && index < length());
2302  switch (StringShape(this).full_representation_tag()) {
2304  return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
2306  return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
2309  return ConsString::cast(this)->ConsStringGet(index);
2316  return SlicedString::cast(this)->SlicedStringGet(index);
2317  default:
2318  break;
2319  }
2320 
2321  UNREACHABLE();
2322  return 0;
2323 }
2324 
2325 
// Writes character |value| at |index|. Only valid on sequential
// strings; dispatches on the ASCII vs two-byte representation.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
2334 
2335 
2337  if (!StringShape(this).IsCons()) return true;
2338  return ConsString::cast(this)->second()->length() == 0;
2339 }
2340 
2341 
2343  // Giving direct access to underlying string only makes sense if the
2344  // wrapping string is already flattened.
2345  ASSERT(this->IsFlat());
2346  ASSERT(StringShape(this).IsIndirect());
2348  const int kUnderlyingOffset = SlicedString::kParentOffset;
2349  return String::cast(READ_FIELD(this, kUnderlyingOffset));
2350 }
2351 
2352 
2354  ASSERT(index >= 0 && index < length());
2355  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2356 }
2357 
2358 
2360  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
2361  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
2362  static_cast<byte>(value));
2363 }
2364 
2365 
2367  return FIELD_ADDR(this, kHeaderSize);
2368 }
2369 
2370 
2372  return reinterpret_cast<char*>(GetCharsAddress());
2373 }
2374 
2375 
2377  return FIELD_ADDR(this, kHeaderSize);
2378 }
2379 
2380 
2382  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
2383 }
2384 
2385 
2387  ASSERT(index >= 0 && index < length());
2388  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
2389 }
2390 
2391 
2393  ASSERT(index >= 0 && index < length());
2394  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
2395 }
2396 
2397 
2399  return SizeFor(length());
2400 }
2401 
2402 
2404  return SizeFor(length());
2405 }
2406 
2407 
2409  return String::cast(READ_FIELD(this, kParentOffset));
2410 }
2411 
2412 
2414  ASSERT(parent->IsSeqString() || parent->IsExternalString());
2415  WRITE_FIELD(this, kParentOffset, parent);
2416 }
2417 
2418 
2419 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
2420 
2421 
2422 String* ConsString::first() {
2423  return String::cast(READ_FIELD(this, kFirstOffset));
2424 }
2425 
2426 
2428  return READ_FIELD(this, kFirstOffset);
2429 }
2430 
2431 
2433  WRITE_FIELD(this, kFirstOffset, value);
2434  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2435 }
2436 
2437 
2439  return String::cast(READ_FIELD(this, kSecondOffset));
2440 }
2441 
2442 
2444  return READ_FIELD(this, kSecondOffset);
2445 }
2446 
2447 
2449  WRITE_FIELD(this, kSecondOffset, value);
2450  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2451 }
2452 
2453 
2455  InstanceType type = map()->instance_type();
2457 }
2458 
2459 
2461  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2462 }
2463 
2464 
2466  if (is_short()) return;
2467  const char** data_field =
2468  reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
2469  *data_field = resource()->data();
2470 }
2471 
2472 
2474  const ExternalAsciiString::Resource* resource) {
2475  *reinterpret_cast<const Resource**>(
2476  FIELD_ADDR(this, kResourceOffset)) = resource;
2477  if (resource != NULL) update_data_cache();
2478 }
2479 
2480 
2482  return resource()->data();
2483 }
2484 
2485 
2487  ASSERT(index >= 0 && index < length());
2488  return GetChars()[index];
2489 }
2490 
2491 
2493  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2494 }
2495 
2496 
2498  if (is_short()) return;
2499  const uint16_t** data_field =
2500  reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
2501  *data_field = resource()->data();
2502 }
2503 
2504 
2506  const ExternalTwoByteString::Resource* resource) {
2507  *reinterpret_cast<const Resource**>(
2508  FIELD_ADDR(this, kResourceOffset)) = resource;
2509  if (resource != NULL) update_data_cache();
2510 }
2511 
2512 
2514  return resource()->data();
2515 }
2516 
2517 
2519  ASSERT(index >= 0 && index < length());
2520  return GetChars()[index];
2521 }
2522 
2523 
2525  unsigned start) {
2526  return GetChars() + start;
2527 }
2528 
2529 
2531  set_finger_index(kEntriesIndex);
2532  set_size(kEntriesIndex);
2533 }
2534 
2535 
2537  int cache_size = size();
2538  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2539  MemsetPointer(entries_start,
2540  GetHeap()->the_hole_value(),
2541  cache_size - kEntriesIndex);
2542  MakeZeroSize();
2543 }
2544 
2545 
2547  return Smi::cast(get(kCacheSizeIndex))->value();
2548 }
2549 
2550 
2552  set(kCacheSizeIndex, Smi::FromInt(size));
2553 }
2554 
2555 
2557  return Smi::cast(get(kFingerIndex))->value();
2558 }
2559 
2560 
2562  set(kFingerIndex, Smi::FromInt(finger_index));
2563 }
2564 
2565 
2566 byte ByteArray::get(int index) {
2567  ASSERT(index >= 0 && index < this->length());
2568  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
2569 }
2570 
2571 
2572 void ByteArray::set(int index, byte value) {
2573  ASSERT(index >= 0 && index < this->length());
2574  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2575 }
2576 
2577 
2578 int ByteArray::get_int(int index) {
2579  ASSERT(index >= 0 && (index * kIntSize) < this->length());
2580  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2581 }
2582 
2583 
2585  ASSERT_TAG_ALIGNED(address);
2586  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2587 }
2588 
2589 
2591  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2592 }
2593 
2594 
2596  return reinterpret_cast<uint8_t*>(external_pointer());
2597 }
2598 
2599 
2601  ASSERT((index >= 0) && (index < this->length()));
2602  uint8_t* ptr = external_pixel_pointer();
2603  return ptr[index];
2604 }
2605 
2606 
2607 MaybeObject* ExternalPixelArray::get(int index) {
2608  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2609 }
2610 
2611 
2612 void ExternalPixelArray::set(int index, uint8_t value) {
2613  ASSERT((index >= 0) && (index < this->length()));
2614  uint8_t* ptr = external_pixel_pointer();
2615  ptr[index] = value;
2616 }
2617 
2618 
2619 void* ExternalArray::external_pointer() {
2620  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2621  return reinterpret_cast<void*>(ptr);
2622 }
2623 
2624 
2625 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2626  intptr_t ptr = reinterpret_cast<intptr_t>(value);
2627  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2628 }
2629 
2630 
2632  ASSERT((index >= 0) && (index < this->length()));
2633  int8_t* ptr = static_cast<int8_t*>(external_pointer());
2634  return ptr[index];
2635 }
2636 
2637 
2638 MaybeObject* ExternalByteArray::get(int index) {
2639  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2640 }
2641 
2642 
2643 void ExternalByteArray::set(int index, int8_t value) {
2644  ASSERT((index >= 0) && (index < this->length()));
2645  int8_t* ptr = static_cast<int8_t*>(external_pointer());
2646  ptr[index] = value;
2647 }
2648 
2649 
2651  ASSERT((index >= 0) && (index < this->length()));
2652  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2653  return ptr[index];
2654 }
2655 
2656 
2657 MaybeObject* ExternalUnsignedByteArray::get(int index) {
2658  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2659 }
2660 
2661 
2662 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2663  ASSERT((index >= 0) && (index < this->length()));
2664  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2665  ptr[index] = value;
2666 }
2667 
2668 
2670  ASSERT((index >= 0) && (index < this->length()));
2671  int16_t* ptr = static_cast<int16_t*>(external_pointer());
2672  return ptr[index];
2673 }
2674 
2675 
2676 MaybeObject* ExternalShortArray::get(int index) {
2677  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2678 }
2679 
2680 
2681 void ExternalShortArray::set(int index, int16_t value) {
2682  ASSERT((index >= 0) && (index < this->length()));
2683  int16_t* ptr = static_cast<int16_t*>(external_pointer());
2684  ptr[index] = value;
2685 }
2686 
2687 
2689  ASSERT((index >= 0) && (index < this->length()));
2690  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2691  return ptr[index];
2692 }
2693 
2694 
2695 MaybeObject* ExternalUnsignedShortArray::get(int index) {
2696  return Smi::FromInt(static_cast<int>(get_scalar(index)));
2697 }
2698 
2699 
2701  ASSERT((index >= 0) && (index < this->length()));
2702  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2703  ptr[index] = value;
2704 }
2705 
2706 
2708  ASSERT((index >= 0) && (index < this->length()));
2709  int32_t* ptr = static_cast<int32_t*>(external_pointer());
2710  return ptr[index];
2711 }
2712 
2713 
2714 MaybeObject* ExternalIntArray::get(int index) {
2715  return GetHeap()->NumberFromInt32(get_scalar(index));
2716 }
2717 
2718 
2719 void ExternalIntArray::set(int index, int32_t value) {
2720  ASSERT((index >= 0) && (index < this->length()));
2721  int32_t* ptr = static_cast<int32_t*>(external_pointer());
2722  ptr[index] = value;
2723 }
2724 
2725 
2727  ASSERT((index >= 0) && (index < this->length()));
2728  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2729  return ptr[index];
2730 }
2731 
2732 
2733 MaybeObject* ExternalUnsignedIntArray::get(int index) {
2734  return GetHeap()->NumberFromUint32(get_scalar(index));
2735 }
2736 
2737 
2738 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2739  ASSERT((index >= 0) && (index < this->length()));
2740  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2741  ptr[index] = value;
2742 }
2743 
2744 
2746  ASSERT((index >= 0) && (index < this->length()));
2747  float* ptr = static_cast<float*>(external_pointer());
2748  return ptr[index];
2749 }
2750 
2751 
2752 MaybeObject* ExternalFloatArray::get(int index) {
2753  return GetHeap()->NumberFromDouble(get_scalar(index));
2754 }
2755 
2756 
2757 void ExternalFloatArray::set(int index, float value) {
2758  ASSERT((index >= 0) && (index < this->length()));
2759  float* ptr = static_cast<float*>(external_pointer());
2760  ptr[index] = value;
2761 }
2762 
2763 
2765  ASSERT((index >= 0) && (index < this->length()));
2766  double* ptr = static_cast<double*>(external_pointer());
2767  return ptr[index];
2768 }
2769 
2770 
2771 MaybeObject* ExternalDoubleArray::get(int index) {
2772  return GetHeap()->NumberFromDouble(get_scalar(index));
2773 }
2774 
2775 
2776 void ExternalDoubleArray::set(int index, double value) {
2777  ASSERT((index >= 0) && (index < this->length()));
2778  double* ptr = static_cast<double*>(external_pointer());
2779  ptr[index] = value;
2780 }
2781 
2782 
2784  return READ_BYTE_FIELD(this, kVisitorIdOffset);
2785 }
2786 
2787 
2788 void Map::set_visitor_id(int id) {
2789  ASSERT(0 <= id && id < 256);
2790  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2791 }
2792 
2793 
2795  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2796 }
2797 
2798 
2800  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2801 }
2802 
2803 
2805  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2806 }
2807 
2808 
2810  int instance_size = map->instance_size();
2811  if (instance_size != kVariableSizeSentinel) return instance_size;
2812  // We can ignore the "symbol" bit becase it is only set for symbols
2813  // and implies a string type.
2814  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
2815  // Only inline the most frequent cases.
2816  if (instance_type == FIXED_ARRAY_TYPE) {
2817  return FixedArray::BodyDescriptor::SizeOf(map, this);
2818  }
2819  if (instance_type == ASCII_STRING_TYPE) {
2820  return SeqAsciiString::SizeFor(
2821  reinterpret_cast<SeqAsciiString*>(this)->length());
2822  }
2823  if (instance_type == BYTE_ARRAY_TYPE) {
2824  return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2825  }
2826  if (instance_type == FREE_SPACE_TYPE) {
2827  return reinterpret_cast<FreeSpace*>(this)->size();
2828  }
2829  if (instance_type == STRING_TYPE) {
2831  reinterpret_cast<SeqTwoByteString*>(this)->length());
2832  }
2833  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2835  reinterpret_cast<FixedDoubleArray*>(this)->length());
2836  }
2837  ASSERT(instance_type == CODE_TYPE);
2838  return reinterpret_cast<Code*>(this)->CodeSize();
2839 }
2840 
2841 
2842 void Map::set_instance_size(int value) {
2843  ASSERT_EQ(0, value & (kPointerSize - 1));
2844  value >>= kPointerSizeLog2;
2845  ASSERT(0 <= value && value < 256);
2846  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2847 }
2848 
2849 
2851  ASSERT(0 <= value && value < 256);
2852  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2853 }
2854 
2855 
2857  ASSERT(0 <= value && value < 256);
2858  WRITE_BYTE_FIELD(this,
2859  kPreAllocatedPropertyFieldsOffset,
2860  static_cast<byte>(value));
2861 }
2862 
2863 
2865  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2866 }
2867 
2868 
2870  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2871 }
2872 
2873 
2875  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2876 }
2877 
2878 
2880  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2881 }
2882 
2883 
2885  return READ_BYTE_FIELD(this, kBitFieldOffset);
2886 }
2887 
2888 
2890  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2891 }
2892 
2893 
2895  return READ_BYTE_FIELD(this, kBitField2Offset);
2896 }
2897 
2898 
2900  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2901 }
2902 
2903 
2905  if (value) {
2906  set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2907  } else {
2908  set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2909  }
2910 }
2911 
2912 
2914  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2915 }
2916 
2917 
2919  if (value) {
2920  set_bit_field3(bit_field3() | (1 << kFunctionWithPrototype));
2921  } else {
2922  set_bit_field3(bit_field3() & ~(1 << kFunctionWithPrototype));
2923  }
2924 }
2925 
2926 
2928  return ((1 << kFunctionWithPrototype) & bit_field3()) != 0;
2929 }
2930 
2931 
2932 void Map::set_is_access_check_needed(bool access_check_needed) {
2933  if (access_check_needed) {
2934  set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2935  } else {
2936  set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2937  }
2938 }
2939 
2940 
2942  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2943 }
2944 
2945 
2946 void Map::set_is_extensible(bool value) {
2947  if (value) {
2948  set_bit_field2(bit_field2() | (1 << kIsExtensible));
2949  } else {
2950  set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2951  }
2952 }
2953 
2955  return ((1 << kIsExtensible) & bit_field2()) != 0;
2956 }
2957 
2958 
2960  if (value) {
2961  set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2962  } else {
2963  set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2964  }
2965 }
2966 
2968  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2969 }
2970 
2971 
2972 void Map::set_is_shared(bool value) {
2973  if (value) {
2974  set_bit_field3(bit_field3() | (1 << kIsShared));
2975  } else {
2976  set_bit_field3(bit_field3() & ~(1 << kIsShared));
2977  }
2978 }
2979 
2981  return ((1 << kIsShared) & bit_field3()) != 0;
2982 }
2983 
2984 
2986  if (value) {
2987  set_bit_field3(bit_field3() | (1 << kUsedForPrototype));
2988  } else {
2989  set_bit_field3(bit_field3() & ~(1 << kUsedForPrototype));
2990  }
2991 }
2992 
2993 
2995  return ((1 << kUsedForPrototype) & bit_field3()) != 0;
2996 }
2997 
2998 
3000  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
3001 }
3002 
3003 
3005  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
3006 }
3007 
3008 
3010  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
3011  // Make sure that all call stubs have an arguments count.
3012  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
3013  ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
3014  ExtractArgumentsCountFromFlags(flags) >= 0);
3015  WRITE_INT_FIELD(this, kFlagsOffset, flags);
3016 }
3017 
3018 
3020  return ExtractKindFromFlags(flags());
3021 }
3022 
3023 
3025  InlineCacheState result = ExtractICStateFromFlags(flags());
3026  // Only allow uninitialized or debugger states for non-IC code
3027  // objects. This is used in the debugger to determine whether or not
3028  // a call to code object has been replaced with a debug break call.
3029  ASSERT(is_inline_cache_stub() ||
3030  result == UNINITIALIZED ||
3031  result == DEBUG_BREAK ||
3032  result == DEBUG_PREPARE_STEP_IN);
3033  return result;
3034 }
3035 
3036 
3038  ASSERT(is_inline_cache_stub());
3039  return ExtractExtraICStateFromFlags(flags());
3040 }
3041 
3042 
3044  return ExtractTypeFromFlags(flags());
3045 }
3046 
3047 
3049  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
3050  return ExtractArgumentsCountFromFlags(flags());
3051 }
3052 
3053 
3055  ASSERT(kind() == STUB ||
3056  kind() == UNARY_OP_IC ||
3057  kind() == BINARY_OP_IC ||
3058  kind() == COMPARE_IC ||
3059  kind() == TO_BOOLEAN_IC);
3060  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
3061 }
3062 
3063 
3064 void Code::set_major_key(int major) {
3065  ASSERT(kind() == STUB ||
3066  kind() == UNARY_OP_IC ||
3067  kind() == BINARY_OP_IC ||
3068  kind() == COMPARE_IC ||
3069  kind() == TO_BOOLEAN_IC);
3070  ASSERT(0 <= major && major < 256);
3071  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
3072 }
3073 
3074 
3076  return kind() == STUB && IsPregeneratedField::decode(flags());
3077 }
3078 
3079 
3080 void Code::set_is_pregenerated(bool value) {
3081  ASSERT(kind() == STUB);
3082  Flags f = flags();
3083  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
3084  set_flags(f);
3085 }
3086 
3087 
3089  ASSERT_EQ(FUNCTION, kind());
3090  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
3091 }
3092 
3093 
3094 void Code::set_optimizable(bool value) {
3095  ASSERT_EQ(FUNCTION, kind());
3096  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
3097 }
3098 
3099 
3101  ASSERT_EQ(FUNCTION, kind());
3102  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3103  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
3104 }
3105 
3106 
3108  ASSERT_EQ(FUNCTION, kind());
3109  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3110  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
3111  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3112 }
3113 
3114 
3116  ASSERT_EQ(FUNCTION, kind());
3117  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3118  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
3119 }
3120 
3121 
3123  ASSERT_EQ(FUNCTION, kind());
3124  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3125  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
3126  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3127 }
3128 
3129 
3131  ASSERT_EQ(FUNCTION, kind());
3132  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3133  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
3134 }
3135 
3136 
3138  ASSERT_EQ(FUNCTION, kind());
3139  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
3140  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
3141  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
3142 }
3143 
3144 
3146  ASSERT_EQ(FUNCTION, kind());
3147  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
3148 }
3149 
3150 
3152  ASSERT_EQ(FUNCTION, kind());
3153  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
3154  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
3155 }
3156 
3157 
3159  ASSERT_EQ(FUNCTION, kind());
3160  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
3161 }
3162 
3163 
3164 void Code::set_profiler_ticks(int ticks) {
3165  ASSERT_EQ(FUNCTION, kind());
3166  ASSERT(ticks < 256);
3167  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
3168 }
3169 
3170 
3171 unsigned Code::stack_slots() {
3172  ASSERT(kind() == OPTIMIZED_FUNCTION);
3173  return READ_UINT32_FIELD(this, kStackSlotsOffset);
3174 }
3175 
3176 
3177 void Code::set_stack_slots(unsigned slots) {
3178  ASSERT(kind() == OPTIMIZED_FUNCTION);
3179  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
3180 }
3181 
3182 
3184  ASSERT(kind() == OPTIMIZED_FUNCTION);
3185  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
3186 }
3187 
3188 
3189 void Code::set_safepoint_table_offset(unsigned offset) {
3190  ASSERT(kind() == OPTIMIZED_FUNCTION);
3191  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3192  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
3193 }
3194 
3195 
3197  ASSERT_EQ(FUNCTION, kind());
3198  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
3199 }
3200 
3201 
3202 void Code::set_stack_check_table_offset(unsigned offset) {
3203  ASSERT_EQ(FUNCTION, kind());
3204  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
3205  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
3206 }
3207 
3208 
3210  ASSERT(is_call_stub() || is_keyed_call_stub());
3211  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
3212  return static_cast<CheckType>(type);
3213 }
3214 
3215 
3217  ASSERT(is_call_stub() || is_keyed_call_stub());
3218  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
3219 }
3220 
3221 
3223  ASSERT(is_unary_op_stub());
3224  return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
3225 }
3226 
3227 
3229  ASSERT(is_unary_op_stub());
3230  WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
3231 }
3232 
3233 
3235  ASSERT(is_binary_op_stub());
3236  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
3237 }
3238 
3239 
3241  ASSERT(is_binary_op_stub());
3242  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
3243 }
3244 
3245 
3247  ASSERT(is_binary_op_stub());
3248  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
3249 }
3250 
3251 
3253  ASSERT(is_binary_op_stub());
3254  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
3255 }
3256 
3257 
3259  ASSERT(is_compare_ic_stub());
3260  return READ_BYTE_FIELD(this, kCompareStateOffset);
3261 }
3262 
3263 
3265  ASSERT(is_compare_ic_stub());
3266  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
3267 }
3268 
3269 
3271  ASSERT(is_compare_ic_stub());
3272  return READ_BYTE_FIELD(this, kCompareOperationOffset);
3273 }
3274 
3275 
3277  ASSERT(is_compare_ic_stub());
3278  WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
3279 }
3280 
3281 
3283  ASSERT(is_to_boolean_ic_stub());
3284  return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3285 }
3286 
3287 
3289  ASSERT(is_to_boolean_ic_stub());
3290  WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3291 }
3292 
3293 
3295  ASSERT(kind() == STUB);
3296  return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3297 }
3298 
3299 
3301  ASSERT(kind() == STUB);
3302  WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3303 }
3304 
3305 
3307  Kind kind = this->kind();
3308  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3309 }
3310 
3311 
3313  InlineCacheState ic_state,
3314  ExtraICState extra_ic_state,
3315  PropertyType type,
3316  int argc,
3317  InlineCacheHolderFlag holder) {
3318  // Extra IC state is only allowed for call IC stubs or for store IC
3319  // stubs.
3320  ASSERT(extra_ic_state == kNoExtraICState ||
3321  kind == CALL_IC ||
3322  kind == STORE_IC ||
3323  kind == KEYED_STORE_IC);
3324  // Compute the bit mask.
3325  int bits = KindField::encode(kind)
3326  | ICStateField::encode(ic_state)
3327  | TypeField::encode(type)
3328  | ExtraICStateField::encode(extra_ic_state)
3329  | (argc << kArgumentsCountShift)
3330  | CacheHolderField::encode(holder);
3331  return static_cast<Flags>(bits);
3332 }
3333 
3334 
3336  PropertyType type,
3337  ExtraICState extra_ic_state,
3338  InlineCacheHolderFlag holder,
3339  int argc) {
3340  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
3341 }
3342 
3343 
3345  return KindField::decode(flags);
3346 }
3347 
3348 
3350  return ICStateField::decode(flags);
3351 }
3352 
3353 
3355  return ExtraICStateField::decode(flags);
3356 }
3357 
3358 
3360  return TypeField::decode(flags);
3361 }
3362 
3363 
3365  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
3366 }
3367 
3368 
3370  return CacheHolderField::decode(flags);
3371 }
3372 
3373 
3375  int bits = flags & ~TypeField::kMask;
3376  return static_cast<Flags>(bits);
3377 }
3378 
3379 
3382  // GetCodeFromTargetAddress might be called when marking objects during mark
3383  // sweep. reinterpret_cast is therefore used instead of the more appropriate
3384  // Code::cast. Code::cast does not work when the object's map is
3385  // marked.
3386  Code* result = reinterpret_cast<Code*>(code);
3387  return result;
3388 }
3389 
3390 
3392  return HeapObject::
3393  FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3394 }
3395 
3396 
3397 Object* Map::prototype() {
3398  return READ_FIELD(this, kPrototypeOffset);
3399 }
3400 
3401 
3402 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3403  ASSERT(value->IsNull() || value->IsJSReceiver());
3404  WRITE_FIELD(this, kPrototypeOffset, value);
3405  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3406 }
3407 
3408 
3409 DescriptorArray* Map::instance_descriptors() {
3410  Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3411  if (object->IsSmi()) {
3412  return GetHeap()->empty_descriptor_array();
3413  } else {
3414  return DescriptorArray::cast(object);
3415  }
3416 }
3417 
3418 
3420  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
3421 }
3422 
3423 
3425  Object* object = READ_FIELD(this,
3426  kInstanceDescriptorsOrBitField3Offset);
3427  if (!object->IsSmi()) {
3428 #ifdef DEBUG
3429  ZapInstanceDescriptors();
3430 #endif
3431  WRITE_FIELD(
3432  this,
3433  kInstanceDescriptorsOrBitField3Offset,
3435  }
3436 }
3437 
3438 
3439 void Map::set_instance_descriptors(DescriptorArray* value,
3440  WriteBarrierMode mode) {
3441  Object* object = READ_FIELD(this,
3442  kInstanceDescriptorsOrBitField3Offset);
3443  Heap* heap = GetHeap();
3444  if (value == heap->empty_descriptor_array()) {
3445  clear_instance_descriptors();
3446  return;
3447  } else {
3448  if (object->IsSmi()) {
3449  value->set_bit_field3_storage(Smi::cast(object)->value());
3450  } else {
3451  value->set_bit_field3_storage(
3453  }
3454  }
3455  ASSERT(!is_shared());
3456 #ifdef DEBUG
3457  if (value != instance_descriptors()) {
3458  ZapInstanceDescriptors();
3459  }
3460 #endif
3461  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3463  heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3464 }
3465 
3466 
3468  Object* object = READ_FIELD(this,
3469  kInstanceDescriptorsOrBitField3Offset);
3470  if (object->IsSmi()) {
3471  return Smi::cast(object)->value();
3472  } else {
3473  return DescriptorArray::cast(object)->bit_field3_storage();
3474  }
3475 }
3476 
3477 
3478 void Map::set_bit_field3(int value) {
3479  ASSERT(Smi::IsValid(value));
3480  Object* object = READ_FIELD(this,
3481  kInstanceDescriptorsOrBitField3Offset);
3482  if (object->IsSmi()) {
3483  WRITE_FIELD(this,
3484  kInstanceDescriptorsOrBitField3Offset,
3485  Smi::FromInt(value));
3486  } else {
3488  }
3489 }
3490 
3491 
3493  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3494  if (object->IsFixedArray()) {
3495  return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
3496  } else {
3497  return object;
3498  }
3499 }
3500 
3501 
3503  return instance_descriptors()->elements_transition_map();
3504 }
3505 
3506 
3507 void Map::set_elements_transition_map(Map* transitioned_map) {
3508  return instance_descriptors()->set_elements_transition_map(transitioned_map);
3509 }
3510 
3511 
3513  Heap* heap = GetHeap();
3514  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
3515  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
3516  (value->IsMap() && GetBackPointer()->IsUndefined()));
3517  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3518  if (object->IsFixedArray()) {
3519  FixedArray::cast(object)->set(
3520  kProtoTransitionBackPointerOffset, value, mode);
3521  } else {
3522  WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3524  heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3525  }
3526 }
3527 
3528 
3529 FixedArray* Map::prototype_transitions() {
3530  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3531  if (object->IsFixedArray()) {
3532  return FixedArray::cast(object);
3533  } else {
3534  return GetHeap()->empty_fixed_array();
3535  }
3536 }
3537 
3538 
3539 void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
3540  Heap* heap = GetHeap();
3541  ASSERT(value != heap->empty_fixed_array());
3542  value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
3543 #ifdef DEBUG
3544  if (value != prototype_transitions()) {
3545  ZapPrototypeTransitions();
3546  }
3547 #endif
3548  WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
3550  heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
3551 }
3552 
3553 
3555  ASSERT(undefined->IsUndefined());
3556  WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
3557 }
3558 
3559 
3561  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
3562  return reinterpret_cast<HeapObject*>(object);
3563 }
3564 
3565 
3566 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3567 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3568 
3569 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3570 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
3571 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
3572 
3573 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3574 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3575 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3576 
3577 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3578 
3579 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3580 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3581 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
3582 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
3583 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
3584 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
3585  kExpectedReceiverTypeOffset)
3586 
3587 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
3588 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
3589 
3590 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
3591 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
3592 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
3593 
3594 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
3595 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
3596 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
3597 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
3598 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
3599 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
3600 
3601 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
3602 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
3603 
3604 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
3605 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
3606 
3607 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
3608 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
3609 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
3610  kPropertyAccessorsOffset)
3611 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
3612  kPrototypeTemplateOffset)
3613 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
3614 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
3615  kNamedPropertyHandlerOffset)
3616 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
3617  kIndexedPropertyHandlerOffset)
3618 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
3619  kInstanceTemplateOffset)
3620 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
3621 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
3622 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
3623  kInstanceCallHandlerOffset)
3624 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
3625  kAccessCheckInfoOffset)
3626 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
3627 
3628 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
3629 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
3630  kInternalFieldCountOffset)
3631 
3632 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
3633 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
3634 
3635 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
3636 
3637 ACCESSORS(Script, source, Object, kSourceOffset)
3638 ACCESSORS(Script, name, Object, kNameOffset)
3639 ACCESSORS(Script, id, Object, kIdOffset)
3640 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
3641 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
3642 ACCESSORS(Script, data, Object, kDataOffset)
3643 ACCESSORS(Script, context_data, Object, kContextOffset)
3644 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
3645 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
3646 ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
3647 ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
3648 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
3649 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
3651  kEvalFrominstructionsOffsetOffset)
3652 
// Accessors for the debugger's bookkeeping objects (DebugInfo and
// BreakPointInfo).  Compiled in only when debugger support is enabled.
3653 #ifdef ENABLE_DEBUGGER_SUPPORT
3654 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
3655 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
3656 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3657 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3658 
3659 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
3660 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
3661 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
3662 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3663 #endif
3664 
3665 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3666 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3667 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3668 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3669  kInstanceClassNameOffset)
3670 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3671 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3672 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3673 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3674 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3675  kThisPropertyAssignmentsOffset)
3676 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
3677 
3678 
3679 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
3680  kHiddenPrototypeBit)
3681 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
3682 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
3683  kNeedsAccessCheckBit)
3684 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
3685  kReadOnlyPrototypeBit)
3687  kIsExpressionBit)
3688 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
3689  kIsTopLevelBit)
3690 BOOL_GETTER(SharedFunctionInfo,
3693  kHasOnlySimpleThisPropertyAssignments)
3694 BOOL_ACCESSORS(SharedFunctionInfo,
3696  allows_lazy_compilation,
3697  kAllowLazyCompilation)
3698 BOOL_ACCESSORS(SharedFunctionInfo,
3699  compiler_hints,
3701  kUsesArguments)
3702 BOOL_ACCESSORS(SharedFunctionInfo,
3703  compiler_hints,
3704  has_duplicate_parameters,
3705  kHasDuplicateParameters)
3706 
3707 
3708 #if V8_HOST_ARCH_32_BIT
3709 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
3710 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
3711  kFormalParameterCountOffset)
3712 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
3713  kExpectedNofPropertiesOffset)
3714 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3715 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
3716  kStartPositionAndTypeOffset)
3717 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
3718 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
3719  kFunctionTokenPositionOffset)
3720 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
3721  kCompilerHintsOffset)
3722 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
3723  kThisPropertyAssignmentsCountOffset)
3724 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
3725 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
3726 SMI_ACCESSORS(SharedFunctionInfo,
3728  kStressDeoptCounterOffset)
3729 #else
3730 
3731 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
3732  STATIC_ASSERT(holder::offset % kPointerSize == 0); \
3733  int holder::name() { \
3734  int value = READ_INT_FIELD(this, offset); \
3735  ASSERT(kHeapObjectTag == 1); \
3736  ASSERT((value & kHeapObjectTag) == 0); \
3737  return value >> 1; \
3738  } \
3739  void holder::set_##name(int value) { \
3740  ASSERT(kHeapObjectTag == 1); \
3741  ASSERT((value & 0xC0000000) == 0xC0000000 || \
3742  (value & 0xC0000000) == 0x000000000); \
3743  WRITE_INT_FIELD(this, \
3744  offset, \
3745  (value << 1) & ~kHeapObjectTag); \
3746  }
3747 
3748 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
3749  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
3750  INT_ACCESSORS(holder, name, offset)
3751 
3752 
3753 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
3754 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3755  formal_parameter_count,
3756  kFormalParameterCountOffset)
3757 
3758 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3759  expected_nof_properties,
3760  kExpectedNofPropertiesOffset)
3761 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
3762 
3763 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
3764 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3765  start_position_and_type,
3766  kStartPositionAndTypeOffset)
3767 
3768 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3769  function_token_position,
3770  kFunctionTokenPositionOffset)
3771 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3772  compiler_hints,
3773  kCompilerHintsOffset)
3774 
3775 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
3776  this_property_assignments_count,
3777  kThisPropertyAssignmentsCountOffset)
3778 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
3779 
3780 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, counters, kCountersOffset)
3781 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
3782  stress_deopt_counter,
3783  kStressDeoptCounterOffset)
3784 #endif
3785 
3786 
3788  return READ_BYTE_FIELD(this, kConstructionCountOffset);
3789 }
3790 
3791 
3793  ASSERT(0 <= value && value < 256);
3794  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
3795 }
3796 
3797 
3798 BOOL_ACCESSORS(SharedFunctionInfo,
3799  compiler_hints,
3800  live_objects_may_exist,
3801  kLiveObjectsMayExist)
3802 
3803 
3804 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3805  return initial_map() != GetHeap()->undefined_value();
3806 }
3807 
3808 
3809 BOOL_GETTER(SharedFunctionInfo,
3810  compiler_hints,
3811  optimization_disabled,
3812  kOptimizationDisabled)
3813 
3814 
3815 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3816  set_compiler_hints(BooleanBit::set(compiler_hints(),
3817  kOptimizationDisabled,
3818  disable));
3819  // If disabling optimizations we reflect that in the code object so
3820  // it will not be counted as optimizable code.
3821  if ((code()->kind() == Code::FUNCTION) && disable) {
3822  code()->set_optimizable(false);
3823  }
3824 }
3825 
3826 
3828  if (code()->kind() != Code::FUNCTION) return 0;
3829  return code()->profiler_ticks();
3830 }
3831 
3832 
3834  int hints = compiler_hints();
3835  if (BooleanBit::get(hints, kExtendedModeFunction)) {
3836  ASSERT(BooleanBit::get(hints, kStrictModeFunction));
3837  return EXTENDED_MODE;
3838  }
3839  return BooleanBit::get(hints, kStrictModeFunction)
3841 }
3842 
3843 
3845  // We only allow language mode transitions that go set the same language mode
3846  // again or go up in the chain:
3847  // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
3848  ASSERT(this->language_mode() == CLASSIC_MODE ||
3849  this->language_mode() == language_mode ||
3850  language_mode == EXTENDED_MODE);
3851  int hints = compiler_hints();
3852  hints = BooleanBit::set(
3853  hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
3854  hints = BooleanBit::set(
3855  hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
3856  set_compiler_hints(hints);
3857 }
3858 
3859 
3861  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
3862 }
3863 
3864 BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
3865  kExtendedModeFunction)
3866 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
3867 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
3869  kNameShouldPrintAsAnonymous)
3870 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
3871 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
3872 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
3873 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
3874  kDontOptimize)
3875 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
3876 
3877 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
3878 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
3879 
3880 ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
3881 
3882 bool Script::HasValidSource() {
3883  Object* src = this->source();
3884  if (!src->IsString()) return true;
3885  String* src_str = String::cast(src);
3886  if (!StringShape(src_str).IsExternal()) return true;
3887  if (src_str->IsAsciiRepresentation()) {
3888  return ExternalAsciiString::cast(src)->resource() != NULL;
3889  } else if (src_str->IsTwoByteRepresentation()) {
3890  return ExternalTwoByteString::cast(src)->resource() != NULL;
3891  }
3892  return true;
3893 }
3894 
3895 
3897  ASSERT(code()->kind() == Code::BUILTIN);
3898  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
3899 }
3900 
3901 
3903  return start_position_and_type() >> kStartPositionShift;
3904 }
3905 
3906 
3907 void SharedFunctionInfo::set_start_position(int start_position) {
3908  set_start_position_and_type((start_position << kStartPositionShift)
3909  | (start_position_and_type() & ~kStartPositionMask));
3910 }
3911 
3912 
3913 Code* SharedFunctionInfo::code() {
3914  return Code::cast(READ_FIELD(this, kCodeOffset));
3915 }
3916 
3917 
3919  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3920 }
3921 
3922 
3923 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3924  WRITE_FIELD(this, kCodeOffset, value);
3925  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3926 }
3927 
3928 
3929 ScopeInfo* SharedFunctionInfo::scope_info() {
3930  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
3931 }
3932 
3933 
3934 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
3935  WriteBarrierMode mode) {
3936  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3938  this,
3939  kScopeInfoOffset,
3940  reinterpret_cast<Object*>(value),
3941  mode);
3942 }
3943 
3944 
3946  return code() !=
3947  Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
3948 }
3949 
3950 
3952  return function_data()->IsFunctionTemplateInfo();
3953 }
3954 
3955 
3956 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3957  ASSERT(IsApiFunction());
3958  return FunctionTemplateInfo::cast(function_data());
3959 }
3960 
3961 
3963  return function_data()->IsSmi();
3964 }
3965 
3966 
3968  ASSERT(HasBuiltinFunctionId());
3969  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
3970 }
3971 
3972 
3974  return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3975 }
3976 
3977 
3979  int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
3980  set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
3981 }
3982 
3983 
3985  return ICAgeBits::decode(counters());
3986 }
3987 
3988 
3990  set_counters(ICAgeBits::update(counters(), ic_age));
3991 }
3992 
3993 
3995  return DeoptCountBits::decode(counters());
3996 }
3997 
3998 
4000  set_counters(DeoptCountBits::update(counters(), deopt_count));
4001 }
4002 
4003 
4005  int value = counters();
4006  int deopt_count = DeoptCountBits::decode(value);
4007  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4008  set_counters(DeoptCountBits::update(value, deopt_count));
4009 }
4010 
4011 
4013  return OptReenableTriesBits::decode(counters());
4014 }
4015 
4016 
4018  set_counters(OptReenableTriesBits::update(counters(), tries));
4019 }
4020 
4021 
4023  Code* code = this->code();
4024  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
4025 }
4026 
4027 
4029  int tries = opt_reenable_tries();
4030  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
4031  // We reenable optimization whenever the number of tries is a large
4032  // enough power of 2.
4033  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
4034  set_optimization_disabled(false);
4035  set_opt_count(0);
4036  set_deopt_count(0);
4037  code()->set_optimizable(true);
4038  }
4039 }
4040 
4041 
4043  return context()->global()->IsJSBuiltinsObject();
4044 }
4045 
4046 
4048  return shared()->formal_parameter_count() !=
4050 }
4051 
4052 
4054  return code()->kind() == Code::OPTIMIZED_FUNCTION;
4055 }
4056 
4057 
4059  return code()->kind() == Code::FUNCTION && code()->optimizable();
4060 }
4061 
4062 
4064  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
4065 }
4066 
4067 
4069  return Code::cast(unchecked_code());
4070 }
4071 
4072 
4074  return reinterpret_cast<Code*>(
4075  Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
4076 }
4077 
4078 
4079 void JSFunction::set_code(Code* value) {
4080  ASSERT(!HEAP->InNewSpace(value));
4081  Address entry = value->entry();
4082  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
4083  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
4084  this,
4085  HeapObject::RawField(this, kCodeEntryOffset),
4086  value);
4087 }
4088 
4089 
4090 void JSFunction::ReplaceCode(Code* code) {
4091  bool was_optimized = IsOptimized();
4092  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
4093 
4094  set_code(code);
4095 
4096  // Add/remove the function from the list of optimized functions for this
4097  // context based on the state change.
4098  if (!was_optimized && is_optimized) {
4099  context()->global_context()->AddOptimizedFunction(this);
4100  }
4101  if (was_optimized && !is_optimized) {
4102  context()->global_context()->RemoveOptimizedFunction(this);
4103  }
4104 }
4105 
4106 
4108  return Context::cast(READ_FIELD(this, kContextOffset));
4109 }
4110 
4111 
4113  return READ_FIELD(this, kContextOffset);
4114 }
4115 
4116 
4117 SharedFunctionInfo* JSFunction::unchecked_shared() {
4118  return reinterpret_cast<SharedFunctionInfo*>(
4119  READ_FIELD(this, kSharedFunctionInfoOffset));
4120 }
4121 
4122 
// Installs |value| as the function's context.  Only undefined (used as a
// placeholder) or a genuine Context is allowed; the write barrier keeps
// the GC informed of the new reference.
4123 void JSFunction::set_context(Object* value) {
4124  ASSERT(value->IsUndefined() || value->IsContext());
4125  WRITE_FIELD(this, kContextOffset, value);
4126  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
4127 }
4128 
4129 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
4130  kPrototypeOrInitialMapOffset)
4131 
4132 
// Returns the function's initial map.  The underlying slot is shared with
// the prototype (see instance_prototype() below), so this checked cast is
// only valid when the slot actually holds a map.
4133 Map* JSFunction::initial_map() {
4134  return Map::cast(prototype_or_initial_map());
4135 }
4136 
4137 
4139  set_prototype_or_initial_map(value);
4140 }
4141 
4142 
4144  Map* initial_map) {
4145  Context* global_context = context()->global_context();
4146  Object* array_function =
4147  global_context->get(Context::ARRAY_FUNCTION_INDEX);
4148  if (array_function->IsJSFunction() &&
4149  this == JSFunction::cast(array_function)) {
4150  // Replace all of the cached initial array maps in the global context with
4151  // the appropriate transitioned elements kind maps.
4152  Heap* heap = GetHeap();
4153  MaybeObject* maybe_maps =
4155  FixedArray* maps;
4156  if (!maybe_maps->To(&maps)) return maybe_maps;
4157 
4158  Map* current_map = initial_map;
4159  ElementsKind kind = current_map->elements_kind();
4161  maps->set(kind, current_map);
4162  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
4163  i < kFastElementsKindCount; ++i) {
4164  Map* new_map;
4166  MaybeObject* maybe_new_map =
4167  current_map->CreateNextElementsTransition(next_kind);
4168  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
4169  maps->set(next_kind, new_map);
4170  current_map = new_map;
4171  }
4172  global_context->set_js_array_maps(maps);
4173  }
4174  set_initial_map(initial_map);
4175  return this;
4176 }
4177 
4178 
4180  return prototype_or_initial_map()->IsMap();
4181 }
4182 
4183 
4185  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
4186 }
4187 
4188 
4190  return map()->has_non_instance_prototype() || has_instance_prototype();
4191 }
4192 
4193 
4195  ASSERT(has_instance_prototype());
4196  if (has_initial_map()) return initial_map()->prototype();
4197  // When there is no initial map and the prototype is a JSObject, the
4198  // initial map field is used for the prototype field.
4199  return prototype_or_initial_map();
4200 }
4201 
4202 
4204  ASSERT(has_prototype());
4205  // If the function's prototype property has been set to a non-JSObject
4206  // value, that value is stored in the constructor field of the map.
4207  if (map()->has_non_instance_prototype()) return map()->constructor();
4208  return instance_prototype();
4209 }
4210 
4211 
4213  return map()->function_with_prototype();
4214 }
4215 
4216 
4218  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
4219 }
4220 
4221 
4222 FixedArray* JSFunction::literals() {
4223  ASSERT(!shared()->bound());
4224  return literals_or_bindings();
4225 }
4226 
4227 
4228 void JSFunction::set_literals(FixedArray* literals) {
4229  ASSERT(!shared()->bound());
4230  set_literals_or_bindings(literals);
4231 }
4232 
4233 
4235  ASSERT(shared()->bound());
4236  return literals_or_bindings();
4237 }
4238 
4239 
4240 void JSFunction::set_function_bindings(FixedArray* bindings) {
4241  ASSERT(shared()->bound());
4242  // Bound function literal may be initialized to the empty fixed array
4243  // before the bindings are set.
4244  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
4245  bindings->map() == GetHeap()->fixed_cow_array_map());
4246  set_literals_or_bindings(bindings);
4247 }
4248 
4249 
4251  ASSERT(!shared()->bound());
4252  return literals()->length();
4253 }
4254 
4255 
4257  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4258  return READ_FIELD(this, OffsetOfFunctionWithId(id));
4259 }
4260 
4261 
4263  Object* value) {
4264  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4265  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
4266  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
4267 }
4268 
4269 
4271  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4272  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
4273 }
4274 
4275 
4277  Code* value) {
4278  ASSERT(id < kJSBuiltinsCount); // id is unsigned.
4279  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
4280  ASSERT(!HEAP->InNewSpace(value));
4281 }
4282 
4283 
4284 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
4285 ACCESSORS(JSProxy, hash, Object, kHashOffset)
4286 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
4287 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
4288 
4289 
4290 void JSProxy::InitializeBody(int object_size, Object* value) {
4291  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
4292  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
4293  WRITE_FIELD(this, offset, value);
4294  }
4295 }
4296 
4297 
4298 ACCESSORS(JSSet, table, Object, kTableOffset)
4299 ACCESSORS(JSMap, table, Object, kTableOffset)
4300 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
4301 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
4302 
4303 
// Returns the external address wrapped by this Foreign, converting the
// stored intptr_t back into an Address.
4304 Address Foreign::foreign_address() {
4305  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
4306 }
4307 
4308 
4310  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
4311 }
4312 
4313 
4314 ACCESSORS(JSModule, context, Object, kContextOffset)
4315 
4316 
4317 JSModule* JSModule::cast(Object* obj) {
4318  ASSERT(obj->IsJSModule());
4320  return reinterpret_cast<JSModule*>(obj);
4321 }
4322 
4323 
4324 ACCESSORS(JSValue, value, Object, kValueOffset)
4325 
4326 
4327 JSValue* JSValue::cast(Object* obj) {
4328  ASSERT(obj->IsJSValue());
4330  return reinterpret_cast<JSValue*>(obj);
4331 }
4332 
4333 
4334 ACCESSORS(JSDate, value, Object, kValueOffset)
4335 ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
4336 ACCESSORS(JSDate, year, Object, kYearOffset)
4337 ACCESSORS(JSDate, month, Object, kMonthOffset)
4338 ACCESSORS(JSDate, day, Object, kDayOffset)
4339 ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
4340 ACCESSORS(JSDate, hour, Object, kHourOffset)
4341 ACCESSORS(JSDate, min, Object, kMinOffset)
4342 ACCESSORS(JSDate, sec, Object, kSecOffset)
4343 
4344 
4345 JSDate* JSDate::cast(Object* obj) {
4346  ASSERT(obj->IsJSDate());
4348  return reinterpret_cast<JSDate*>(obj);
4349 }
4350 
4351 
4352 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
4353 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
4354 ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
4355 ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
4356 ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
4357 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
4358 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
4359 
4360 
4361 JSMessageObject* JSMessageObject::cast(Object* obj) {
4362  ASSERT(obj->IsJSMessageObject());
4364  return reinterpret_cast<JSMessageObject*>(obj);
4365 }
4366 
4367 
4368 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
4369 ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
4370 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
4371 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
4372 ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
4373 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
4374 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
4375 
// First byte of the generated instructions, which are laid out
// immediately after the fixed-size Code header.
4376 byte* Code::instruction_start() {
4377  return FIELD_ADDR(this, kHeaderSize);
4378 }
4379 
4380 
4382  return instruction_start() + instruction_size();
4383 }
4384 
4385 
4387  return RoundUp(instruction_size(), kObjectAlignment);
4388 }
4389 
4390 
4392  return reinterpret_cast<FixedArray*>(
4393  READ_FIELD(this, kDeoptimizationDataOffset));
4394 }
4395 
4396 
4398  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
4399 }
4400 
4401 
4403  return unchecked_relocation_info()->GetDataStartAddress();
4404 }
4405 
4406 
4408  return unchecked_relocation_info()->length();
4409 }
4410 
4411 
4413  return instruction_start();
4414 }
4415 
4416 
4417 bool Code::contains(byte* inner_pointer) {
4418  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4419 }
4420 
4421 
4422 ACCESSORS(JSArray, length, Object, kLengthOffset)
4423 
4424 
4425 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4426 
4427 
4428 JSRegExp::Type JSRegExp::TypeTag() {
4429  Object* data = this->data();
4430  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
4431  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
4432  return static_cast<JSRegExp::Type>(smi->value());
4433 }
4434 
4435 
4437  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
4438  return static_cast<JSRegExp::Type>(smi->value());
4439 }
4440 
4441 
4443  switch (TypeTag()) {
4444  case ATOM:
4445  return 0;
4446  case IRREGEXP:
4447  return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
4448  default:
4449  UNREACHABLE();
4450  return -1;
4451  }
4452 }
4453 
4454 
4456  ASSERT(this->data()->IsFixedArray());
4457  Object* data = this->data();
4458  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
4459  return Flags(smi->value());
4460 }
4461 
4462 
4464  ASSERT(this->data()->IsFixedArray());
4465  Object* data = this->data();
4466  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
4467  return pattern;
4468 }
4469 
4470 
4471 Object* JSRegExp::DataAt(int index) {
4472  ASSERT(TypeTag() != NOT_COMPILED);
4473  return FixedArray::cast(data())->get(index);
4474 }
4475 
4476 
4477 Object* JSRegExp::DataAtUnchecked(int index) {
4478  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4479  int offset = FixedArray::kHeaderSize + index * kPointerSize;
4480  return READ_FIELD(fa, offset);
4481 }
4482 
4483 
4484 void JSRegExp::SetDataAt(int index, Object* value) {
4485  ASSERT(TypeTag() != NOT_COMPILED);
4486  ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4487  FixedArray::cast(data())->set(index, value);
4488 }
4489 
4490 
4491 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4492  ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4493  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4494  if (value->IsSmi()) {
4495  fa->set_unchecked(index, Smi::cast(value));
4496  } else {
4497  // We only do this during GC, so we don't need to notify the write barrier.
4498  fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4499  }
4500 }
4501 
4502 
4504  ElementsKind kind = map()->elements_kind();
4505 #if DEBUG
4506  FixedArrayBase* fixed_array =
4507  reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4508  Map* map = fixed_array->map();
4510  (map == GetHeap()->fixed_array_map() ||
4511  map == GetHeap()->fixed_cow_array_map())) ||
4512  (IsFastDoubleElementsKind(kind) &&
4513  (fixed_array->IsFixedDoubleArray() ||
4514  fixed_array == GetHeap()->empty_fixed_array())) ||
4515  (kind == DICTIONARY_ELEMENTS &&
4516  fixed_array->IsFixedArray() &&
4517  fixed_array->IsDictionary()) ||
4518  (kind > DICTIONARY_ELEMENTS));
4520  (elements()->IsFixedArray() && elements()->length() >= 2));
4521 #endif
4522  return kind;
4523 }
4524 
4525 
4527  return ElementsAccessor::ForKind(GetElementsKind());
4528 }
4529 
4530 
4532  return IsFastObjectElementsKind(GetElementsKind());
4533 }
4534 
4535 
4537  return IsFastSmiElementsKind(GetElementsKind());
4538 }
4539 
4540 
4542  return IsFastSmiOrObjectElementsKind(GetElementsKind());
4543 }
4544 
4545 
4547  return IsFastDoubleElementsKind(GetElementsKind());
4548 }
4549 
4550 
4552  return IsFastHoleyElementsKind(GetElementsKind());
4553 }
4554 
4555 
4557  return GetElementsKind() == DICTIONARY_ELEMENTS;
4558 }
4559 
4560 
4562  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4563 }
4564 
4565 
4567  HeapObject* array = elements();
4568  ASSERT(array != NULL);
4569  return array->IsExternalArray();
4570 }
4571 
4572 
// Generates JSObject::HasExternal<name>Elements() predicates that check
// whether the elements backing store is an external array with the given
// instance type.  (Comments cannot go inside the macro body because of
// the line continuations.)
4573 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4574 bool JSObject::HasExternal##name##Elements() { \
4575  HeapObject* array = elements(); \
4576  ASSERT(array != NULL); \
4577  if (!array->IsHeapObject()) \
4578  return false; \
4579  return array->map()->instance_type() == type; \
4580 }
4581 
4582 
4586 EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
4589 EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
4596 
4597 
// True when this object's map records a named-property interceptor.
4598 bool JSObject::HasNamedInterceptor() {
4599  return map()->has_named_interceptor();
4600 }
4601 
4602 
4604  return map()->has_indexed_interceptor();
4605 }
4606 
4607 
4609  ASSERT(HasFastSmiOrObjectElements());
4610  FixedArray* elems = FixedArray::cast(elements());
4611  Isolate* isolate = GetIsolate();
4612  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4613  Object* writable_elems;
4614  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4615  elems, isolate->heap()->fixed_array_map());
4616  if (!maybe_writable_elems->ToObject(&writable_elems)) {
4617  return maybe_writable_elems;
4618  }
4619  }
4620  set_elements(FixedArray::cast(writable_elems));
4621  isolate->counters()->cow_arrays_converted()->Increment();
4622  return writable_elems;
4623 }
4624 
4625 
4627  ASSERT(!HasFastProperties());
4628  return StringDictionary::cast(properties());
4629 }
4630 
4631 
4633  ASSERT(HasDictionaryElements());
4634  return SeededNumberDictionary::cast(elements());
4635 }
4636 
4637 
4638 bool String::IsHashFieldComputed(uint32_t field) {
4639  return (field & kHashNotComputedMask) == 0;
4640 }
4641 
4642 
4644  return IsHashFieldComputed(hash_field());
4645 }
4646 
4647 
4648 uint32_t String::Hash() {
4649  // Fast case: has hash code already been computed?
4650  uint32_t field = hash_field();
4651  if (IsHashFieldComputed(field)) return field >> kHashShift;
4652  // Slow case: compute hash code and set it.
4653  return ComputeAndSetHash();
4654 }
4655 
4656 
// Incremental string hasher (Jenkins one-at-a-time, per the comment in
// AddCharacter below).  Starts out assuming the string may be an array
// index whenever its length is in the valid array-index range; the seed
// initializes the running hash (must be zero unless hash randomization
// is enabled, asserted below).
4657 StringHasher::StringHasher(int length, uint32_t seed)
4658  : length_(length),
4659  raw_running_hash_(seed),
4660  array_index_(0),
4661  is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
4662  is_first_char_(true),
4663  is_valid_(true) {
4664  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
4665 }
4666 
4667 
4669  return length_ > String::kMaxHashCalcLength;
4670 }
4671 
4672 
// Feeds one code point into the running hash and into the incremental
// array-index parse.
4673 void StringHasher::AddCharacter(uint32_t c) {
// NOTE(review): the condition guarding this surrogate-pair branch is not
// visible in this excerpt (a line is missing from the listing) —
// presumably it tests for c above the BMP; confirm against full source.
4675  AddSurrogatePair(c); // Not inlined.
4676  return;
4677  }
4678  // Use the Jenkins one-at-a-time hash function to update the hash
4679  // for the given character.
4680  raw_running_hash_ += c;
4681  raw_running_hash_ += (raw_running_hash_ << 10);
4682  raw_running_hash_ ^= (raw_running_hash_ >> 6);
4683  // Incremental array index computation.
4684  if (is_array_index_) {
4685  if (c < '0' || c > '9') {
4686  is_array_index_ = false;
4687  } else {
4688  int d = c - '0';
4689  if (is_first_char_) {
4690  is_first_char_ = false;
// A multi-character string with a leading '0' can never be a canonical
// array index.
4691  if (c == '0' && length_ > 1) {
4692  is_array_index_ = false;
4693  return;
4694  }
4695  }
// Overflow guard for the decimal accumulator: 429496729U == 0xFFFFFFFF / 10,
// and ((d + 2) >> 3) is 1 exactly when d >= 6, adjusting for the last digit.
4696  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
4697  is_array_index_ = false;
4698  } else {
4699  array_index_ = array_index_ * 10 + d;
4700  }
4701  }
4702  }
4703 }
4704 
4705 
4707  ASSERT(!is_array_index());
4709  AddSurrogatePairNoIndex(c); // Not inlined.
4710  return;
4711  }
4712  raw_running_hash_ += c;
4713  raw_running_hash_ += (raw_running_hash_ << 10);
4714  raw_running_hash_ ^= (raw_running_hash_ >> 6);
4715 }
4716 
4717 
4718 uint32_t StringHasher::GetHash() {
4719  // Get the calculated raw hash value and do some more bit ops to distribute
4720  // the hash further. Ensure that we never return zero as the hash value.
4721  uint32_t result = raw_running_hash_;
4722  result += (result << 3);
4723  result ^= (result >> 11);
4724  result += (result << 15);
4725  if ((result & String::kHashBitMask) == 0) {
4726  result = 27;
4727  }
4728  return result;
4729 }
4730 
4731 
4732 template <typename schar>
4733 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
4734  StringHasher hasher(length, seed);
4735  if (!hasher.has_trivial_hash()) {
4736  int i;
4737  for (i = 0; hasher.is_array_index() && (i < length); i++) {
4738  hasher.AddCharacter(chars[i]);
4739  }
4740  for (; i < length; i++) {
4741  hasher.AddCharacterNoIndex(chars[i]);
4742  }
4743  }
4744  return hasher.GetHashField();
4745 }
4746 
4747 
4748 bool String::AsArrayIndex(uint32_t* index) {
4749  uint32_t field = hash_field();
4750  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
4751  return false;
4752  }
4753  return SlowAsArrayIndex(index);
4754 }
4755 
4756 
4758  return HeapObject::cast(this)->map()->prototype();
4759 }
4760 
4761 
4762 bool JSReceiver::HasProperty(String* name) {
4763  if (IsJSProxy()) {
4764  return JSProxy::cast(this)->HasPropertyWithHandler(name);
4765  }
4766  return GetPropertyAttribute(name) != ABSENT;
4767 }
4768 
4769 
4770 bool JSReceiver::HasLocalProperty(String* name) {
4771  if (IsJSProxy()) {
4772  return JSProxy::cast(this)->HasPropertyWithHandler(name);
4773  }
4774  return GetLocalPropertyAttribute(name) != ABSENT;
4775 }
4776 
4777 
4779  return GetPropertyAttributeWithReceiver(this, key);
4780 }
4781 
4782 // TODO(504): this may be useful in other places too where JSGlobalProxy
4783 // is used.
4785  if (IsJSGlobalProxy()) {
4786  Object* proto = GetPrototype();
4787  if (proto->IsNull()) return GetHeap()->undefined_value();
4788  ASSERT(proto->IsJSGlobalObject());
4789  return proto;
4790  }
4791  return this;
4792 }
4793 
4794 
4796  return IsJSProxy()
4797  ? JSProxy::cast(this)->GetIdentityHash(flag)
4798  : JSObject::cast(this)->GetIdentityHash(flag);
4799 }
4800 
4801 
4802 bool JSReceiver::HasElement(uint32_t index) {
4803  if (IsJSProxy()) {
4804  return JSProxy::cast(this)->HasElementWithHandler(index);
4805  }
4806  return JSObject::cast(this)->HasElementWithReceiver(this, index);
4807 }
4808 
4809 
4811  return BooleanBit::get(flag(), kAllCanReadBit);
4812 }
4813 
4814 
4816  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
4817 }
4818 
4819 
4821  return BooleanBit::get(flag(), kAllCanWriteBit);
4822 }
4823 
4824 
4826  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
4827 }
4828 
4829 
4831  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
4832 }
4833 
4834 
4836  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
4837 }
4838 
4839 
4841  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
4842 }
4843 
4844 
4846  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
4847 }
4848 
4849 
4850 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
4851  Object* function_template = expected_receiver_type();
4852  if (!function_template->IsFunctionTemplateInfo()) return true;
4853  return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
4854 }
4855 
4856 
4857 template<typename Shape, typename Key>
4859  Object* key,
4860  Object* value) {
4861  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
4862 }
4863 
4864 
4865 template<typename Shape, typename Key>
4867  Object* key,
4868  Object* value,
4869  PropertyDetails details) {
4870  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
4871  int index = HashTable<Shape, Key>::EntryToIndex(entry);
4872  AssertNoAllocation no_gc;
4874  FixedArray::set(index, key, mode);
4875  FixedArray::set(index+1, value, mode);
4876  FixedArray::set(index+2, details.AsSmi());
4877 }
4878 
4879 
4880 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
4881  ASSERT(other->IsNumber());
4882  return key == static_cast<uint32_t>(other->Number());
4883 }
4884 
4885 
4886 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
4887  return ComputeIntegerHash(key, 0);
4888 }
4889 
4890 
4892  Object* other) {
4893  ASSERT(other->IsNumber());
4894  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
4895 }
4896 
4897 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
4898  return ComputeIntegerHash(key, seed);
4899 }
4900 
4902  uint32_t seed,
4903  Object* other) {
4904  ASSERT(other->IsNumber());
4905  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
4906 }
4907 
4908 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
4909  return Isolate::Current()->heap()->NumberFromUint32(key);
4910 }
4911 
4912 
4913 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
4914  // We know that all entries in a hash table had their hash keys created.
4915  // Use that knowledge to have fast failure.
4916  if (key->Hash() != String::cast(other)->Hash()) return false;
4917  return key->Equals(String::cast(other));
4918 }
4919 
4920 
4921 uint32_t StringDictionaryShape::Hash(String* key) {
4922  return key->Hash();
4923 }
4924 
4925 
4926 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4927  return String::cast(other)->Hash();
4928 }
4929 
4930 
4931 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4932  return key;
4933 }
4934 
4935 
4936 template <int entrysize>
4937 bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
4938  return key->SameValue(other);
4939 }
4940 
4941 
4942 template <int entrysize>
4944  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
4945  return Smi::cast(maybe_hash->ToObjectChecked())->value();
4946 }
4947 
4948 
4949 template <int entrysize>
4951  Object* other) {
4952  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
4953  return Smi::cast(maybe_hash->ToObjectChecked())->value();
4954 }
4955 
4956 
4957 template <int entrysize>
4959  return key;
4960 }
4961 
4962 
4964  // No write barrier is needed since empty_fixed_array is not in new space.
4965  // Please note this function is used during marking:
4966  // - MarkCompactCollector::MarkUnmarkedObject
4967  // - IncrementalMarking::Step
4968  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4969  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4970 }
4971 
4972 
4973 void JSArray::EnsureSize(int required_size) {
4975  FixedArray* elts = FixedArray::cast(elements());
4976  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4977  if (elts->length() < required_size) {
4978  // Doubling in size would be overkill, but leave some slack to avoid
4979  // constantly growing.
4980  Expand(required_size + (required_size >> 3));
4981  // It's a performance benefit to keep a frequently used array in new-space.
4982  } else if (!GetHeap()->new_space()->Contains(elts) &&
4983  required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4984  // Expand will allocate a new backing store in new space even if the size
4985  // we asked for isn't larger than what we had before.
4986  Expand(required_size);
4987  }
4988 }
4989 
4990 
4991 void JSArray::set_length(Smi* length) {
4992  // Don't need a write barrier for a Smi.
4993  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4994 }
4995 
4996 
4998  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
4999  ASSERT(result == !HasExternalArrayElements());
5000  return result;
5001 }
5002 
5003 
5004 MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
5005  MaybeObject* maybe_result = EnsureCanContainElements(
5006  storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
5007  if (maybe_result->IsFailure()) return maybe_result;
5008  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
5010  ((storage->map() != GetHeap()->fixed_double_array_map()) &&
5013  FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
5014  set_elements(storage);
5015  set_length(Smi::FromInt(storage->length()));
5016  return this;
5017 }
5018 
5019 
5020 MaybeObject* FixedArray::Copy() {
5021  if (length() == 0) return this;
5022  return GetHeap()->CopyFixedArray(this);
5023 }
5024 
5025 
5026 MaybeObject* FixedDoubleArray::Copy() {
5027  if (length() == 0) return this;
5028  return GetHeap()->CopyFixedDoubleArray(this);
5029 }
5030 
5031 
5032 void TypeFeedbackCells::SetAstId(int index, Smi* id) {
5033  set(1 + index * 2, id);
5034 }
5035 
5036 
5038  return Smi::cast(get(1 + index * 2));
5039 }
5040 
5041 
5042 void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
5043  set(index * 2, cell);
5044 }
5045 
5046 
5047 JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
5048  return JSGlobalPropertyCell::cast(get(index * 2));
5049 }
5050 
5051 
5053  return isolate->factory()->the_hole_value();
5054 }
5055 
5056 
5058  return isolate->factory()->undefined_value();
5059 }
5060 
5061 
5063  return heap->raw_unchecked_the_hole_value();
5064 }
5065 
5066 
5067 SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
5068 SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
5069  kIcWithTypeinfoCountOffset)
5071  kTypeFeedbackCellsOffset)
5072 
5073 
5074 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
5075 
5076 
5077 Relocatable::Relocatable(Isolate* isolate) {
5078  ASSERT(isolate == Isolate::Current());
5079  isolate_ = isolate;
5080  prev_ = isolate->relocatable_top();
5081  isolate->set_relocatable_top(this);
5082 }
5083 
5084 
5085 Relocatable::~Relocatable() {
5086  ASSERT(isolate_ == Isolate::Current());
5087  ASSERT_EQ(isolate_->relocatable_top(), this);
5088  isolate_->set_relocatable_top(prev_);
5089 }
5090 
5091 
5093  return map->instance_size();
5094 }
5095 
5096 
5097 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
5098  v->VisitExternalReference(
5099  reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5100 }
5101 
5102 
5103 template<typename StaticVisitor>
5105  StaticVisitor::VisitExternalReference(
5106  reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
5107 }
5108 
5109 
5112  v->VisitExternalAsciiString(
5113  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5114 }
5115 
5116 
5117 template<typename StaticVisitor>
5120  StaticVisitor::VisitExternalAsciiString(
5121  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5122 }
5123 
5124 
5127  v->VisitExternalTwoByteString(
5128  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5129 }
5130 
5131 
5132 template<typename StaticVisitor>
5135  StaticVisitor::VisitExternalTwoByteString(
5136  reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
5137 }
5138 
5139 #define SLOT_ADDR(obj, offset) \
5140  reinterpret_cast<Object**>((obj)->address() + offset)
5141 
5142 template<int start_offset, int end_offset, int size>
5144  HeapObject* obj,
5145  ObjectVisitor* v) {
5146  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
5147 }
5148 
5149 
5150 template<int start_offset>
5152  int object_size,
5153  ObjectVisitor* v) {
5154  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
5155 }
5156 
5157 #undef SLOT_ADDR
5158 
5159 #undef TYPE_CHECKER
5160 #undef CAST_ACCESSOR
5161 #undef INT_ACCESSORS
5162 #undef ACCESSORS
5163 #undef ACCESSORS_TO_SMI
5164 #undef SMI_ACCESSORS
5165 #undef BOOL_GETTER
5166 #undef BOOL_ACCESSORS
5167 #undef FIELD_ADDR
5168 #undef READ_FIELD
5169 #undef WRITE_FIELD
5170 #undef WRITE_BARRIER
5171 #undef CONDITIONAL_WRITE_BARRIER
5172 #undef READ_DOUBLE_FIELD
5173 #undef WRITE_DOUBLE_FIELD
5174 #undef READ_INT_FIELD
5175 #undef WRITE_INT_FIELD
5176 #undef READ_INTPTR_FIELD
5177 #undef WRITE_INTPTR_FIELD
5178 #undef READ_UINT32_FIELD
5179 #undef WRITE_UINT32_FIELD
5180 #undef READ_SHORT_FIELD
5181 #undef WRITE_SHORT_FIELD
5182 #undef READ_BYTE_FIELD
5183 #undef WRITE_BYTE_FIELD
5184 
5185 
5186 } } // namespace v8::internal
5187 
5188 #endif // V8_OBJECTS_INL_H_
byte * Address
Definition: globals.h:172
static int SizeOf(Map *map, HeapObject *object)
Definition: objects-inl.h:5092
MUST_USE_RESULT MaybeObject * GetElementWithReceiver(Object *receiver, uint32_t index)
Definition: objects.cc:651
bool FLAG_enable_slow_asserts
#define WRITE_BYTE_FIELD(p, offset, value)
Definition: objects-inl.h:959
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset eval_from_instructions_offset
Definition: objects-inl.h:3650
#define HAS_FAILURE_TAG(value)
Definition: v8globals.h:393
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:3512
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
Definition: objects-inl.h:878
void set_compare_state(byte value)
Definition: objects-inl.h:3264
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static bool IsMatch(uint32_t key, Object *other)
Definition: objects-inl.h:4880
int LinearSearch(SearchMode mode, String *name, int len)
Definition: objects.cc:6053
void set_prohibits_overwriting(bool value)
Definition: objects-inl.h:4835
void set_null_unchecked(Heap *heap, int index)
Definition: objects-inl.h:1865
Code * builtin(Name name)
Definition: builtins.h:312
PropertyAttributes GetPropertyAttribute(String *name)
Definition: objects-inl.h:4778
JSGlobalPropertyCell * Cell(int index)
Definition: objects-inl.h:5047
#define SLOW_ASSERT(condition)
Definition: checks.h:276
int allow_osr_at_loop_nesting_level()
Definition: objects-inl.h:3145
const intptr_t kSmiTagMask
Definition: v8.h:3855
static bool is_the_hole_nan(double value)
Definition: objects-inl.h:1704
V8EXPORT bool IsTrue() const
Definition: api.cc:2135
FixedArray * function_bindings()
Definition: objects-inl.h:4234
static int EntryToIndex(int entry)
Definition: objects.h:2867
static ByteArray * FromDataStartAddress(Address address)
Definition: objects-inl.h:2584
void AddCharacter(uint32_t c)
Definition: objects-inl.h:4673
void set_all_can_write(bool value)
Definition: objects-inl.h:4825
Object * DataAtUnchecked(int index)
Definition: objects-inl.h:4477
void set_has_deoptimization_support(bool value)
Definition: objects-inl.h:3107
static uint32_t Hash(uint32_t key)
Definition: objects-inl.h:4886
MUST_USE_RESULT MaybeObject * CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:178
CheckType check_type()
Definition: objects-inl.h:3209
void set(int index, Object *value)
Definition: objects-inl.h:1695
int GetInternalFieldOffset(int index)
Definition: objects-inl.h:1482
void AddSurrogatePair(uc32 c)
Definition: objects.cc:7236
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments kUsesArguments kFormalParameterCountOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
static bool get(Smi *smi, int bit_position)
Definition: objects.h:8744
void RecordWrite(Address address, int offset)
Definition: heap-inl.h:335
static const int kSize
Definition: objects.h:6151
#define ASSERT_TAG_ALIGNED(address)
Definition: v8checks.h:59
void set_all_can_read(bool value)
Definition: objects-inl.h:4815
FixedArray * unchecked_deoptimization_data()
Definition: objects-inl.h:4391
void set_function_with_prototype(bool value)
Definition: objects-inl.h:2918
static double hole_nan_as_double()
Definition: objects-inl.h:1709
bool InNewSpace(Object *object)
Definition: heap-inl.h:292
unsigned stack_slots()
Definition: objects-inl.h:3171
static String * cast(Object *obj)
#define READ_DOUBLE_FIELD(p, offset)
Definition: objects-inl.h:888
#define READ_INTPTR_FIELD(p, offset)
Definition: objects-inl.h:932
int Lookup(DescriptorArray *array, String *name)
Definition: heap.h:2287
MaybeObject * TryFlatten(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2284
const uint32_t kTwoByteStringTag
Definition: objects.h:450
const int kFailureTypeTagSize
Definition: objects.h:1037
static const uint32_t kExponentMask
Definition: objects.h:1317
void set_language_mode(LanguageMode language_mode)
Definition: objects-inl.h:3844
bool function_with_prototype()
Definition: objects-inl.h:2927
static DescriptorArray * cast(Object *obj)
static Failure * InternalError()
Definition: objects-inl.h:1011
static int SizeOf(Map *map, HeapObject *object)
Definition: objects.h:2328
void set_unary_op_type(byte value)
Definition: objects-inl.h:3228
void clear_instance_descriptors()
Definition: objects-inl.h:3424
Isolate * isolate()
Definition: heap-inl.h:494
int unused_property_fields()
Definition: objects-inl.h:2874
void set_length(Smi *length)
Definition: objects-inl.h:4991
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
Definition: objects-inl.h:4262
Object * InObjectPropertyAt(int index)
Definition: objects-inl.h:1557
static Smi * FromInt(int value)
Definition: objects-inl.h:973
bool IsFastObjectElementsKind(ElementsKind kind)
void IteratePointer(ObjectVisitor *v, int offset)
Definition: objects-inl.h:1185
MUST_USE_RESULT MaybeObject * ToSmi()
Definition: objects-inl.h:815
unsigned stack_check_table_offset()
Definition: objects-inl.h:3196
Map * elements_transition_map()
Definition: objects-inl.h:3502
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2448
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:3391
void AddSurrogatePairNoIndex(uc32 c)
Definition: objects.cc:7244
void SetAstId(int index, Smi *id)
Definition: objects-inl.h:5032
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments kUsesArguments formal_parameter_count
Definition: objects-inl.h:3755
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:304
MUST_USE_RESULT MaybeObject * EnsureCanContainHeapObjectElements()
Definition: objects-inl.h:1246
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
void init_instance_descriptors()
Definition: objects-inl.h:3419
static HeapObject * cast(Object *obj)
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2771
#define READ_UINT32_FIELD(p, offset)
Definition: objects-inl.h:938
void set_function_bindings(FixedArray *bindings)
Definition: objects-inl.h:4240
static const byte kArgumentMarker
Definition: objects.h:7740
static const int kMaxHashCalcLength
Definition: objects.h:7170
bool is_access_check_needed()
Definition: objects-inl.h:2941
static PropertyType ExtractTypeFromFlags(Flags flags)
Definition: objects-inl.h:3359
void set_pre_allocated_property_fields(int value)
Definition: objects-inl.h:2856
static const byte kUndefined
Definition: objects.h:7741
T Max(T a, T b)
Definition: utils.h:222
static AccessorPair * cast(Object *obj)
const int kVariableSizeSentinel
Definition: objects.h:182
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
Definition: objects-inl.h:5151
void Get(int descriptor_number, Descriptor *desc)
Definition: objects-inl.h:2072
static Failure * OutOfMemoryException()
Definition: objects-inl.h:1021
JSFunction * GetConstantFunction(int descriptor_number)
Definition: objects-inl.h:2019
static const int kFastPropertiesSoftLimit
Definition: objects.h:2104
PropertyAttributes property_attributes()
Definition: objects-inl.h:4840
bool IsAsciiRepresentation()
Definition: objects-inl.h:289
static ExternalTwoByteString * cast(Object *obj)
int32_t uc32
Definition: globals.h:274
SeededNumberDictionary * element_dictionary()
Definition: objects-inl.h:4632
static Map * cast(Object *obj)
void set_has_debug_break_slots(bool value)
Definition: objects-inl.h:3122
void SetDataAtUnchecked(int index, Object *value, Heap *heap)
Definition: objects-inl.h:4491
bool has_non_instance_prototype()
Definition: objects-inl.h:2913
static const byte kTheHole
Definition: objects.h:7738
MUST_USE_RESULT MaybeObject * GetPropertyWithReceiver(Object *receiver, String *key, PropertyAttributes *attributes)
Definition: objects.cc:151
int BinarySearch(String *name, int low, int high)
Definition: objects.cc:6026
Flag flags[]
Definition: flags.cc:1467
static const int kExponentBias
Definition: objects.h:1321
bool attached_to_shared_function_info()
Definition: objects-inl.h:2967
Builtins * builtins()
Definition: isolate.h:909
int int32_t
Definition: unicode.cc:47
void set_context(Object *context)
Definition: objects-inl.h:4123
#define READ_FIELD(p, offset)
Definition: objects-inl.h:865
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5052
bool IsTransitionOnly(int descriptor_number)
Definition: objects-inl.h:2043
void Set(int descriptor_number, Descriptor *desc, const WhitenessWitness &)
Definition: objects-inl.h:2079
bool SameValue(Object *other)
Definition: objects.cc:764
#define MAKE_STRUCT_CAST(NAME, Name, name)
Definition: objects-inl.h:2244
static Failure * Exception()
Definition: objects-inl.h:1016
static const int kSize
Definition: objects.h:6036
static Foreign * cast(Object *obj)
MUST_USE_RESULT MaybeObject * GetElementsTransitionMapSlow(ElementsKind elements_kind)
Definition: objects.cc:2328
void set_map(Map *value)
Definition: objects-inl.h:1135
static bool IsMatch(String *key, Object *other)
Definition: objects-inl.h:4913
static const int kTransitionsOffset
Definition: objects.h:2621
byte binary_op_result_type()
Definition: objects-inl.h:3246
FixedArray * literals()
Definition: objects-inl.h:4222
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(FunctionTemplateInfo
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo
byte * instruction_end()
Definition: objects-inl.h:4381
uint16_t SlicedStringGet(int index)
Definition: objects.cc:6793
static Smi * FromIntptr(intptr_t value)
Definition: objects-inl.h:982
Context * global_context()
Definition: contexts.cc:58
static Handle< Object > TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:9848
#define READ_BYTE_FIELD(p, offset)
Definition: objects-inl.h:956
static const int kSize
Definition: objects.h:6433
FlagType type_
Definition: flags.cc:1351
#define ASSERT(condition)
Definition: checks.h:270
void set_profiler_ticks(int ticks)
Definition: objects-inl.h:3164
static MUST_USE_RESULT MaybeObject * AsObject(String *key)
Definition: objects-inl.h:4931
const int kPointerSizeLog2
Definition: globals.h:246
void set_start_position(int start_position)
Definition: objects-inl.h:3907
#define WRITE_INT_FIELD(p, offset, value)
Definition: objects-inl.h:929
unsigned short uint16_t
Definition: unicode.cc:46
void set_optimizable(bool value)
Definition: objects-inl.h:3094
void SetNullValueUnchecked(int descriptor_number, Heap *heap)
Definition: objects-inl.h:1990
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
Definition: objects-inl.h:5057
Object * BypassGlobalProxy()
Definition: objects-inl.h:4784
#define READ_INT64_FIELD(p, offset)
Definition: objects-inl.h:944
#define WRITE_UINT32_FIELD(p, offset, value)
Definition: objects-inl.h:941
static Context * cast(Object *context)
Definition: contexts.h:207
static Flags ComputeMonomorphicFlags(Kind kind, PropertyType type, ExtraICState extra_ic_state=kNoExtraICState, InlineCacheHolderFlag holder=OWN_MAP, int argc=-1)
Definition: objects-inl.h:3335
static uint32_t HashForObject(Object *key, Object *object)
Definition: objects-inl.h:4950
EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedInt
kPropertyAccessorsOffset kNamedPropertyHandlerOffset instance_template
Definition: objects-inl.h:3618
#define WRITE_INTPTR_FIELD(p, offset, value)
Definition: objects-inl.h:935
const uint32_t kStringRepresentationMask
Definition: objects.h:455
bool NonFailureIsHeapObject()
Definition: objects-inl.h:165
PropertyType type()
Definition: objects-inl.h:3043
int SizeFromMap(Map *map)
Definition: objects-inl.h:2809
void set_compiled_optimizable(bool value)
Definition: objects-inl.h:3137
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments uses_arguments
Definition: objects-inl.h:3699
static MUST_USE_RESULT MaybeObject * AsObject(Object *key)
Definition: objects-inl.h:4958
void set(int index, float value)
Definition: objects-inl.h:2757
Object * DataAt(int index)
Definition: objects-inl.h:4471
Object ** GetKeySlot(int descriptor_number)
Definition: objects-inl.h:1962
bool IsInternalError() const
Definition: objects-inl.h:994
bool HasSpecificClassOf(String *name)
Definition: objects-inl.h:828
int isnan(double x)
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2695
const int kFastElementsKindCount
Definition: elements-kind.h:77
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:2432
void ReplaceCode(Code *code)
Definition: objects-inl.h:4090
void set_map_and_elements(Map *map, FixedArrayBase *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1352
Factory * factory()
Definition: isolate.h:977
static ExternalAsciiString * cast(Object *obj)
MUST_USE_RESULT MaybeObject * EnsureWritableFastElements()
Definition: objects-inl.h:4608
PropertyAttributes
void set_the_hole(int index)
Definition: objects-inl.h:1838
void set_foreign_address(Address value)
Definition: objects-inl.h:4309
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:5020
void SeqTwoByteStringSet(int index, uint16_t value)
Definition: objects-inl.h:2392
static Code * cast(Object *obj)
const uint32_t kAsciiDataHintTag
Definition: objects.h:479
#define CAST_ACCESSOR(type)
Definition: objects-inl.h:77
const uint32_t kShortExternalStringMask
Definition: objects.h:483
void set(int index, uint32_t value)
Definition: objects-inl.h:2738
bool HasElementWithReceiver(JSReceiver *receiver, uint32_t index)
Definition: objects.cc:9058
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool AsArrayIndex(uint32_t *index)
Definition: objects-inl.h:4748
Object * GetValue(int descriptor_number)
Definition: objects-inl.h:1984
BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled) void SharedFunctionInfo
Definition: objects-inl.h:3809
static const int kSize
Definition: objects.h:5821
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:963
const int kIntSize
Definition: globals.h:231
static Smi * cast(Object *object)
void set_literals(FixedArray *literals)
Definition: objects-inl.h:4228
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
Definition: objects-inl.h:5143
static uint32_t Hash(Object *key)
Definition: objects-inl.h:4943
unsigned int seed
Definition: test-strings.cc:17
void set(int index, uint16_t value)
Definition: objects-inl.h:2700
void ClearCodeCache(Heap *heap)
Definition: objects-inl.h:4963
bool Equals(String *other)
Definition: objects-inl.h:2275
static const int kHeaderSize
Definition: objects.h:1220
void set_used_for_prototype(bool value)
Definition: objects-inl.h:2985
Code * javascript_builtin_code(Builtins::JavaScript id)
Definition: objects-inl.h:4270
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2733
int GetInObjectPropertyOffset(int index)
Definition: objects-inl.h:1549
bool contains(byte *pc)
Definition: objects-inl.h:4417
Object * GetInternalField(int index)
Definition: objects-inl.h:1488
static const int kSize
Definition: objects.h:8112
uint16_t SeqAsciiStringGet(int index)
Definition: objects-inl.h:2353
void set_binary_op_type(byte value)
Definition: objects-inl.h:3240
MUST_USE_RESULT MaybeObject * CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:4604
uint8_t byte
Definition: globals.h:171
void set(int index, int16_t value)
Definition: objects-inl.h:2681
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition: objects-inl.h:1566
uint16_t ExternalTwoByteStringGet(int index)
Definition: objects-inl.h:2518
static const int kFirstOffset
Definition: objects.h:7420
bool IsPropertyDescriptor(T *desc)
Definition: property.h:157
void set_null(int index)
Definition: objects-inl.h:1826
ByteArray * unchecked_relocation_info()
Definition: objects-inl.h:4397
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset this_property_assignments
Definition: objects-inl.h:3674
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
Definition: objects.cc:3518
static const int kKindOffset
Definition: objects.h:7732
bool IsNull() const
Definition: v8.h:4295
const uint32_t kNotStringTag
Definition: objects.h:438
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:1802
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit has_only_simple_this_property_assignments
Definition: objects-inl.h:3691
static const int kParentOffset
Definition: objects.h:7473
String * GetKey(int descriptor_number)
Definition: objects-inl.h:1970
bool HasNonStrictArgumentsElements()
Definition: objects-inl.h:4561
MUST_USE_RESULT MaybeObject * GetIdentityHash(CreationFlag flag)
Definition: objects-inl.h:4795
const uint64_t kHoleNanInt64
Definition: v8globals.h:480
void set_is_pregenerated(bool value)
Definition: objects-inl.h:3080
#define READ_SHORT_FIELD(p, offset)
Definition: objects-inl.h:950
#define FIELD_ADDR(p, offset)
Definition: objects-inl.h:862
void set_opt_reenable_tries(int value)
Definition: objects-inl.h:4017
#define UNREACHABLE()
Definition: checks.h:50
Object * GetElementNoExceptionThrown(uint32_t index)
Definition: objects-inl.h:842
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static SeededNumberDictionary * cast(Object *obj)
Definition: objects.h:3207
virtual void Validate(JSObject *obj)=0
MUST_USE_RESULT MaybeObject * SetContent(FixedArrayBase *storage)
Definition: objects-inl.h:5004
const uint32_t kIsSymbolMask
Definition: objects.h:443
void set_unchecked(int index, Smi *value)
Definition: objects-inl.h:1848
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2676
static const int kExponentShift
Definition: objects.h:1322
bool IsStringObjectWithCharacterAt(uint32_t index)
Definition: objects-inl.h:1656
static const int kValueOffset
Definition: objects.h:1307
const int kFailureTagSize
Definition: v8globals.h:72
String * GetUnderlying()
Definition: objects-inl.h:2342
const uint32_t kHoleNanUpper32
Definition: v8globals.h:476
static InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
Definition: objects-inl.h:3369
const int kDoubleSize
Definition: globals.h:232
void set_undefined(int index)
Definition: objects-inl.h:1812
static SlicedString * cast(Object *obj)
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:5601
int pre_allocated_property_fields()
Definition: objects-inl.h:2804
static uint32_t SeededHash(uint32_t key, uint32_t seed)
Definition: objects-inl.h:4897
#define WRITE_BARRIER(heap, object, offset, value)
Definition: objects-inl.h:871
#define HAS_SMI_TAG(value)
Definition: v8globals.h:390
void InitializeBody(int object_size)
Definition: objects-inl.h:1629
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
Definition: objects-inl.h:763
static const int kFirstOffset
Definition: objects.h:2623
bool IsAsciiRepresentationUnderneath()
Definition: objects-inl.h:301
static Failure * RetryAfterGC()
Definition: objects-inl.h:1032
void IteratePointers(ObjectVisitor *v, int start, int end)
Definition: objects-inl.h:1179
int SeqTwoByteStringSize(InstanceType instance_type)
Definition: objects-inl.h:2398
static const uchar kMaxNonSurrogateCharCode
Definition: unicode.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1051
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2473
static Failure * cast(MaybeObject *object)
Definition: objects-inl.h:485
const uint32_t kIsIndirectStringMask
Definition: objects.h:462
#define READ_INT_FIELD(p, offset)
Definition: objects-inl.h:926
static const int kMinValue
Definition: objects.h:1004
bool ToArrayIndex(uint32_t *index)
Definition: objects-inl.h:1637
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
int get_int(int index)
Definition: objects-inl.h:2578
MUST_USE_RESULT MaybeObject * ResetElements()
Definition: objects-inl.h:1394
ElementsKind GetElementsKind()
Definition: objects-inl.h:4503
SharedFunctionInfo * unchecked_shared()
Definition: objects-inl.h:4117
static Object * RawUninitializedSentinel(Heap *heap)
Definition: objects-inl.h:5062
static Handle< Map > GetElementsTransitionMap(Handle< JSObject > object, ElementsKind to_kind)
Definition: objects.cc:2303
MUST_USE_RESULT MaybeObject * AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4720
const int kPointerSize
Definition: globals.h:234
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit is_expression
Definition: objects-inl.h:3686
static const int kIsNotArrayIndexMask
Definition: objects.h:7117
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit compiler_hints
Definition: objects-inl.h:3691
#define TYPE_CHECKER(type, instancetype)
Definition: objects-inl.h:70
static Oddball * cast(Object *obj)
static Address & Address_at(Address addr)
Definition: v8memory.h:71
intptr_t OffsetFrom(T x)
Definition: utils.h:126
int GetFieldIndex(int descriptor_number)
Definition: objects-inl.h:2014
const int kHeapObjectTag
Definition: v8.h:3848
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2657
bool IsAligned(T value, U alignment)
Definition: utils.h:206
static SeqAsciiString * cast(Object *obj)
void set_inobject_properties(int value)
Definition: objects-inl.h:2850
unsigned safepoint_table_offset()
Definition: objects-inl.h:3183
void set_hash_field(uint32_t value)
Definition: objects-inl.h:2267
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
Definition: objects-inl.h:2524
bool HasElement(uint32_t index)
Definition: objects-inl.h:4802
#define WRITE_SHORT_FIELD(p, offset, value)
Definition: objects-inl.h:953
const uint32_t kAsciiDataHintMask
Definition: objects.h:478
AllocationSpace allocation_space() const
Definition: objects-inl.h:1004
MUST_USE_RESULT MaybeObject * set_initial_map_and_cache_transitions(Map *value)
Definition: objects-inl.h:4143
bool IsTwoByteRepresentationUnderneath()
Definition: objects-inl.h:317
static FunctionTemplateInfo * cast(Object *obj)
kPropertyAccessorsOffset named_property_handler
Definition: objects-inl.h:3614
static const int kFirstDeoptEntryIndex
Definition: objects.h:4016
static const int kPropertiesOffset
Definition: objects.h:2113
T RoundUp(T x, intptr_t m)
Definition: utils.h:150
void set_elements_transition_map(Map *transitioned_map)
Definition: objects-inl.h:3507
static FixedDoubleArray * cast(Object *obj)
Object * FastPropertyAt(int index)
Definition: objects-inl.h:1521
bool IsTwoByteRepresentation()
Definition: objects-inl.h:295
uint16_t ExternalAsciiStringGet(int index)
Definition: objects-inl.h:2486
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:3380
bool is_inline_cache_stub()
Definition: objects-inl.h:3306
bool IsFastSmiElementsKind(ElementsKind kind)
HeapObject * unchecked_prototype_transitions()
Definition: objects-inl.h:3560
const uint32_t kShortExternalStringTag
Definition: objects.h:484
static int SmiValue(internal::Object *value)
Definition: v8.h:3950
ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind)
int SeqAsciiStringSize(InstanceType instance_type)
Definition: objects-inl.h:2403
Object * FastPropertyAtPut(int index, Object *value)
Definition: objects-inl.h:1534
void set_kind(byte kind)
Definition: objects-inl.h:1418
void Update(DescriptorArray *array, String *name, int result)
Definition: heap.h:2296
static int GetIdentityHash(Handle< JSObject > obj)
Definition: objects.cc:3491
StringRepresentationTag
Definition: objects.h:456
static int SizeFor(int length)
Definition: objects.h:2369
static const int kElementsOffset
Definition: objects.h:2114
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
Definition: objects-inl.h:1769
void set_resource(const Resource *buffer)
Definition: objects-inl.h:2505
PropertyDetails GetDetails(int descriptor_number)
Definition: objects-inl.h:1996
Object ** GetFirstElementAddress()
Definition: objects-inl.h:1214
static uint32_t HashForObject(uint32_t key, Object *object)
Definition: objects-inl.h:4891
BuiltinFunctionId builtin_function_id()
Definition: objects-inl.h:3967
MUST_USE_RESULT MaybeObject * Copy()
Definition: objects-inl.h:5026
const uint32_t kStringTag
Definition: objects.h:437
byte * relocation_start()
Definition: objects-inl.h:4402
InlineCacheState ic_state()
Definition: objects-inl.h:3024
static uint32_t HashForObject(String *key, Object *object)
Definition: objects-inl.h:4926
bool IsUndefined() const
Definition: v8.h:4277
void set_construction_count(int value)
Definition: objects-inl.h:3792
double get_scalar(int index)
Definition: objects-inl.h:1721
uint16_t ConsStringGet(int index)
Definition: objects.cc:6762
DescriptorLookupCache * descriptor_lookup_cache()
Definition: isolate.h:842
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1146
void set_check_type(CheckType value)
Definition: objects-inl.h:3216
void set_to_boolean_state(byte value)
Definition: objects-inl.h:3288
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset needs_access_check
Definition: objects-inl.h:3682
static int OffsetOfElementAt(int index)
Definition: objects.h:2291
void set(int index, uint8_t value)
Definition: objects-inl.h:2662
PropertyAttributes GetPropertyAttributeWithReceiver(JSReceiver *receiver, String *name)
Definition: objects.cc:3084
static int SizeFor(int length)
Definition: objects.h:2288
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
Definition: objects-inl.h:3354
Context * context()
Definition: isolate.h:518
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
void SetCell(int index, JSGlobalPropertyCell *cell)
Definition: objects-inl.h:5042
static ElementsAccessor * ForKind(ElementsKind elements_kind)
Definition: elements.h:134
static SeqTwoByteString * cast(Object *obj)
const int kElementsKindCount
Definition: elements-kind.h:76
void SetDataAt(int index, Object *value)
Definition: objects-inl.h:4484
static bool IsMatch(Object *key, Object *other)
Definition: objects-inl.h:4937
static const int kHeaderSize
Definition: objects.h:2233
bool HasElementWithHandler(uint32_t index)
Definition: objects.cc:272
void set(int index, double value)
Definition: objects-inl.h:1746
MUST_USE_RESULT MaybeObject * NumberFromDouble(double value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2971
static InlineCacheState ExtractICStateFromFlags(Flags flags)
Definition: objects-inl.h:3349
bool HasProperty(String *name)
Definition: objects-inl.h:4762
static const int kSize
Definition: objects.h:6189
bool has_deoptimization_support()
Definition: objects-inl.h:3100
static Kind ExtractKindFromFlags(Flags flags)
Definition: objects-inl.h:3344
static const int kMapOffset
Definition: objects.h:1219
bool has_named_interceptor()
Definition: objects.h:4638
static int ExtractArgumentsCountFromFlags(Flags flags)
Definition: objects-inl.h:3364
bool is_the_hole(int index)
Definition: objects-inl.h:1681
void set_instance_type(InstanceType value)
Definition: objects-inl.h:2869
const uint32_t kIsNotStringMask
Definition: objects.h:436
bool IsOutOfMemoryException() const
Definition: objects-inl.h:999
static HeapNumber * cast(Object *obj)
int32_t get_scalar(int index)
Definition: objects-inl.h:2707
byte get(int index)
Definition: objects-inl.h:2566
static StringDictionary * cast(Object *obj)
Definition: objects.h:3153
void set_value(double value)
Definition: objects-inl.h:1195
MUST_USE_RESULT MaybeObject * CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:173
static const int kLengthOffset
Definition: objects.h:2232
static double nan_value()
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2714
const int kSpaceTagMask
Definition: v8globals.h:196
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:285
#define STRUCT_LIST(V)
Definition: objects.h:429
static const int kBitField3StorageOffset
Definition: objects.h:2620
AccessorDescriptor * GetCallbacks(int descriptor_number)
Definition: objects-inl.h:2030
V8EXPORT bool IsNumber() const
Definition: api.cc:2175
ExtraICState extra_ic_state()
Definition: objects-inl.h:3037
static int SizeFor(int length)
Definition: objects.h:7313
const intptr_t kObjectAlignment
Definition: v8globals.h:44
void SetInternalField(int index, Object *value)
Definition: objects-inl.h:1497
PropertyType GetType(int descriptor_number)
Definition: objects-inl.h:2009
static JSGlobalPropertyCell * cast(Object *obj)
name_should_print_as_anonymous
Definition: objects-inl.h:3867
MUST_USE_RESULT MaybeObject * NumberFromUint32(uint32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:237
IncrementalMarking * incremental_marking()
Definition: heap.h:1524
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:1737
bool has_indexed_interceptor()
Definition: objects.h:4647
ElementsKind GetInitialFastElementsKind()
static const uint32_t kHashBitMask
Definition: objects.h:7125
bool HasPropertyWithHandler(String *name)
Definition: objects.cc:2516
uint16_t uc16
Definition: globals.h:273
Object * GetBackPointer()
Definition: objects-inl.h:3492
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, PropertyType type=NORMAL, int argc=-1, InlineCacheHolderFlag holder=OWN_MAP)
Definition: objects-inl.h:3312
void AddCharacterNoIndex(uint32_t c)
Definition: objects-inl.h:4706
static const uint32_t kSignMask
Definition: objects.h:1316
void set_bit_field(byte value)
Definition: objects-inl.h:2889
static int SizeFor(int length)
Definition: objects.h:7367
static const int kSize
Definition: objects.h:6349
const int kSmiShiftSize
Definition: v8.h:3899
static JSValue * cast(Object *obj)
Definition: objects-inl.h:4327
const int kSmiTagSize
Definition: v8.h:3854
static const int kHeaderSize
Definition: objects.h:4513
uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
Definition: objects-inl.h:4733
FunctionTemplateInfo * get_api_func_data()
Definition: objects-inl.h:3956
void EnsureSize(int minimum_size_of_backing_fixed_array)
Definition: objects-inl.h:4973
void set(int index, double value)
Definition: objects-inl.h:2776
bool is_undetectable()
Definition: objects.h:4661
#define WRITE_FIELD(p, offset, value)
Definition: objects-inl.h:868
static const int kFullStringRepresentationMask
Definition: v8.h:3921
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:146
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset flag
Definition: objects-inl.h:3682
void set_major_key(int value)
Definition: objects-inl.h:3064
void Set(int index, uint16_t value)
Definition: objects-inl.h:2326
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
Definition: objects-inl.h:1788
#define HEAP
Definition: isolate.h:1408
V8EXPORT bool IsFalse() const
Definition: api.cc:2141
void set_is_access_check_needed(bool access_check_needed)
Definition: objects-inl.h:2932
MUST_USE_RESULT MaybeObject * CreateNextElementsTransition(ElementsKind elements_kind)
Definition: objects.cc:2266
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
Definition: objects-inl.h:851
static const int kSize
Definition: objects.h:6281
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
static const byte kNull
Definition: objects.h:7739
const int kShortSize
Definition: globals.h:230
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2607
InstanceType instance_type()
Definition: objects-inl.h:2864
static JSProxy * cast(Object *obj)
static const int kMaxFastProperties
Definition: objects.h:2105
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1163
Counters * counters()
Definition: isolate.h:804
void set(int index, byte value)
Definition: objects-inl.h:2572
void SetDetailsUnchecked(int descriptor_number, Smi *value)
Definition: objects-inl.h:2003
static double canonical_not_the_hole_nan_as_double()
Definition: objects-inl.h:1714
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping true
Definition: flags.cc:157
#define INT_ACCESSORS(holder, name, offset)
Definition: objects-inl.h:84
const int kSmiTag
Definition: v8.h:3853
bool TooManyFastProperties(int properties, StoreFromKeyed store_mode)
Definition: objects-inl.h:1609
static FixedArray * cast(Object *obj)
bool IsProperty(int descriptor_number)
Definition: objects-inl.h:2037
StringHasher(int length, uint32_t seed)
Definition: objects-inl.h:4657
static const int kHeaderSize
Definition: objects.h:2115
void set(int index, int8_t value)
Definition: objects-inl.h:2643
static Smi * set(Smi *smi, int bit_position, bool v)
Definition: objects.h:8752
bool used_for_prototype()
Definition: objects-inl.h:2994
void SeqAsciiStringSet(int index, uint16_t value)
Definition: objects-inl.h:2359
void set_parent(String *parent)
Definition: objects-inl.h:2413
bool IsCompatibleReceiver(Object *receiver)
Definition: objects-inl.h:4850
static HashTable * cast(Object *obj)
Definition: objects-inl.h:2250
void set_is_extensible(bool value)
Definition: objects-inl.h:2946
ElementsKind elements_kind()
Definition: objects.h:4685
void set_is_shared(bool value)
Definition: objects-inl.h:2972
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
Definition: objects.cc:244
const int kFailureTag
Definition: v8globals.h:71
void set_compare_operation(byte value)
Definition: objects-inl.h:3276
void set_attached_to_shared_function_info(bool value)
Definition: objects-inl.h:2959
void set_stack_slots(unsigned slots)
Definition: objects-inl.h:3177
double FastI2D(int x)
Definition: conversions.h:73
MUST_USE_RESULT MaybeObject * NumberFromInt32(int32_t value, PretenureFlag pretenure=NOT_TENURED)
Definition: heap-inl.h:229
const uint32_t kIsIndirectStringTag
Definition: objects.h:463
void SetEntry(int entry, Object *key, Object *value)
Definition: objects-inl.h:4858
Object * GetCallbacksObject(int descriptor_number)
Definition: objects-inl.h:2024
void set_instance_size(int value)
Definition: objects-inl.h:2842
Object * get(int index)
Definition: objects-inl.h:1675
JSFunction * unchecked_constructor()
Definition: objects-inl.h:2999
void set(int index, int32_t value)
Definition: objects-inl.h:2719
bool IsFastHoleyElementsKind(ElementsKind kind)
static uint32_t Hash(String *key)
Definition: objects-inl.h:4921
bool IsNullDescriptor(int descriptor_number)
Definition: objects-inl.h:2067
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
Definition: objects-inl.h:4276
static const int kSize
Definition: objects.h:5990
ElementsAccessor * GetElementsAccessor()
Definition: objects-inl.h:4526
bool IsInstanceOf(FunctionTemplateInfo *type)
Definition: objects-inl.h:136
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit kIsExpressionBit kHasOnlySimpleThisPropertyAssignments kUsesArguments kFormalParameterCountOffset kStartPositionAndTypeOffset kCompilerHintsOffset stress_deopt_counter
Definition: objects-inl.h:3782
String * TryFlattenGetString(PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:2292
void set_bit_field3_storage(int value)
Definition: objects-inl.h:1895
void set_bit_field2(byte value)
Definition: objects-inl.h:2899
static MUST_USE_RESULT MaybeObject * AsObject(uint32_t key)
Definition: objects-inl.h:4908
void set_finger_index(int finger_index)
Definition: objects-inl.h:2561
void set_map_word(MapWord map_word)
Definition: objects-inl.h:1156
void set_binary_op_result_type(byte value)
Definition: objects-inl.h:3252
#define SLOT_ADDR(obj, offset)
Definition: objects-inl.h:5139
bool has_debug_break_slots()
Definition: objects-inl.h:3115
static const int kSize
Definition: objects.h:8013
void set(int index, uint8_t value)
Definition: objects-inl.h:2612
static const byte kNotBooleanMask
Definition: objects.h:7737
static const int kExternalTwoByteRepresentationTag
Definition: v8.h:3922
const uint32_t kSymbolTag
Definition: objects.h:445
bool HasLocalProperty(String *name)
Definition: objects-inl.h:4770
const int kFailureTypeTagMask
Definition: objects.h:1038
static const byte kFalse
Definition: objects.h:7735
Definition: objects.h:6746
Type type() const
Definition: objects-inl.h:989
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2752
static Flags RemoveTypeFromFlags(Flags flags)
Definition: objects-inl.h:3374
void set_visitor_id(int visitor_id)
Definition: objects-inl.h:2788
static bool HasHeapObjectTag(internal::Object *value)
Definition: v8.h:3941
const uint32_t kAsciiStringTag
Definition: objects.h:451
#define ACCESSORS_TO_SMI(holder, name, offset)
Definition: objects-inl.h:98
T Min(T a, T b)
Definition: utils.h:229
void set_property_attributes(PropertyAttributes attributes)
Definition: objects-inl.h:4845
signed short int16_t
Definition: unicode.cc:45
static const int kSize
Definition: objects.h:6111
void set_code(Code *code)
Definition: objects-inl.h:4079
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count, kIcWithTypeinfoCountOffset) ACCESSORS(TypeFeedbackInfo
MUST_USE_RESULT MaybeObject * GetHash(CreationFlag flag)
Definition: objects.cc:740
static ConsString * cast(Object *obj)
void set_safepoint_table_offset(unsigned offset)
Definition: objects-inl.h:3189
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1669
bool is_compiled_optimizable()
Definition: objects-inl.h:3130
static const int kTransitionsIndex
Definition: objects.h:2609
void set_flags(Flags flags)
Definition: objects-inl.h:3009
static const int kMaxValue
Definition: objects.h:1006
static const int kCodeCacheOffset
Definition: objects.h:4966
MUST_USE_RESULT MaybeObject * get(int index)
Definition: objects-inl.h:2638
#define WRITE_DOUBLE_FIELD(p, offset, value)
Definition: objects-inl.h:906
static const int kNotFound
Definition: objects.h:2606
void set_non_instance_prototype(bool value)
Definition: objects-inl.h:2904
const uc32 kMaxAsciiCharCode
Definition: globals.h:277
uint16_t SeqTwoByteStringGet(int index)
Definition: objects-inl.h:2386
Object ** GetValueSlot(int descriptor_number)
Definition: objects-inl.h:1976
StringDictionary * property_dictionary()
Definition: objects-inl.h:4626
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
Definition: objects-inl.h:4901
const int kCharSize
Definition: globals.h:229
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
static const byte kTrue
Definition: objects.h:7736
static const int kExponentOffset
Definition: objects.h:1313
void set_allow_osr_at_loop_nesting_level(int level)
Definition: objects-inl.h:3151
static JSObject * cast(Object *obj)
FlagType type() const
Definition: flags.cc:1358
uint32_t RoundUpToPowerOf2(uint32_t x)
Definition: utils.h:186
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset access_check_info
Definition: objects-inl.h:3624
int64_t get_representation(int index)
Definition: objects-inl.h:1730
#define MAKE_STRUCT_CASE(NAME, Name, name)
Object * javascript_builtin(Builtins::JavaScript id)
Definition: objects-inl.h:4256
PropertyAttributes GetLocalPropertyAttribute(String *name)
Definition: objects.cc:3134
int FastD2I(double x)
Definition: conversions.h:64
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset kNeedsAccessCheckBit start_position_and_type
Definition: objects-inl.h:3686
void set_initial_map(Map *value)
Definition: objects-inl.h:4138
bool IsFastDoubleElementsKind(ElementsKind kind)
void set_has_function_cache(bool flag)
Definition: objects-inl.h:3300
MUST_USE_RESULT MaybeObject * EnsureCanContainElements(Object **elements, uint32_t count, EnsureElementsMode mode)
Definition: objects-inl.h:1260
static const int kFirstIndex
Definition: objects.h:2611
void set_unused_property_fields(int value)
Definition: objects-inl.h:2879
const uint32_t kStringEncodingMask
Definition: objects.h:449
void init_prototype_transitions(Object *undefined)
Definition: objects-inl.h:3554
void set_stack_check_table_offset(unsigned offset)
Definition: objects-inl.h:3202
void set_bit_field3(int value)
Definition: objects-inl.h:3478
static int ComputeCapacity(int at_least_space_for)
Definition: objects-inl.h:2124
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
Definition: objects-inl.h:1580
static JSFunction * cast(Object *obj)