v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
serialize.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "accessors.h"
31 #include "api.h"
32 #include "bootstrapper.h"
33 #include "execution.h"
34 #include "global-handles.h"
35 #include "ic-inl.h"
36 #include "natives.h"
37 #include "platform.h"
38 #include "runtime.h"
39 #include "serialize.h"
40 #include "stub-cache.h"
41 #include "v8threads.h"
42 
43 namespace v8 {
44 namespace internal {
45 
46 
47 // -----------------------------------------------------------------------------
48 // Coding of external references.
49 
50 // The encoding of an external reference. The type is in the high word.
51 // The id is in the low word.
52 static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
53  return static_cast<uint32_t>(type) << 16 | id;
54 }
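
A quick aside (an editorial sketch, not part of serialize.cc): the packed word can be taken apart again with a shift and a mask, since the type sits in bits 16-31 and the id in bits 0-15. The concrete TypeCode value below is a stand-in chosen for illustration.

    #include <assert.h>
    #include <stdint.h>

    int main() {
      // Pack a hypothetical type 4 with id 7, as EncodeExternal would.
      uint32_t code = (static_cast<uint32_t>(4) << 16) | 7;
      assert(code == 0x00040007u);
      assert((code >> 16) == 4u);      // recover the type from the high word
      assert((code & 0xffffu) == 7u);  // recover the id from the low word
      return 0;
    }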
55 
56 
57 static int* GetInternalPointer(StatsCounter* counter) {
58  // All counters refer to dummy_counter if deserializing happens without
59  // setting up counters.
60  static int dummy_counter = 0;
61  return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
62 }
63 
64 
65 ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
66  ExternalReferenceTable* external_reference_table =
67  isolate->external_reference_table();
68  if (external_reference_table == NULL) {
69  external_reference_table = new ExternalReferenceTable(isolate);
70  isolate->set_external_reference_table(external_reference_table);
71  }
72  return external_reference_table;
73 }
74 
75 
76 void ExternalReferenceTable::AddFromId(TypeCode type,
77  uint16_t id,
78  const char* name,
79  Isolate* isolate) {
80  Address address;
81  switch (type) {
82  case C_BUILTIN: {
83  ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
84  address = ref.address();
85  break;
86  }
87  case BUILTIN: {
88  ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
89  address = ref.address();
90  break;
91  }
92  case RUNTIME_FUNCTION: {
93  ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
94  address = ref.address();
95  break;
96  }
97  case IC_UTILITY: {
98  ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
99  isolate);
100  address = ref.address();
101  break;
102  }
103  default:
104  UNREACHABLE();
105  return;
106  }
107  Add(address, type, id, name);
108 }
109 
110 
111 void ExternalReferenceTable::Add(Address address,
112  TypeCode type,
113  uint16_t id,
114  const char* name) {
115  ASSERT_NE(NULL, address);
116  ExternalReferenceEntry entry;
117  entry.address = address;
118  entry.code = EncodeExternal(type, id);
119  entry.name = name;
120  ASSERT_NE(0, entry.code);
121  refs_.Add(entry);
122  if (id > max_id_[type]) max_id_[type] = id;
123 }
124 
125 
126 void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
127  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
128  max_id_[type_code] = 0;
129  }
130 
131  // The following populates all of the different types of external references
132  // into the ExternalReferenceTable.
133  //
134  // NOTE: This function was originally 100k of code. It has since been
135  // rewritten to be mostly table driven, as the callback macro style tends to
136  // very easily cause code bloat. Please be careful in the future when adding
137  // new references.
138 
139  struct RefTableEntry {
140  TypeCode type;
141  uint16_t id;
142  const char* name;
143  };
144 
145  static const RefTableEntry ref_table[] = {
146  // Builtins
147 #define DEF_ENTRY_C(name, ignored) \
148  { C_BUILTIN, \
149  Builtins::c_##name, \
150  "Builtins::" #name },
151 
152  BUILTIN_LIST_C(DEF_ENTRY_C)
153 #undef DEF_ENTRY_C
154 
155 #define DEF_ENTRY_C(name, ignored) \
156  { BUILTIN, \
157  Builtins::k##name, \
158  "Builtins::" #name },
159 #define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)
160 
161  BUILTIN_LIST_C(DEF_ENTRY_C)
162  BUILTIN_LIST_A(DEF_ENTRY_A)
163  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
164 #undef DEF_ENTRY_C
165 #undef DEF_ENTRY_A
166 
167  // Runtime functions
168 #define RUNTIME_ENTRY(name, nargs, ressize) \
169  { RUNTIME_FUNCTION, \
170  Runtime::k##name, \
171  "Runtime::" #name },
172 
173  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
174 #undef RUNTIME_ENTRY
175 
176  // IC utilities
177 #define IC_ENTRY(name) \
178  { IC_UTILITY, \
179  IC::k##name, \
180  "IC::" #name },
181 
182  IC_UTIL_LIST(IC_ENTRY)
183 #undef IC_ENTRY
184  }; // end of ref_table[].
185 
186  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
187  AddFromId(ref_table[i].type,
188  ref_table[i].id,
189  ref_table[i].name,
190  isolate);
191  }
192 
193 #ifdef ENABLE_DEBUGGER_SUPPORT
194  // Debug addresses
195  Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
196  DEBUG_ADDRESS,
197  Debug::k_after_break_target_address << kDebugIdShift,
198  "Debug::after_break_target_address()");
199  Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
200  DEBUG_ADDRESS,
201  Debug::k_debug_break_slot_address << kDebugIdShift,
202  "Debug::debug_break_slot_address()");
203  Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
204  DEBUG_ADDRESS,
205  Debug::k_debug_break_return_address << kDebugIdShift,
206  "Debug::debug_break_return_address()");
207  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
208  DEBUG_ADDRESS,
209  Debug::k_restarter_frame_function_pointer << kDebugIdShift,
210  "Debug::restarter_frame_function_pointer_address()");
211 #endif
212 
213  // Stat counters
214  struct StatsRefTableEntry {
215  StatsCounter* (Counters::*counter)();
216  uint16_t id;
217  const char* name;
218  };
219 
220  const StatsRefTableEntry stats_ref_table[] = {
221 #define COUNTER_ENTRY(name, caption) \
222  { &Counters::name, \
223  Counters::k_##name, \
224  "Counters::" #name },
225 
226  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
227  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
228 #undef COUNTER_ENTRY
229  }; // end of stats_ref_table[].
230 
231  Counters* counters = isolate->counters();
232  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
233  Add(reinterpret_cast<Address>(GetInternalPointer(
234  (counters->*(stats_ref_table[i].counter))())),
235  STATS_COUNTER,
236  stats_ref_table[i].id,
237  stats_ref_table[i].name);
238  }
239 
240  // Top addresses
241 
242  const char* AddressNames[] = {
243 #define BUILD_NAME_LITERAL(CamelName, hacker_name) \
244  "Isolate::" #hacker_name "_address",
245  FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
246  NULL
247 #undef BUILD_NAME_LITERAL
248  };
249 
250  for (uint16_t i = 0; i < Isolate::kIsolateAddressCount; ++i) {
251  Add(isolate->get_address_from_id((Isolate::AddressId)i),
252  TOP_ADDRESS, i, AddressNames[i]);
253  }
254 
255  // Accessors
256 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
257  Add((Address)&Accessors::name, \
258  ACCESSOR, \
259  Accessors::k##name, \
260  "Accessors::" #name);
261 
262  ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
263 #undef ACCESSOR_DESCRIPTOR_DECLARATION
264 
265  StubCache* stub_cache = isolate->stub_cache();
266 
267  // Stub cache tables
268  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
269  STUB_CACHE_TABLE,
270  1,
271  "StubCache::primary_->key");
272  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
273  STUB_CACHE_TABLE,
274  2,
275  "StubCache::primary_->value");
276  Add(stub_cache->map_reference(StubCache::kPrimary).address(),
277  STUB_CACHE_TABLE,
278  3,
279  "StubCache::primary_->map");
280  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
281  STUB_CACHE_TABLE,
282  4,
283  "StubCache::secondary_->key");
284  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
285  STUB_CACHE_TABLE,
286  5,
287  "StubCache::secondary_->value");
288  Add(stub_cache->map_reference(StubCache::kSecondary).address(),
289  STUB_CACHE_TABLE,
290  6,
291  "StubCache::secondary_->map");
292 
293  // Runtime entries
294  Add(ExternalReference::perform_gc_function(isolate).address(),
295  RUNTIME_ENTRY,
296  1,
297  "Runtime::PerformGC");
298  Add(ExternalReference::fill_heap_number_with_random_function(
299  isolate).address(),
300  RUNTIME_ENTRY,
301  2,
302  "V8::FillHeapNumberWithRandom");
303  Add(ExternalReference::random_uint32_function(isolate).address(),
304  RUNTIME_ENTRY,
305  3,
306  "V8::Random");
307  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
308  RUNTIME_ENTRY,
309  4,
310  "HandleScope::DeleteExtensions");
311  Add(ExternalReference::
312  incremental_marking_record_write_function(isolate).address(),
313  RUNTIME_ENTRY,
314  5,
315  "IncrementalMarking::RecordWrite");
316  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
317  RUNTIME_ENTRY,
318  6,
319  "StoreBuffer::StoreBufferOverflow");
320  Add(ExternalReference::
321  incremental_evacuation_record_write_function(isolate).address(),
322  RUNTIME_ENTRY,
323  7,
324  "IncrementalMarking::RecordWrite");
325 
326 
327 
328  // Miscellaneous
329  Add(ExternalReference::roots_array_start(isolate).address(),
330  UNCLASSIFIED,
331  3,
332  "Heap::roots_array_start()");
333  Add(ExternalReference::address_of_stack_limit(isolate).address(),
334  UNCLASSIFIED,
335  4,
336  "StackGuard::address_of_jslimit()");
337  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
338  UNCLASSIFIED,
339  5,
340  "StackGuard::address_of_real_jslimit()");
341 #ifndef V8_INTERPRETED_REGEXP
342  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
343  UNCLASSIFIED,
344  6,
345  "RegExpStack::limit_address()");
346  Add(ExternalReference::address_of_regexp_stack_memory_address(
347  isolate).address(),
348  UNCLASSIFIED,
349  7,
350  "RegExpStack::memory_address()");
351  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
352  UNCLASSIFIED,
353  8,
354  "RegExpStack::memory_size()");
355  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
356  UNCLASSIFIED,
357  9,
358  "OffsetsVector::static_offsets_vector");
359 #endif // V8_INTERPRETED_REGEXP
360  Add(ExternalReference::new_space_start(isolate).address(),
361  UNCLASSIFIED,
362  10,
363  "Heap::NewSpaceStart()");
364  Add(ExternalReference::new_space_mask(isolate).address(),
365  UNCLASSIFIED,
366  11,
367  "Heap::NewSpaceMask()");
368  Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(),
369  UNCLASSIFIED,
370  12,
371  "Heap::always_allocate_scope_depth()");
372  Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
373  UNCLASSIFIED,
374  14,
375  "Heap::NewSpaceAllocationLimitAddress()");
376  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
377  UNCLASSIFIED,
378  15,
379  "Heap::NewSpaceAllocationTopAddress()");
380 #ifdef ENABLE_DEBUGGER_SUPPORT
381  Add(ExternalReference::debug_break(isolate).address(),
382  UNCLASSIFIED,
383  16,
384  "Debug::Break()");
385  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
386  UNCLASSIFIED,
387  17,
388  "Debug::step_in_fp_addr()");
389 #endif
390  Add(ExternalReference::double_fp_operation(Token::ADD, isolate).address(),
391  UNCLASSIFIED,
392  18,
393  "add_two_doubles");
394  Add(ExternalReference::double_fp_operation(Token::SUB, isolate).address(),
395  UNCLASSIFIED,
396  19,
397  "sub_two_doubles");
398  Add(ExternalReference::double_fp_operation(Token::MUL, isolate).address(),
399  UNCLASSIFIED,
400  20,
401  "mul_two_doubles");
402  Add(ExternalReference::double_fp_operation(Token::DIV, isolate).address(),
403  UNCLASSIFIED,
404  21,
405  "div_two_doubles");
406  Add(ExternalReference::double_fp_operation(Token::MOD, isolate).address(),
407  UNCLASSIFIED,
408  22,
409  "mod_two_doubles");
410  Add(ExternalReference::compare_doubles(isolate).address(),
411  UNCLASSIFIED,
412  23,
413  "compare_doubles");
414 #ifndef V8_INTERPRETED_REGEXP
415  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
416  UNCLASSIFIED,
417  24,
418  "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
419  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
420  UNCLASSIFIED,
421  25,
422  "RegExpMacroAssembler*::CheckStackGuardState()");
423  Add(ExternalReference::re_grow_stack(isolate).address(),
424  UNCLASSIFIED,
425  26,
426  "NativeRegExpMacroAssembler::GrowStack()");
427  Add(ExternalReference::re_word_character_map().address(),
428  UNCLASSIFIED,
429  27,
430  "NativeRegExpMacroAssembler::word_character_map");
431 #endif // V8_INTERPRETED_REGEXP
432  // Keyed lookup cache.
433  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
434  UNCLASSIFIED,
435  28,
436  "KeyedLookupCache::keys()");
437  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
438  UNCLASSIFIED,
439  29,
440  "KeyedLookupCache::field_offsets()");
441  Add(ExternalReference::transcendental_cache_array_address(isolate).address(),
442  UNCLASSIFIED,
443  30,
444  "TranscendentalCache::caches()");
445  Add(ExternalReference::handle_scope_next_address().address(),
446  UNCLASSIFIED,
447  31,
448  "HandleScope::next");
449  Add(ExternalReference::handle_scope_limit_address().address(),
450  UNCLASSIFIED,
451  32,
452  "HandleScope::limit");
453  Add(ExternalReference::handle_scope_level_address().address(),
454  UNCLASSIFIED,
455  33,
456  "HandleScope::level");
457  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
458  UNCLASSIFIED,
459  34,
460  "Deoptimizer::New()");
461  Add(ExternalReference::compute_output_frames_function(isolate).address(),
462  UNCLASSIFIED,
463  35,
464  "Deoptimizer::ComputeOutputFrames()");
465  Add(ExternalReference::address_of_min_int().address(),
466  UNCLASSIFIED,
467  36,
468  "LDoubleConstant::min_int");
469  Add(ExternalReference::address_of_one_half().address(),
470  UNCLASSIFIED,
471  37,
472  "LDoubleConstant::one_half");
473  Add(ExternalReference::isolate_address().address(),
474  UNCLASSIFIED,
475  38,
476  "isolate");
477  Add(ExternalReference::address_of_minus_zero().address(),
478  UNCLASSIFIED,
479  39,
480  "LDoubleConstant::minus_zero");
481  Add(ExternalReference::address_of_negative_infinity().address(),
482  UNCLASSIFIED,
483  40,
484  "LDoubleConstant::negative_infinity");
485  Add(ExternalReference::power_double_double_function(isolate).address(),
486  UNCLASSIFIED,
487  41,
488  "power_double_double_function");
489  Add(ExternalReference::power_double_int_function(isolate).address(),
490  UNCLASSIFIED,
491  42,
492  "power_double_int_function");
493  Add(ExternalReference::store_buffer_top(isolate).address(),
494  UNCLASSIFIED,
495  43,
496  "store_buffer_top");
497  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
498  UNCLASSIFIED,
499  44,
500  "canonical_nan");
501  Add(ExternalReference::address_of_the_hole_nan().address(),
502  UNCLASSIFIED,
503  45,
504  "the_hole_nan");
505  Add(ExternalReference::get_date_field_function(isolate).address(),
506  UNCLASSIFIED,
507  46,
508  "JSDate::GetField");
509  Add(ExternalReference::date_cache_stamp(isolate).address(),
510  UNCLASSIFIED,
511  47,
512  "date_cache_stamp");
513 }
514 
515 
516 ExternalReferenceEncoder::ExternalReferenceEncoder()
517  : encodings_(Match),
518  isolate_(Isolate::Current()) {
519  ExternalReferenceTable* external_references =
520  ExternalReferenceTable::instance(isolate_);
521  for (int i = 0; i < external_references->size(); ++i) {
522  Put(external_references->address(i), i);
523  }
524 }
525 
526 
527 uint32_t ExternalReferenceEncoder::Encode(Address key) const {
528  int index = IndexOf(key);
529  ASSERT(key == NULL || index >= 0);
530  return index >= 0 ?
531  ExternalReferenceTable::instance(isolate_)->code(index) : 0;
532 }
533 
534 
535 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
536  int index = IndexOf(key);
537  return index >= 0 ?
538  ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
539 }
540 
541 
542 int ExternalReferenceEncoder::IndexOf(Address key) const {
543  if (key == NULL) return -1;
544  HashMap::Entry* entry =
545  const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
546  return entry == NULL
547  ? -1
548  : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
549 }
550 
551 
552 void ExternalReferenceEncoder::Put(Address key, int index) {
553  HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
554  entry->value = reinterpret_cast<void*>(index);
555 }
556 
557 
558 ExternalReferenceDecoder::ExternalReferenceDecoder()
559  : encodings_(NewArray<Address*>(kTypeCodeCount)),
560  isolate_(Isolate::Current()) {
561  ExternalReferenceTable* external_references =
562  ExternalReferenceTable::instance(isolate_);
563  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
564  int max = external_references->max_id(type) + 1;
565  encodings_[type] = NewArray<Address>(max + 1);
566  }
567  for (int i = 0; i < external_references->size(); ++i) {
568  Put(external_references->code(i), external_references->address(i));
569  }
570 }
571 
572 
573 ExternalReferenceDecoder::~ExternalReferenceDecoder() {
574  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
575  DeleteArray(encodings_[type]);
576  }
577  DeleteArray(encodings_);
578 }
579 
580 
581 bool Serializer::serialization_enabled_ = false;
582 bool Serializer::too_late_to_enable_now_ = false;
583 
584 
585 Deserializer::Deserializer(SnapshotByteSource* source)
586  : isolate_(NULL),
587  source_(source),
588  external_reference_decoder_(NULL) {
589 }
590 
591 
592 // This routine both allocates a new object and keeps track of where
593 // objects have been allocated so that we can fix back references
594 // when deserializing.
595 Address Deserializer::Allocate(int space_index, Space* space, int size) {
596  Address address;
597  if (!SpaceIsLarge(space_index)) {
598  ASSERT(!SpaceIsPaged(space_index) ||
599  size <= Page::kMaxNonCodeHeapObjectSize);
600  MaybeObject* maybe_new_allocation;
601  if (space_index == NEW_SPACE) {
602  maybe_new_allocation =
603  reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
604  } else {
605  maybe_new_allocation =
606  reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
607  }
608  ASSERT(!maybe_new_allocation->IsFailure());
609  Object* new_allocation = maybe_new_allocation->ToObjectUnchecked();
610  HeapObject* new_object = HeapObject::cast(new_allocation);
611  address = new_object->address();
612  high_water_[space_index] = address + size;
613  } else {
614  ASSERT(SpaceIsLarge(space_index));
615  LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space);
616  Object* new_allocation;
617  if (space_index == kLargeData || space_index == kLargeFixedArray) {
618  new_allocation =
619  lo_space->AllocateRaw(size, NOT_EXECUTABLE)->ToObjectUnchecked();
620  } else {
621  ASSERT_EQ(kLargeCode, space_index);
622  new_allocation =
623  lo_space->AllocateRaw(size, EXECUTABLE)->ToObjectUnchecked();
624  }
625  HeapObject* new_object = HeapObject::cast(new_allocation);
626  // Record all large objects in the same space.
627  address = new_object->address();
628  pages_[LO_SPACE].Add(address);
629  }
630  last_object_address_ = address;
631  return address;
632 }
633 
634 
635 // This returns the address of an object that has been described in the
636 // snapshot as being offset bytes back in a particular space.
637 HeapObject* Deserializer::GetAddressFromEnd(int space) {
638  int offset = source_->GetInt();
639  ASSERT(!SpaceIsLarge(space));
640  offset <<= kObjectAlignmentBits;
641  return HeapObject::FromAddress(high_water_[space] - offset);
642 }
643 
644 
645 // This returns the address of an object that has been described in the
646 // snapshot as being offset bytes into a particular space.
647 HeapObject* Deserializer::GetAddressFromStart(int space) {
648  int offset = source_->GetInt();
649  if (SpaceIsLarge(space)) {
650  // Large spaces have one object per 'page'.
651  return HeapObject::FromAddress(pages_[LO_SPACE][offset]);
652  }
653  offset <<= kObjectAlignmentBits;
654  if (space == NEW_SPACE) {
655  // New space has only one space - numbered 0.
656  return HeapObject::FromAddress(pages_[space][0] + offset);
657  }
658  ASSERT(SpaceIsPaged(space));
659  int page_of_pointee = offset >> kPageSizeBits;
660  Address object_address = pages_[space][page_of_pointee] +
661  (offset & Page::kPageAlignmentMask);
662  return HeapObject::FromAddress(object_address);
663 }
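
To make the pseudo-address arithmetic above concrete, here is a self-contained editorial sketch (the constants are stand-ins, not V8's real values): restoring the alignment bits and then splitting the offset yields the page index and the offset within that page.

    #include <assert.h>
    #include <stdint.h>

    int main() {
      const int kObjectAlignmentBits = 2;   // stand-in: 4-byte alignment
      const int kPageSizeBits = 13;         // stand-in: 8KB pages
      const uint32_t kPageAlignmentMask = (1u << kPageSizeBits) - 1;

      uint32_t serialized = 0x1803;         // as if read by source_->GetInt()
      uint32_t offset = serialized << kObjectAlignmentBits;   // 0x600c
      assert((offset >> kPageSizeBits) == 3);                 // page 3
      assert((offset & kPageAlignmentMask) == 0x0c);          // 12 bytes in
      return 0;
    }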
664 
665 
666 void Deserializer::Deserialize() {
667  isolate_ = Isolate::Current();
668  ASSERT(isolate_ != NULL);
669  // Don't GC while deserializing - just expand the heap.
670  AlwaysAllocateScope always_allocate;
671  // Don't use the free lists while deserializing.
672  LinearAllocationScope allocate_linearly;
673  // No active threads.
674  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
675  // No active handles.
676  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
677  // Make sure the entire partial snapshot cache is traversed, filling it with
678  // valid object pointers.
679  isolate_->set_serialize_partial_snapshot_cache_length(
680  Isolate::kPartialSnapshotCacheCapacity);
681  ASSERT_EQ(NULL, external_reference_decoder_);
682  external_reference_decoder_ = new ExternalReferenceDecoder();
683  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
684  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
685 
686  isolate_->heap()->set_global_contexts_list(
687  isolate_->heap()->undefined_value());
688 
689  // Update data pointers to the external strings containing natives sources.
690  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
691  Object* source = isolate_->heap()->natives_source_cache()->get(i);
692  if (!source->IsUndefined()) {
693  ExternalAsciiString::cast(source)->update_data_cache();
694  }
695  }
696 }
697 
698 
699 void Deserializer::DeserializePartial(Object** root) {
700  isolate_ = Isolate::Current();
701  // Don't GC while deserializing - just expand the heap.
702  AlwaysAllocateScope always_allocate;
703  // Don't use the free lists while deserializing.
704  LinearAllocationScope allocate_linearly;
705  if (external_reference_decoder_ == NULL) {
706  external_reference_decoder_ = new ExternalReferenceDecoder();
707  }
708  VisitPointer(root);
709 }
710 
711 
712 Deserializer::~Deserializer() {
713  ASSERT(source_->AtEOF());
714  if (external_reference_decoder_) {
715  delete external_reference_decoder_;
716  external_reference_decoder_ = NULL;
717  }
718 }
719 
720 
721 // This is called on the roots. It is the driver of the deserialization
722 // process. It is also called on the body of each function.
723 void Deserializer::VisitPointers(Object** start, Object** end) {
724  // The space must be new space. Any other space would cause ReadChunk to try
725  // to update the remembered set using NULL as the address.
726  ReadChunk(start, end, NEW_SPACE, NULL);
727 }
728 
729 
730 // This routine writes the new object into the pointer provided rather
731 // than returning it to the caller.
732 // The reason for this interface is that otherwise the object is
733 // written very late, which means the FreeSpace map is not set up by the
734 // time we need to use it to mark the space at the end of a page free.
735 void Deserializer::ReadObject(int space_number,
736  Space* space,
737  Object** write_back) {
738  int size = source_->GetInt() << kObjectAlignmentBits;
739  Address address = Allocate(space_number, space, size);
740  *write_back = HeapObject::FromAddress(address);
741  Object** current = reinterpret_cast<Object**>(address);
742  Object** limit = current + (size >> kPointerSizeLog2);
743  if (FLAG_log_snapshot_positions) {
744  LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
745  }
746  ReadChunk(current, limit, space_number, address);
747 #ifdef DEBUG
748  bool is_codespace = (space == HEAP->code_space()) ||
749  ((space == HEAP->lo_space()) && (space_number == kLargeCode));
750  ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
751 #endif
752 }
753 
754 
755 // This macro is always used with a constant argument so it should all fold
756 // away to almost nothing in the generated code. It might be nicer to do this
757 // with the ternary operator but there are type issues with that.
758 #define ASSIGN_DEST_SPACE(space_number) \
759  Space* dest_space; \
760  if (space_number == NEW_SPACE) { \
761  dest_space = isolate->heap()->new_space(); \
762  } else if (space_number == OLD_POINTER_SPACE) { \
763  dest_space = isolate->heap()->old_pointer_space(); \
764  } else if (space_number == OLD_DATA_SPACE) { \
765  dest_space = isolate->heap()->old_data_space(); \
766  } else if (space_number == CODE_SPACE) { \
767  dest_space = isolate->heap()->code_space(); \
768  } else if (space_number == MAP_SPACE) { \
769  dest_space = isolate->heap()->map_space(); \
770  } else if (space_number == CELL_SPACE) { \
771  dest_space = isolate->heap()->cell_space(); \
772  } else { \
773  ASSERT(space_number >= LO_SPACE); \
774  dest_space = isolate->heap()->lo_space(); \
775  }
776 
777 
778 static const int kUnknownOffsetFromStart = -1;
779 
780 
781 void Deserializer::ReadChunk(Object** current,
782  Object** limit,
783  int source_space,
784  Address current_object_address) {
785  Isolate* const isolate = isolate_;
786  bool write_barrier_needed = (current_object_address != NULL &&
787  source_space != NEW_SPACE &&
788  source_space != CELL_SPACE &&
789  source_space != CODE_SPACE &&
790  source_space != OLD_DATA_SPACE);
791  while (current < limit) {
792  int data = source_->Get();
793  switch (data) {
794 #define CASE_STATEMENT(where, how, within, space_number) \
795  case where + how + within + space_number: \
796  ASSERT((where & ~kPointedToMask) == 0); \
797  ASSERT((how & ~kHowToCodeMask) == 0); \
798  ASSERT((within & ~kWhereToPointMask) == 0); \
799  ASSERT((space_number & ~kSpaceMask) == 0);
800 
801 #define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \
802  { \
803  bool emit_write_barrier = false; \
804  bool current_was_incremented = false; \
805  int space_number = space_number_if_any == kAnyOldSpace ? \
806  (data & kSpaceMask) : space_number_if_any; \
807  if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
808  ASSIGN_DEST_SPACE(space_number) \
809  ReadObject(space_number, dest_space, current); \
810  emit_write_barrier = (space_number == NEW_SPACE); \
811  } else { \
812  Object* new_object = NULL; /* May not be a real Object pointer. */ \
813  if (where == kNewObject) { \
814  ASSIGN_DEST_SPACE(space_number) \
815  ReadObject(space_number, dest_space, &new_object); \
816  } else if (where == kRootArray) { \
817  int root_id = source_->GetInt(); \
818  new_object = isolate->heap()->roots_array_start()[root_id]; \
819  emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
820  } else if (where == kPartialSnapshotCache) { \
821  int cache_index = source_->GetInt(); \
822  new_object = isolate->serialize_partial_snapshot_cache() \
823  [cache_index]; \
824  emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
825  } else if (where == kExternalReference) { \
826  int reference_id = source_->GetInt(); \
827  Address address = external_reference_decoder_-> \
828  Decode(reference_id); \
829  new_object = reinterpret_cast<Object*>(address); \
830  } else if (where == kBackref) { \
831  emit_write_barrier = (space_number == NEW_SPACE); \
832  new_object = GetAddressFromEnd(data & kSpaceMask); \
833  } else { \
834  ASSERT(where == kFromStart); \
835  if (offset_from_start == kUnknownOffsetFromStart) { \
836  emit_write_barrier = (space_number == NEW_SPACE); \
837  new_object = GetAddressFromStart(data & kSpaceMask); \
838  } else { \
839  Address object_address = pages_[space_number][0] + \
840  (offset_from_start << kObjectAlignmentBits); \
841  new_object = HeapObject::FromAddress(object_address); \
842  } \
843  } \
844  if (within == kFirstInstruction) { \
845  Code* new_code_object = reinterpret_cast<Code*>(new_object); \
846  new_object = reinterpret_cast<Object*>( \
847  new_code_object->instruction_start()); \
848  } \
849  if (how == kFromCode) { \
850  Address location_of_branch_data = \
851  reinterpret_cast<Address>(current); \
852  Assembler::deserialization_set_special_target_at( \
853  location_of_branch_data, \
854  reinterpret_cast<Address>(new_object)); \
855  location_of_branch_data += Assembler::kSpecialTargetSize; \
856  current = reinterpret_cast<Object**>(location_of_branch_data); \
857  current_was_incremented = true; \
858  } else { \
859  *current = new_object; \
860  } \
861  } \
862  if (emit_write_barrier && write_barrier_needed) { \
863  Address current_address = reinterpret_cast<Address>(current); \
864  isolate->heap()->RecordWrite( \
865  current_object_address, \
866  static_cast<int>(current_address - current_object_address)); \
867  } \
868  if (!current_was_incremented) { \
869  current++; \
870  } \
871  break; \
872  } \
873 
874 // This generates a case and a body for each space. The large object spaces are
875 // very rare in snapshots so they are grouped in one body.
876 #define ONE_PER_SPACE(where, how, within) \
877  CASE_STATEMENT(where, how, within, NEW_SPACE) \
878  CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
879  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
880  CASE_BODY(where, how, within, OLD_DATA_SPACE, kUnknownOffsetFromStart) \
881  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
882  CASE_BODY(where, how, within, OLD_POINTER_SPACE, kUnknownOffsetFromStart) \
883  CASE_STATEMENT(where, how, within, CODE_SPACE) \
884  CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
885  CASE_STATEMENT(where, how, within, CELL_SPACE) \
886  CASE_BODY(where, how, within, CELL_SPACE, kUnknownOffsetFromStart) \
887  CASE_STATEMENT(where, how, within, MAP_SPACE) \
888  CASE_BODY(where, how, within, MAP_SPACE, kUnknownOffsetFromStart) \
889  CASE_STATEMENT(where, how, within, kLargeData) \
890  CASE_STATEMENT(where, how, within, kLargeCode) \
891  CASE_STATEMENT(where, how, within, kLargeFixedArray) \
892  CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
893 
894 // This generates a case and a body for the new space (which has to do extra
895 // write barrier handling) and handles the other spaces with 8 fall-through
896 // cases and one body.
897 #define ALL_SPACES(where, how, within) \
898  CASE_STATEMENT(where, how, within, NEW_SPACE) \
899  CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
900  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
901  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
902  CASE_STATEMENT(where, how, within, CODE_SPACE) \
903  CASE_STATEMENT(where, how, within, CELL_SPACE) \
904  CASE_STATEMENT(where, how, within, MAP_SPACE) \
905  CASE_STATEMENT(where, how, within, kLargeData) \
906  CASE_STATEMENT(where, how, within, kLargeCode) \
907  CASE_STATEMENT(where, how, within, kLargeFixedArray) \
908  CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
909 
910 #define ONE_PER_CODE_SPACE(where, how, within) \
911  CASE_STATEMENT(where, how, within, CODE_SPACE) \
912  CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
913  CASE_STATEMENT(where, how, within, kLargeCode) \
914  CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart)
915 
916 #define FOUR_CASES(byte_code) \
917  case byte_code: \
918  case byte_code + 1: \
919  case byte_code + 2: \
920  case byte_code + 3:
921 
922 #define SIXTEEN_CASES(byte_code) \
923  FOUR_CASES(byte_code) \
924  FOUR_CASES(byte_code + 4) \
925  FOUR_CASES(byte_code + 8) \
926  FOUR_CASES(byte_code + 12)
927 
928  // We generate 15 cases and bodies that process special tags that combine
929  // the raw data tag and the length into one byte.
930 #define RAW_CASE(index, size) \
931  case kRawData + index: { \
932  byte* raw_data_out = reinterpret_cast<byte*>(current); \
933  source_->CopyRaw(raw_data_out, size); \
934  current = reinterpret_cast<Object**>(raw_data_out + size); \
935  break; \
936  }
937  COMMON_RAW_LENGTHS(RAW_CASE)
938 #undef RAW_CASE
939 
940  // Deserialize a chunk of raw data that doesn't have one of the popular
941  // lengths.
942  case kRawData: {
943  int size = source_->GetInt();
944  byte* raw_data_out = reinterpret_cast<byte*>(current);
945  source_->CopyRaw(raw_data_out, size);
946  current = reinterpret_cast<Object**>(raw_data_out + size);
947  break;
948  }
949 
950  SIXTEEN_CASES(kRootArrayLowConstants)
951  SIXTEEN_CASES(kRootArrayHighConstants) {
952  int root_id = RootArrayConstantFromByteCode(data);
953  Object* object = isolate->heap()->roots_array_start()[root_id];
954  ASSERT(!isolate->heap()->InNewSpace(object));
955  *current++ = object;
956  break;
957  }
958 
959  case kRepeat: {
960  int repeats = source_->GetInt();
961  Object* object = current[-1];
962  ASSERT(!isolate->heap()->InNewSpace(object));
963  for (int i = 0; i < repeats; i++) current[i] = object;
964  current += repeats;
965  break;
966  }
967 
970  STATIC_ASSERT(kMaxRepeats == 12);
971  FOUR_CASES(kConstantRepeat)
972  FOUR_CASES(kConstantRepeat + 4)
973  FOUR_CASES(kConstantRepeat + 8) {
974  int repeats = RepeatsForCode(data);
975  Object* object = current[-1];
976  ASSERT(!isolate->heap()->InNewSpace(object));
977  for (int i = 0; i < repeats; i++) current[i] = object;
978  current += repeats;
979  break;
980  }
981 
982  // Deserialize a new object and write a pointer to it to the current
983  // object.
984  ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject)
985  // Support for direct instruction pointers in functions
986  ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction)
987  // Deserialize a new code object and write a pointer to its first
988  // instruction to the current code object.
989  ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction)
990  // Find a recently deserialized object using its offset from the current
991  // allocation point and write a pointer to it to the current object.
992  ALL_SPACES(kBackref, kPlain, kStartOfObject)
993 #if V8_TARGET_ARCH_MIPS
994  // Deserialize a new object from pointer found in code and write
995  // a pointer to it to the current object. Required only for MIPS, and
996  // omitted on the other architectures because it is fully unrolled and
997  // would cause bloat.
998  ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject)
999  // Find a recently deserialized code object using its offset from the
1000  // current allocation point and write a pointer to it to the current
1001  // object. Required only for MIPS.
1002  ALL_SPACES(kBackref, kFromCode, kStartOfObject)
1003  // Find an already deserialized code object using its offset from
1004  // the start and write a pointer to it to the current object.
1005  // Required only for MIPS.
1006  ALL_SPACES(kFromStart, kFromCode, kStartOfObject)
1007 #endif
1008  // Find a recently deserialized code object using its offset from the
1009  // current allocation point and write a pointer to its first instruction
1010  // to the current code object or the instruction pointer in a function
1011  // object.
1012  ALL_SPACES(kBackref, kFromCode, kFirstInstruction)
1013  ALL_SPACES(kBackref, kPlain, kFirstInstruction)
1014  // Find an already deserialized object using its offset from the start
1015  // and write a pointer to it to the current object.
1016  ALL_SPACES(kFromStart, kPlain, kStartOfObject)
1017  ALL_SPACES(kFromStart, kFromCode, kFirstInstruction)
1018  // Find an already deserialized code object using its offset from the
1019  // start and write a pointer to its first instruction to the current code
1020  // object.
1021  ALL_SPACES(kFromStart, kPlain, kFirstInstruction)
1022  // Find an object in the roots array and write a pointer to it to the
1023  // current object.
1024  CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
1025  CASE_BODY(kRootArray, kPlain, kStartOfObject, 0, kUnknownOffsetFromStart)
1026  // Find an object in the partial snapshots cache and write a pointer to it
1027  // to the current object.
1028  CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
1029  CASE_BODY(kPartialSnapshotCache,
1030  kPlain,
1031  kStartOfObject,
1032  0,
1033  kUnknownOffsetFromStart)
1034  // Find a code entry in the partial snapshots cache and
1035  // write a pointer to it to the current object.
1036  CASE_STATEMENT(kPartialSnapshotCache, kPlain, kFirstInstruction, 0)
1037  CASE_BODY(kPartialSnapshotCache,
1038  kPlain,
1039  kFirstInstruction,
1040  0,
1041  kUnknownOffsetFromStart)
1042  // Find an external reference and write a pointer to it to the current
1043  // object.
1044  CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
1045  CASE_BODY(kExternalReference,
1046  kPlain,
1047  kStartOfObject,
1048  0,
1049  kUnknownOffsetFromStart)
1050  // Find an external reference and write a pointer to it in the current
1051  // code object.
1052  CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
1053  CASE_BODY(kExternalReference,
1054  kFromCode,
1055  kStartOfObject,
1056  0,
1057  kUnknownOffsetFromStart)
1058 
1059 #undef CASE_STATEMENT
1060 #undef CASE_BODY
1061 #undef ONE_PER_SPACE
1062 #undef ALL_SPACES
1063 #undef ASSIGN_DEST_SPACE
1064 
1065  case kNewPage: {
1066  int space = source_->Get();
1067  pages_[space].Add(last_object_address_);
1068  if (space == CODE_SPACE) {
1069  CPU::FlushICache(last_object_address_, Page::kPageSize);
1070  }
1071  break;
1072  }
1073 
1074  case kSkip: {
1075  current++;
1076  break;
1077  }
1078 
1079  case kNativesStringResource: {
1080  int index = source_->Get();
1081  Vector<const char> source_vector = Natives::GetRawScriptSource(index);
1082  NativesExternalStringResource* resource =
1083  new NativesExternalStringResource(isolate->bootstrapper(),
1084  source_vector.start(),
1085  source_vector.length());
1086  *current++ = reinterpret_cast<Object*>(resource);
1087  break;
1088  }
1089 
1090  case kSynchronize: {
1091  // If we get here then that indicates that you have a mismatch between
1092  // the number of GC roots when serializing and deserializing.
1093  UNREACHABLE();
1094  }
1095 
1096  default:
1097  UNREACHABLE();
1098  }
1099  }
1100  ASSERT_EQ(current, limit);
1101 }
1102 
1103 
1104 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
1105  const int max_shift = ((kPointerSize * kBitsPerByte) / 7) * 7;
1106  for (int shift = max_shift; shift > 0; shift -= 7) {
1107  if (integer >= static_cast<uintptr_t>(1u) << shift) {
1108  Put((static_cast<int>((integer >> shift)) & 0x7f) | 0x80, "IntPart");
1109  }
1110  }
1111  PutSection(static_cast<int>(integer & 0x7f), "IntLastPart");
1112 }
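
PutInt defines the snapshot's variable-length integer format: the value is written big-endian in 7-bit groups, with 0x80 set on every group except the last. A minimal matching decoder (an editorial sketch; V8's real counterpart is SnapshotByteSource::GetInt) makes the format concrete:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    // Accumulate 7-bit groups until a byte without the 0x80
    // continuation flag is seen.
    static uintptr_t DecodeInt(const uint8_t* p, size_t* used) {
      uintptr_t answer = 0;
      size_t i = 0;
      for (;;) {
        uint8_t b = p[i++];
        answer = (answer << 7) | (b & 0x7f);
        if ((b & 0x80) == 0) break;  // last group
      }
      *used = i;
      return answer;
    }

    int main() {
      // 300 = 2 * 128 + 44, so PutInt emits 0x82 (2, flagged) then 0x2c (44).
      const uint8_t encoded[] = { 0x82, 0x2c };
      size_t used = 0;
      assert(DecodeInt(encoded, &used) == 300);
      assert(used == 2);
      return 0;
    }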
1113 
1114 
1115 Serializer::Serializer(SnapshotByteSink* sink)
1116  : sink_(sink),
1117  current_root_index_(0),
1118  external_reference_encoder_(new ExternalReferenceEncoder),
1119  large_object_total_(0),
1120  root_index_wave_front_(0) {
1121  isolate_ = Isolate::Current();
1122  // The serializer is meant to be used only to generate initial heap images
1123  // from a context in which there is only one isolate.
1124  ASSERT(isolate_->IsDefaultIsolate());
1125  for (int i = 0; i <= LAST_SPACE; i++) {
1126  fullness_[i] = 0;
1127  }
1128 }
1129 
1130 
1131 Serializer::~Serializer() {
1132  delete external_reference_encoder_;
1133 }
1134 
1135 
1136 void StartupSerializer::SerializeStrongReferences() {
1137  Isolate* isolate = Isolate::Current();
1138  // No active threads.
1139  CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse());
1140  // No active or weak handles.
1141  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
1142  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1143  // We don't support serializing installed extensions.
1144  CHECK(!isolate->has_installed_extensions());
1145 
1146  HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
1147 }
1148 
1149 
1150 void PartialSerializer::Serialize(Object** object) {
1151  this->VisitPointer(object);
1152  Isolate* isolate = Isolate::Current();
1153 
1154  // After we have done the partial serialization the partial snapshot cache
1155  // will contain some references needed to decode the partial snapshot. We
1156  // fill it up with undefineds so it has a predictable length so the
1157  // deserialization code doesn't need to know the length.
1158  for (int index = isolate->serialize_partial_snapshot_cache_length();
1159  index < Isolate::kPartialSnapshotCacheCapacity;
1160  index++) {
1161  isolate->serialize_partial_snapshot_cache()[index] =
1162  isolate->heap()->undefined_value();
1163  startup_serializer_->VisitPointer(
1164  &isolate->serialize_partial_snapshot_cache()[index]);
1165  }
1166  isolate->set_serialize_partial_snapshot_cache_length(
1167  Isolate::kPartialSnapshotCacheCapacity);
1168 }
1169 
1170 
1171 void Serializer::VisitPointers(Object** start, Object** end) {
1172  Isolate* isolate = Isolate::Current();
1173 
1174  for (Object** current = start; current < end; current++) {
1175  if (start == isolate->heap()->roots_array_start()) {
1176  root_index_wave_front_ =
1177  Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
1178  }
1179  if (reinterpret_cast<Address>(current) ==
1180  isolate->heap()->store_buffer()->TopAddress()) {
1181  sink_->Put(kSkip, "Skip");
1182  } else if ((*current)->IsSmi()) {
1183  sink_->Put(kRawData, "RawData");
1184  sink_->PutInt(kPointerSize, "length");
1185  for (int i = 0; i < kPointerSize; i++) {
1186  sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
1187  }
1188  } else {
1189  SerializeObject(*current, kPlain, kStartOfObject);
1190  }
1191  }
1192 }
1193 
1194 
1195 // This ensures that the partial snapshot cache keeps things alive during GC and
1196 // tracks their movement. When it is called during serialization of the startup
1197 // snapshot the partial snapshot is empty, so nothing happens. When the partial
1198 // (context) snapshot is created, this array is populated with the pointers that
1199 // the partial snapshot will need. As that happens we emit serialized objects to
1200 // the startup snapshot that correspond to the elements of this cache array. On
1201 // deserialization we therefore need to visit the cache array. This fills it up
1202 // with pointers to deserialized objects.
1203 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
1204  Isolate* isolate = Isolate::Current();
1205  visitor->VisitPointers(
1206  isolate->serialize_partial_snapshot_cache(),
1207  &isolate->serialize_partial_snapshot_cache()[
1208  isolate->serialize_partial_snapshot_cache_length()]);
1209 }
1210 
1211 
1212 // When deserializing we need to set the size of the snapshot cache. This means
1213 // the root iteration code (above) will iterate over array elements, writing the
1214 // references to deserialized objects in them.
1215 void SerializerDeserializer::SetSnapshotCacheSize(int size) {
1216  Isolate::Current()->set_serialize_partial_snapshot_cache_length(size);
1217 }
1218 
1219 
1220 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
1221  Isolate* isolate = Isolate::Current();
1222 
1223  for (int i = 0;
1224  i < isolate->serialize_partial_snapshot_cache_length();
1225  i++) {
1226  Object* entry = isolate->serialize_partial_snapshot_cache()[i];
1227  if (entry == heap_object) return i;
1228  }
1229 
1230  // We didn't find the object in the cache. So we add it to the cache and
1231  // then visit the pointer so that it becomes part of the startup snapshot
1232  // and we can refer to it from the partial snapshot.
1233  int length = isolate->serialize_partial_snapshot_cache_length();
1234  CHECK(length < Isolate::kPartialSnapshotCacheCapacity);
1235  isolate->serialize_partial_snapshot_cache()[length] = heap_object;
1236  startup_serializer_->VisitPointer(
1237  &isolate->serialize_partial_snapshot_cache()[length]);
1238  // We don't recurse from the startup snapshot generator into the partial
1239  // snapshot generator.
1240  ASSERT(length == isolate->serialize_partial_snapshot_cache_length());
1241  isolate->set_serialize_partial_snapshot_cache_length(length + 1);
1242  return length;
1243 }
1244 
1245 
1246 int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
1247  Heap* heap = HEAP;
1248  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
1249  for (int i = 0; i < root_index_wave_front_; i++) {
1250  Object* root = heap->roots_array_start()[i];
1251  if (!root->IsSmi() && root == heap_object) {
1252 #if V8_TARGET_ARCH_MIPS
1253  if (from == kFromCode) {
1254  // In order to avoid code bloat in the deserializer we don't have
1255  // support for the encoding that specifies a particular root should
1256  // be written into the lui/ori instructions on MIPS. Therefore we
1257  // should not generate such serialization data for MIPS.
1258  return kInvalidRootIndex;
1259  }
1260 #endif
1261  return i;
1262  }
1263  }
1264  return kInvalidRootIndex;
1265 }
1266 
1267 
1268 // Encode the location of an already deserialized object in order to write its
1269 // location into a later object. We can encode the location as an offset from
1270 // the start of the deserialized objects or as an offset backwards from the
1271 // current allocation pointer.
1272 void Serializer::SerializeReferenceToPreviousObject(
1273  int space,
1274  int address,
1275  HowToCode how_to_code,
1276  WhereToPoint where_to_point) {
1277  int offset = CurrentAllocationAddress(space) - address;
1278  bool from_start = true;
1279  if (SpaceIsPaged(space)) {
1280  // For paged space it is simple to encode back from current allocation if
1281  // the object is on the same page as the current allocation pointer.
1282  if ((CurrentAllocationAddress(space) >> kPageSizeBits) ==
1283  (address >> kPageSizeBits)) {
1284  from_start = false;
1285  address = offset;
1286  }
1287  } else if (space == NEW_SPACE) {
1288  // For new space it is always simple to encode back from current allocation.
1289  if (offset < address) {
1290  from_start = false;
1291  address = offset;
1292  }
1293  }
1294  // If we are actually dealing with real offsets (and not a numbering of
1295  // all objects) then we should shift out the bits that are always 0.
1296  if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits;
1297  if (from_start) {
1298  sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
1299  sink_->PutInt(address, "address");
1300  } else {
1301  sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
1302  sink_->PutInt(address, "address");
1303  }
1304 }
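
A worked example of the choice above (editorial; the numbers and the 8KB page size are made up): in a paged space a back reference is only used when the referenced object sits on the same page as the allocation cursor, so the deserializer can resolve it against its high-water mark without consulting a page table.

    #include <assert.h>

    int main() {
      const int kPageSizeBits = 13;      // stand-in page size of 8KB
      int cursor = 0x6040;               // CurrentAllocationAddress(space)
      int address = 0x6010;              // same page as the cursor
      assert((cursor >> kPageSizeBits) == (address >> kPageSizeBits));
      assert(cursor - address == 0x30);  // encoded as a back reference
      int other = 0x2010;                // earlier page: keeps from-start form
      assert((cursor >> kPageSizeBits) != (other >> kPageSizeBits));
      return 0;
    }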
1305 
1306 
1307 void StartupSerializer::SerializeObject(
1308  Object* o,
1309  HowToCode how_to_code,
1310  WhereToPoint where_to_point) {
1311  CHECK(o->IsHeapObject());
1312  HeapObject* heap_object = HeapObject::cast(o);
1313 
1314  int root_index;
1315  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1316  PutRoot(root_index, heap_object, how_to_code, where_to_point);
1317  return;
1318  }
1319 
1320  if (address_mapper_.IsMapped(heap_object)) {
1321  int space = SpaceOfAlreadySerializedObject(heap_object);
1322  int address = address_mapper_.MappedTo(heap_object);
1323  SerializeReferenceToPreviousObject(space,
1324  address,
1325  how_to_code,
1326  where_to_point);
1327  } else {
1328  // Object has not yet been serialized. Serialize it here.
1329  ObjectSerializer object_serializer(this,
1330  heap_object,
1331  sink_,
1332  how_to_code,
1333  where_to_point);
1334  object_serializer.Serialize();
1335  }
1336 }
1337 
1338 
1339 void StartupSerializer::SerializeWeakReferences() {
1340  for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length();
1341  i < Isolate::kPartialSnapshotCacheCapacity;
1342  i++) {
1343  sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization");
1344  sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index");
1345  }
1346  HEAP->IterateWeakRoots(this, VISIT_ALL);
1347 }
1348 
1349 
1350 void Serializer::PutRoot(int root_index,
1351  HeapObject* object,
1352  SerializerDeserializer::HowToCode how_to_code,
1353  SerializerDeserializer::WhereToPoint where_to_point) {
1354  if (how_to_code == kPlain &&
1355  where_to_point == kStartOfObject &&
1356  root_index < kRootArrayNumberOfConstantEncodings &&
1357  !HEAP->InNewSpace(object)) {
1358  if (root_index < kRootArrayNumberOfLowConstantEncodings) {
1359  sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant");
1360  } else {
1361  sink_->Put(kRootArrayHighConstants + root_index -
1363  "RootHiConstant");
1364  }
1365  } else {
1366  sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1367  sink_->PutInt(root_index, "root_index");
1368  }
1369 }
1370 
1371 
1372 void PartialSerializer::SerializeObject(
1373  Object* o,
1374  HowToCode how_to_code,
1375  WhereToPoint where_to_point) {
1376  CHECK(o->IsHeapObject());
1377  HeapObject* heap_object = HeapObject::cast(o);
1378 
1379  if (heap_object->IsMap()) {
1380  // The code-caches link to context-specific code objects, which
1381  // the startup and context serializers cannot currently handle.
1382  ASSERT(Map::cast(heap_object)->code_cache() ==
1383  heap_object->GetHeap()->raw_unchecked_empty_fixed_array());
1384  }
1385 
1386  int root_index;
1387  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1388  PutRoot(root_index, heap_object, how_to_code, where_to_point);
1389  return;
1390  }
1391 
1392  if (ShouldBeInThePartialSnapshotCache(heap_object)) {
1393  int cache_index = PartialSnapshotCacheIndex(heap_object);
1394  sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
1395  "PartialSnapshotCache");
1396  sink_->PutInt(cache_index, "partial_snapshot_cache_index");
1397  return;
1398  }
1399 
1400  // Pointers from the partial snapshot to the objects in the startup snapshot
1401  // should go through the root array or through the partial snapshot cache.
1402  // If this is not the case you may have to add something to the root array.
1403  ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object));
1404  // All the symbols that the partial snapshot needs should be either in the
1405  // root table or in the partial snapshot cache.
1406  ASSERT(!heap_object->IsSymbol());
1407 
1408  if (address_mapper_.IsMapped(heap_object)) {
1409  int space = SpaceOfAlreadySerializedObject(heap_object);
1410  int address = address_mapper_.MappedTo(heap_object);
1411  SerializeReferenceToPreviousObject(space,
1412  address,
1413  how_to_code,
1414  where_to_point);
1415  } else {
1416  // Object has not yet been serialized. Serialize it here.
1417  ObjectSerializer serializer(this,
1418  heap_object,
1419  sink_,
1420  how_to_code,
1421  where_to_point);
1422  serializer.Serialize();
1423  }
1424 }
1425 
1426 
1427 void Serializer::ObjectSerializer::Serialize() {
1428  int space = Serializer::SpaceOfObject(object_);
1429  int size = object_->Size();
1430 
1431  sink_->Put(kNewObject + reference_representation_ + space,
1432  "ObjectSerialization");
1433  sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
1434 
1435  LOG(i::Isolate::Current(),
1436  SnapshotPositionEvent(object_->address(), sink_->Position()));
1437 
1438  // Mark this object as already serialized.
1439  bool start_new_page;
1440  int offset = serializer_->Allocate(space, size, &start_new_page);
1441  serializer_->address_mapper()->AddMapping(object_, offset);
1442  if (start_new_page) {
1443  sink_->Put(kNewPage, "NewPage");
1444  sink_->PutSection(space, "NewPageSpace");
1445  }
1446 
1447  // Serialize the map (first word of the object).
1448  serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject);
1449 
1450  // Serialize the rest of the object.
1451  CHECK_EQ(0, bytes_processed_so_far_);
1452  bytes_processed_so_far_ = kPointerSize;
1453  object_->IterateBody(object_->map()->instance_type(), size, this);
1454  OutputRawData(object_->address() + size);
1455 }
1456 
1457 
1458 void Serializer::ObjectSerializer::VisitPointers(Object** start,
1459  Object** end) {
1460  Object** current = start;
1461  while (current < end) {
1462  while (current < end && (*current)->IsSmi()) current++;
1463  if (current < end) OutputRawData(reinterpret_cast<Address>(current));
1464 
1465  while (current < end && !(*current)->IsSmi()) {
1466  HeapObject* current_contents = HeapObject::cast(*current);
1467  int root_index = serializer_->RootIndex(current_contents, kPlain);
1468  // Repeats are not subject to the write barrier so there are only some
1469  // objects that can be used in a repeat encoding. These are the early
1470  // ones in the root array that are never in new space.
1471  if (current != start &&
1472  root_index != kInvalidRootIndex &&
1473  root_index < kRootArrayNumberOfConstantEncodings &&
1474  current_contents == current[-1]) {
1475  ASSERT(!HEAP->InNewSpace(current_contents));
1476  int repeat_count = 1;
1477  while (current < end - 1 && current[repeat_count] == current_contents) {
1478  repeat_count++;
1479  }
1480  current += repeat_count;
1481  bytes_processed_so_far_ += repeat_count * kPointerSize;
1482  if (repeat_count > kMaxRepeats) {
1483  sink_->Put(kRepeat, "SerializeRepeats");
1484  sink_->PutInt(repeat_count, "SerializeRepeats");
1485  } else {
1486  sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
1487  }
1488  } else {
1489  serializer_->SerializeObject(current_contents, kPlain, kStartOfObject);
1490  bytes_processed_so_far_ += kPointerSize;
1491  current++;
1492  }
1493  }
1494  }
1495 }
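
The run-length branch above is easiest to see with numbers (an editorial sketch; 12 matches the STATIC_ASSERT(kMaxRepeats == 12) in ReadChunk, the rest is made up): short runs of an identical old-space constant cost a single byte, longer ones cost the kRepeat tag plus a variable-length count.

    #include <assert.h>

    int main() {
      const int kMaxRepeats = 12;
      int run = 9;                  // fits: one CodeForRepeats(9) byte
      assert(run <= kMaxRepeats);
      run = 200;                    // too long: kRepeat tag + PutInt(200)
      assert(run > kMaxRepeats);    // PutInt(200) itself takes two bytes
      return 0;
    }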
1496 
1497 
1498 void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
1499  Object** current = rinfo->target_object_address();
1500 
1501  OutputRawData(rinfo->target_address_address());
1502  HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
1503  serializer_->SerializeObject(*current, representation, kStartOfObject);
1504  bytes_processed_so_far_ += rinfo->target_address_size();
1505 }
1506 
1507 
1508 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
1509  Address* end) {
1510  Address references_start = reinterpret_cast<Address>(start);
1511  OutputRawData(references_start);
1512 
1513  for (Address* current = start; current < end; current++) {
1514  sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
1515  int reference_id = serializer_->EncodeExternalReference(*current);
1516  sink_->PutInt(reference_id, "reference id");
1517  }
1518  bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize);
1519 }
1520 
1521 
1522 void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
1523  Address references_start = rinfo->target_address_address();
1524  OutputRawData(references_start);
1525 
1526  Address* current = rinfo->target_reference_address();
1527  int representation = rinfo->IsCodedSpecially() ?
1529  sink_->Put(kExternalReference + representation, "ExternalRef");
1530  int reference_id = serializer_->EncodeExternalReference(*current);
1531  sink_->PutInt(reference_id, "reference id");
1532  bytes_processed_so_far_ += rinfo->target_address_size();
1533 }
1534 
1535 
1536 void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
1537  Address target_start = rinfo->target_address_address();
1538  OutputRawData(target_start);
1539  Address target = rinfo->target_address();
1540  uint32_t encoding = serializer_->EncodeExternalReference(target);
1541  CHECK(target == NULL ? encoding == 0 : encoding != 0);
1542  int representation;
1543  // Can't use a ternary operator because of gcc.
1544  if (rinfo->IsCodedSpecially()) {
1545  representation = kStartOfObject + kFromCode;
1546  } else {
1547  representation = kStartOfObject + kPlain;
1548  }
1549  sink_->Put(kExternalReference + representation, "ExternalReference");
1550  sink_->PutInt(encoding, "reference id");
1551  bytes_processed_so_far_ += rinfo->target_address_size();
1552 }
1553 
1554 
1555 void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
1556  CHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
1557  Address target_start = rinfo->target_address_address();
1558  OutputRawData(target_start);
1559  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1560  serializer_->SerializeObject(target, kFromCode, kFirstInstruction);
1561  bytes_processed_so_far_ += rinfo->target_address_size();
1562 }
1563 
1564 
1565 void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
1566  Code* target = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1567  OutputRawData(entry_address);
1568  serializer_->SerializeObject(target, kPlain, kFirstInstruction);
1569  bytes_processed_so_far_ += kPointerSize;
1570 }
1571 
1572 
1573 void Serializer::ObjectSerializer::VisitGlobalPropertyCell(RelocInfo* rinfo) {
1574  // We shouldn't have any global property cell references in code
1575  // objects in the snapshot.
1576  UNREACHABLE();
1577 }
1578 
1579 
1580 void Serializer::ObjectSerializer::VisitExternalAsciiString(
1581  v8::String::ExternalAsciiStringResource** resource_pointer) {
1582  Address references_start = reinterpret_cast<Address>(resource_pointer);
1583  OutputRawData(references_start);
1584  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
1585  Object* source = HEAP->natives_source_cache()->get(i);
1586  if (!source->IsUndefined()) {
1587  ExternalAsciiString* string = ExternalAsciiString::cast(source);
1588  typedef v8::String::ExternalAsciiStringResource Resource;
1589  const Resource* resource = string->resource();
1590  if (resource == *resource_pointer) {
1591  sink_->Put(kNativesStringResource, "NativesStringResource");
1592  sink_->PutSection(i, "NativesStringResourceEnd");
1593  bytes_processed_so_far_ += sizeof(resource);
1594  return;
1595  }
1596  }
1597  }
1598  // One of the strings in the natives cache should match the resource. We
1599  // can't serialize any other kinds of external strings.
1600  UNREACHABLE();
1601 }
1602 
1603 
1604 void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
1605  Address object_start = object_->address();
1606  int up_to_offset = static_cast<int>(up_to - object_start);
1607  int skipped = up_to_offset - bytes_processed_so_far_;
1608  // This assert will fail if the reloc info gives us the target_address_address
1609  // locations in a non-ascending order. Luckily that doesn't happen.
1610  ASSERT(skipped >= 0);
1611  if (skipped != 0) {
1612  Address base = object_start + bytes_processed_so_far_;
1613 #define RAW_CASE(index, length) \
1614  if (skipped == length) { \
1615  sink_->PutSection(kRawData + index, "RawDataFixed"); \
1616  } else /* NOLINT */
1617  COMMON_RAW_LENGTHS(RAW_CASE)
1618 #undef RAW_CASE
1619  { /* NOLINT */
1620  sink_->Put(kRawData, "RawData");
1621  sink_->PutInt(skipped, "length");
1622  }
1623  for (int i = 0; i < skipped; i++) {
1624  unsigned int data = base[i];
1625  sink_->PutSection(data, "Byte");
1626  }
1627  bytes_processed_so_far_ += skipped;
1628  }
1629 }
1630 
1631 
1632 int Serializer::SpaceOfObject(HeapObject* object) {
1633  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1634  AllocationSpace s = static_cast<AllocationSpace>(i);
1635  if (HEAP->InSpace(object, s)) {
1636  if (i == LO_SPACE) {
1637  if (object->IsCode()) {
1638  return kLargeCode;
1639  } else if (object->IsFixedArray()) {
1640  return kLargeFixedArray;
1641  } else {
1642  return kLargeData;
1643  }
1644  }
1645  return i;
1646  }
1647  }
1648  UNREACHABLE();
1649  return 0;
1650 }
1651 
1652 
1653 int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
1654  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1655  AllocationSpace s = static_cast<AllocationSpace>(i);
1656  if (HEAP->InSpace(object, s)) {
1657  return i;
1658  }
1659  }
1660  UNREACHABLE();
1661  return 0;
1662 }
1663 
1664 
1665 int Serializer::Allocate(int space, int size, bool* new_page) {
1666  CHECK(space >= 0 && space < kNumberOfSpaces);
1667  if (SpaceIsLarge(space)) {
1668  // In large object space we merely number the objects instead of trying to
1669  // determine some sort of address.
1670  *new_page = true;
1671  large_object_total_ += size;
1672  return fullness_[LO_SPACE]++;
1673  }
1674  *new_page = false;
1675  if (fullness_[space] == 0) {
1676  *new_page = true;
1677  }
1678  if (SpaceIsPaged(space)) {
1679  // Paged spaces are a little special. We encode their addresses as if the
1680  // pages were all contiguous and each page were filled up in the range
1681  // 0 - Page::kObjectAreaSize. In practice the pages may not be contiguous
1682  // and allocation does not start at offset 0 in the page, but this scheme
1683  // means the deserializer can get the page number quickly by shifting the
1684  // serialized address.
1686  int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1));
1687  CHECK(size <= SpaceAreaSize(space));
1688  if (used_in_this_page + size > SpaceAreaSize(space)) {
1689  *new_page = true;
1690  fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
1691  }
1692  }
1693  int allocation_address = fullness_[space];
1694  fullness_[space] = allocation_address + size;
1695  return allocation_address;
1696 }
1697 
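Because the encoding above treats pages as contiguous and power-of-two sized, the deserializer can split a serialized address into a page number and an in-page offset with one shift and one mask, exactly as the comment promises. A standalone sketch of that decode; the kPageSizeBits value here is illustrative (V8's constant lives in v8globals.h):

#include <cstdint>
#include <cstdio>

static const int kPageSizeBits = 20;
static const uint32_t kPageSize = 1u << kPageSizeBits;

int main() {
  // An address as Serializer::Allocate would hand out for a paged space:
  uint32_t serialized = 3 * kPageSize + 0x128;
  uint32_t page_number = serialized >> kPageSizeBits;  // "get the page number quickly"
  uint32_t page_offset = serialized & (kPageSize - 1); // offset within the page
  printf("page %u, offset 0x%x\n", page_number, page_offset);
  return 0;
}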
1698 
1699 int Serializer::SpaceAreaSize(int space) {
1700  if (space == CODE_SPACE) {
1701  return isolate_->memory_allocator()->CodePageAreaSize();
1702  } else {
1703  return Page::kPageSize - Page::kObjectStartOffset;
1704  }
1705 }
1706 
1707 
1708 } } // namespace v8::internal
byte * Address
Definition: globals.h:172
Object ** roots_array_start()
Definition: heap.h:1222
static const int kInvalidRootIndex
Definition: serialize.h:486
void VisitCodeTarget(RelocInfo *target)
Definition: serialize.cc:1555
#define COMMON_RAW_LENGTHS(f)
Definition: serialize.h:190
int CurrentAllocationAddress(int space)
Definition: serialize.h:464
SerializationAddressMapper address_mapper_
Definition: serialize.h:574
#define CHECK_EQ(expected, value)
Definition: checks.h:219
static int CodePageAreaSize()
Definition: spaces.h:1041
void CopyRaw(byte *to, int number_of_bytes)
Definition: serialize.h:324
void AddMapping(HeapObject *obj, int to)
Definition: serialize.h:426
static const int kRootArrayNumberOfLowConstantEncodings
Definition: serialize.h:281
bool InNewSpace(Object *object)
Definition: heap-inl.h:292
virtual void Serialize(Object **o)
Definition: serialize.cc:1150
HandleScopeImplementer * handle_scope_implementer()
Definition: isolate.h:849
static bool too_late_to_enable_now_
Definition: serialize.h:572
#define SIXTEEN_CASES(byte_code)
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5636
const int kTypeCodeCount
Definition: serialize.h:53
#define LOG(isolate, Call)
Definition: log.h:81
static int RootArrayConstantFromByteCode(int byte_code)
Definition: serialize.h:282
static Object * GetObjectFromEntryAddress(Address location_of_address)
Definition: objects-inl.h:3391
#define ACCESSOR_DESCRIPTOR_DECLARATION(name)
static Vector< const char > GetRawScriptSource(int index)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
static HeapObject * cast(Object *obj)
T Max(T a, T b)
Definition: utils.h:222
virtual void SerializeStrongReferences()
Definition: serialize.cc:1136
static const int kOldSpaceRoots
Definition: heap.h:1572
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point)
Definition: serialize.cc:1307
static Map * cast(Object *obj)
static const int kRootArrayHighConstants
Definition: serialize.h:278
Serializer(SnapshotByteSink *sink)
Definition: serialize.cc:1115
#define CASE_BODY(where, how, within, space_number_if_any, offset_from_start)
static const intptr_t kPageAlignmentMask
Definition: spaces.h:704
int SpaceAreaSize(int space)
Definition: serialize.cc:1699
void VisitRuntimeEntry(RelocInfo *reloc)
Definition: serialize.cc:1536
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:5648
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:246
unsigned short uint16_t
Definition: unicode.cc:46
void VisitPointers(Object **start, Object **end)
Definition: serialize.cc:1171
ThreadManager * thread_manager()
Definition: isolate.h:867
void DeserializePartial(Object **root)
Definition: serialize.cc:699
static int CodeForRepeats(int repeats)
Definition: serialize.h:268
#define CHECK(condition)
Definition: checks.h:56
static void Iterate(ObjectVisitor *visitor)
Definition: serialize.cc:1203
void PutInt(uintptr_t integer, const char *description)
Definition: serialize.cc:1104
static ExternalAsciiString * cast(Object *obj)
intptr_t root_index_wave_front_
Definition: serialize.h:575
static const int kPageSize
Definition: spaces.h:695
static Code * cast(Object *obj)
SnapshotByteSink * sink_
Definition: serialize.h:567
bool IsDefaultIsolate() const
Definition: isolate.h:467
#define RUNTIME_FUNCTION_LIST(F)
Definition: runtime.h:483
StoreBuffer * store_buffer()
Definition: heap.h:1516
void VisitCodeEntry(Address entry_address)
Definition: serialize.cc:1565
#define ALL_SPACES(where, how, within)
void VisitPointers(Object **start, Object **end)
Definition: serialize.cc:1458
#define STATS_COUNTER_LIST_2(SC)
Definition: v8-counters.h:116
void IterateBody(InstanceType type, int object_size, ObjectVisitor *v)
Definition: objects.cc:1313
uint8_t byte
Definition: globals.h:171
static bool SpaceIsPaged(int space)
Definition: serialize.h:299
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
int fullness_[LAST_SPACE+1]
Definition: serialize.h:566
static bool SpaceIsLarge(int space)
Definition: serialize.h:298
Deserializer(SnapshotByteSource *source)
Definition: serialize.cc:585
virtual int PartialSnapshotCacheIndex(HeapObject *o)
Definition: serialize.cc:1220
const int kPointerSize
Definition: globals.h:234
MemoryAllocator * memory_allocator()
Definition: isolate.h:830
static int SpaceOfObject(HeapObject *object)
Definition: serialize.cc:1632
#define FOUR_CASES(byte_code)
T * NewArray(size_t size)
Definition: allocation.h:83
SerializationAddressMapper * address_mapper()
Definition: serialize.h:481
#define COUNTER_ENTRY(name, caption)
#define DEF_ENTRY_A(name, kind, state, extra)
GlobalHandles * global_handles()
Definition: isolate.h:865
ExternalReferenceEncoder * external_reference_encoder_
Definition: serialize.h:569
Entry * Lookup(void *key, uint32_t hash, bool insert, AllocationPolicy allocator=AllocationPolicy())
Definition: hashmap.h:130
const int kDebugIdShift
Definition: serialize.h:60
T RoundUp(T x, intptr_t m)
Definition: utils.h:150
void SerializeReferenceToPreviousObject(int space, int address, HowToCode how_to_code, WhereToPoint where_to_point)
Definition: serialize.cc:1272
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:3380
static const int kRootArrayNumberOfConstantEncodings
Definition: serialize.h:280
uint32_t Encode(Address key) const
Definition: serialize.cc:527
const int kBitsPerByte
Definition: globals.h:251
static bool serialization_enabled_
Definition: serialize.h:570
bool IsPowerOf2(T x)
Definition: utils.h:50
static int RepeatsForCode(int byte_code)
Definition: serialize.h:272
void VisitExternalReferences(Address *start, Address *end)
Definition: serialize.cc:1508
bool has_installed_extensions()
Definition: isolate.h:915
#define RAW_CASE(index, size)
List< internal::Object ** > * blocks()
Definition: api.h:447
static const int kObjectStartOffset
Definition: spaces.h:501
#define ACCESSOR_DESCRIPTOR_LIST(V)
Definition: accessors.h:39
void set_global_contexts_list(Object *object)
Definition: heap.h:1151
#define BUILTIN_LIST_DEBUG_A(V)
Definition: builtins.h:221
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point)=0
static int SpaceOfAlreadySerializedObject(HeapObject *object)
Definition: serialize.cc:1653
#define IC_UTIL_LIST(ICU)
Definition: ic.h:40
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)
Definition: isolate.h:137
int Allocate(int space, int size, bool *new_page_started)
Definition: serialize.cc:1665
static const int kPartialSnapshotCacheCapacity
Definition: isolate.h:980
const int kObjectAlignmentBits
Definition: v8globals.h:43
#define CASE_STATEMENT(where, how, within, space_number)
#define HEAP
Definition: isolate.h:1408
void VisitExternalAsciiString(v8::String::ExternalAsciiStringResource **resource)
Definition: serialize.cc:1580
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
InstanceType instance_type()
Definition: objects-inl.h:2864
#define ONE_PER_SPACE(where, how, within)
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1163
const int kFirstTypeCode
Definition: serialize.h:54
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
#define BUILD_NAME_LITERAL(CamelName, hacker_name)
#define ONE_PER_CODE_SPACE(where, how, within)
const int kPageSizeBits
Definition: v8globals.h:100
static const int kNativesStringResource
Definition: serialize.h:262
static void FlushICache(void *start, size_t size)
#define IC_ENTRY(name)
int RootIndex(HeapObject *heap_object, HowToCode from)
Definition: serialize.cc:1246
static void SetSnapshotCacheSize(int size)
Definition: serialize.cc:1215
void DeleteArray(T *array)
Definition: allocation.h:91
static const int kRootArrayLowConstants
Definition: serialize.h:276
#define DEF_ENTRY_C(name, ignored)
virtual void Put(int byte, const char *description)=0
void VisitGlobalPropertyCell(RelocInfo *rinfo)
Definition: serialize.cc:1573
static ExternalReferenceTable * instance(Isolate *isolate)
Definition: serialize.cc:65
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject *o)
Definition: serialize.h:602
void PutRoot(int index, HeapObject *object, HowToCode how, WhereToPoint where)
Definition: serialize.cc:1350
#define BUILTIN_LIST_C(V)
Definition: builtins.h:42
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point)
Definition: serialize.cc:1372
ThreadState * FirstThreadStateInUse()
Definition: v8threads.cc:281
#define ARRAY_SIZE(a)
Definition: globals.h:295
#define STATS_COUNTER_LIST_1(SC)
Definition: v8-counters.h:58
FlagType type() const
Definition: flags.cc:1358
#define BUILTIN_LIST_A(V)
Definition: builtins.h:66
virtual void PutSection(int byte, const char *description)
Definition: serialize.h:395
void VisitExternalReference(RelocInfo *rinfo)
Definition: serialize.cc:1522
void VisitEmbeddedPointer(RelocInfo *target)
Definition: serialize.cc:1498
const char * NameOfAddress(Address key) const
Definition: serialize.cc:535