v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
serialize.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "accessors.h"
31 #include "api.h"
32 #include "bootstrapper.h"
33 #include "execution.h"
34 #include "global-handles.h"
35 #include "ic-inl.h"
36 #include "natives.h"
37 #include "platform.h"
38 #include "runtime.h"
39 #include "serialize.h"
40 #include "snapshot.h"
41 #include "stub-cache.h"
42 #include "v8threads.h"
43 
44 namespace v8 {
45 namespace internal {
46 
47 
48 // -----------------------------------------------------------------------------
49 // Coding of external references.
50 
51 // The encoding of an external reference. The type is in the high word.
52 // The id is in the low word.
53 static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
54  return static_cast<uint32_t>(type) << 16 | id;
55 }
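// Illustration (not part of the original file): EncodeExternal(RUNTIME_FUNCTION, 7)
// produces (RUNTIME_FUNCTION << 16) | 7, so the decoder can recover the type from
// the high 16 bits and the id from the low 16 bits of the key.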
56 
57 
58 static int* GetInternalPointer(StatsCounter* counter) {
59  // All counters refer to dummy_counter, if deserializing happens without
60  // setting up counters.
61  static int dummy_counter = 0;
62  return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
63 }
64 
65 
66 ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
67  ExternalReferenceTable* external_reference_table =
68  isolate->external_reference_table();
69  if (external_reference_table == NULL) {
70  external_reference_table = new ExternalReferenceTable(isolate);
71  isolate->set_external_reference_table(external_reference_table);
72  }
73  return external_reference_table;
74 }
75 
76 
77 void ExternalReferenceTable::AddFromId(TypeCode type,
78  uint16_t id,
79  const char* name,
80  Isolate* isolate) {
81  Address address;
82  switch (type) {
83  case C_BUILTIN: {
84  ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
85  address = ref.address();
86  break;
87  }
88  case BUILTIN: {
89  ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
90  address = ref.address();
91  break;
92  }
93  case RUNTIME_FUNCTION: {
94  ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
95  address = ref.address();
96  break;
97  }
98  case IC_UTILITY: {
99  ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
100  isolate);
101  address = ref.address();
102  break;
103  }
104  default:
105  UNREACHABLE();
106  return;
107  }
108  Add(address, type, id, name);
109 }
110 
111 
112 void ExternalReferenceTable::Add(Address address,
113  TypeCode type,
114  uint16_t id,
115  const char* name) {
116  ASSERT_NE(NULL, address);
117  ExternalReferenceEntry entry;
118  entry.address = address;
119  entry.code = EncodeExternal(type, id);
120  entry.name = name;
121  ASSERT_NE(0, entry.code);
122  refs_.Add(entry);
123  if (id > max_id_[type]) max_id_[type] = id;
124 }
125 
126 
127 void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
128  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
129  max_id_[type_code] = 0;
130  }
131 
132  // The following populates all of the different type of external references
133  // into the ExternalReferenceTable.
134  //
135  // NOTE: This function was originally 100k of code. It has since been
136  // rewritten to be mostly table driven, as the callback macro style tends to
137  // very easily cause code bloat. Please be careful in the future when adding
138  // new references.
139 
140  struct RefTableEntry {
141  TypeCode type;
142  uint16_t id;
143  const char* name;
144  };
145 
146  static const RefTableEntry ref_table[] = {
147  // Builtins
148 #define DEF_ENTRY_C(name, ignored) \
149  { C_BUILTIN, \
150  Builtins::c_##name, \
151  "Builtins::" #name },
152 
153  BUILTIN_LIST_C(DEF_ENTRY_C)
154 #undef DEF_ENTRY_C
155 
156 #define DEF_ENTRY_C(name, ignored) \
157  { BUILTIN, \
158  Builtins::k##name, \
159  "Builtins::" #name },
160 #define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)
161 
162  BUILTIN_LIST_C(DEF_ENTRY_C)
163  BUILTIN_LIST_A(DEF_ENTRY_A)
164  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
165 #undef DEF_ENTRY_C
166 #undef DEF_ENTRY_A
167 
168  // Runtime functions
169 #define RUNTIME_ENTRY(name, nargs, ressize) \
170  { RUNTIME_FUNCTION, \
171  Runtime::k##name, \
172  "Runtime::" #name },
173 
174  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
175 #undef RUNTIME_ENTRY
176 
177  // IC utilities
178 #define IC_ENTRY(name) \
179  { IC_UTILITY, \
180  IC::k##name, \
181  "IC::" #name },
182 
183  IC_UTIL_LIST(IC_ENTRY)
184 #undef IC_ENTRY
185  }; // end of ref_table[].
186 
187  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
188  AddFromId(ref_table[i].type,
189  ref_table[i].id,
190  ref_table[i].name,
191  isolate);
192  }
193 
194 #ifdef ENABLE_DEBUGGER_SUPPORT
195  // Debug addresses
196  Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
197  DEBUG_ADDRESS,
198  Debug::k_after_break_target_address << kDebugIdShift,
199  "Debug::after_break_target_address()");
200  Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
201  DEBUG_ADDRESS,
202  Debug::k_debug_break_slot_address << kDebugIdShift,
203  "Debug::debug_break_slot_address()");
204  Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
205  DEBUG_ADDRESS,
206  Debug::k_debug_break_return_address << kDebugIdShift,
207  "Debug::debug_break_return_address()");
208  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
209  DEBUG_ADDRESS,
210  Debug::k_restarter_frame_function_pointer << kDebugIdShift,
211  "Debug::restarter_frame_function_pointer_address()");
212 #endif
213 
214  // Stat counters
215  struct StatsRefTableEntry {
216  StatsCounter* (Counters::*counter)();
217  uint16_t id;
218  const char* name;
219  };
220 
221  const StatsRefTableEntry stats_ref_table[] = {
222 #define COUNTER_ENTRY(name, caption) \
223  { &Counters::name, \
224  Counters::k_##name, \
225  "Counters::" #name },
226 
227  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
228  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
229 #undef COUNTER_ENTRY
230  }; // end of stats_ref_table[].
231 
232  Counters* counters = isolate->counters();
233  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
234  Add(reinterpret_cast<Address>(GetInternalPointer(
235  (counters->*(stats_ref_table[i].counter))())),
236  STATS_COUNTER,
237  stats_ref_table[i].id,
238  stats_ref_table[i].name);
239  }
240 
241  // Top addresses
242 
243  const char* AddressNames[] = {
244 #define BUILD_NAME_LITERAL(CamelName, hacker_name) \
245  "Isolate::" #hacker_name "_address",
246  FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
247  NULL
248 #undef BUILD_NAME_LITERAL
249  };
250 
251  for (uint16_t i = 0; i < Isolate::kIsolateAddressCount; ++i) {
252  Add(isolate->get_address_from_id((Isolate::AddressId)i),
253  TOP_ADDRESS, i, AddressNames[i]);
254  }
255 
256  // Accessors
257 #define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
258  Add((Address)&Accessors::name, \
259  ACCESSOR, \
260  Accessors::k##name, \
261  "Accessors::" #name);
262 
263  ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
264 #undef ACCESSOR_DESCRIPTOR_DECLARATION
265 
266  StubCache* stub_cache = isolate->stub_cache();
267 
268  // Stub cache tables
269  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
270  STUB_CACHE_TABLE,
271  1,
272  "StubCache::primary_->key");
273  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
274  STUB_CACHE_TABLE,
275  2,
276  "StubCache::primary_->value");
277  Add(stub_cache->map_reference(StubCache::kPrimary).address(),
278  STUB_CACHE_TABLE,
279  3,
280  "StubCache::primary_->map");
281  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
282  STUB_CACHE_TABLE,
283  4,
284  "StubCache::secondary_->key");
285  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
286  STUB_CACHE_TABLE,
287  5,
288  "StubCache::secondary_->value");
289  Add(stub_cache->map_reference(StubCache::kSecondary).address(),
290  STUB_CACHE_TABLE,
291  6,
292  "StubCache::secondary_->map");
293 
294  // Runtime entries
295  Add(ExternalReference::perform_gc_function(isolate).address(),
296  RUNTIME_ENTRY,
297  1,
298  "Runtime::PerformGC");
299  Add(ExternalReference::fill_heap_number_with_random_function(
300  isolate).address(),
301  RUNTIME_ENTRY,
302  2,
303  "V8::FillHeapNumberWithRandom");
304  Add(ExternalReference::random_uint32_function(isolate).address(),
305  RUNTIME_ENTRY,
306  3,
307  "V8::Random");
308  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
309  RUNTIME_ENTRY,
310  4,
311  "HandleScope::DeleteExtensions");
312  Add(ExternalReference::
313  incremental_marking_record_write_function(isolate).address(),
314  RUNTIME_ENTRY,
315  5,
316  "IncrementalMarking::RecordWrite");
317  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
318  RUNTIME_ENTRY,
319  6,
320  "StoreBuffer::StoreBufferOverflow");
321  Add(ExternalReference::
322  incremental_evacuation_record_write_function(isolate).address(),
323  RUNTIME_ENTRY,
324  7,
325  "IncrementalMarking::RecordWrite");
326 
327 
328 
329  // Miscellaneous
330  Add(ExternalReference::roots_array_start(isolate).address(),
331  UNCLASSIFIED,
332  3,
333  "Heap::roots_array_start()");
334  Add(ExternalReference::address_of_stack_limit(isolate).address(),
335  UNCLASSIFIED,
336  4,
337  "StackGuard::address_of_jslimit()");
338  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
339  UNCLASSIFIED,
340  5,
341  "StackGuard::address_of_real_jslimit()");
342 #ifndef V8_INTERPRETED_REGEXP
343  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
344  UNCLASSIFIED,
345  6,
346  "RegExpStack::limit_address()");
347  Add(ExternalReference::address_of_regexp_stack_memory_address(
348  isolate).address(),
349  UNCLASSIFIED,
350  7,
351  "RegExpStack::memory_address()");
352  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
353  UNCLASSIFIED,
354  8,
355  "RegExpStack::memory_size()");
356  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
357  UNCLASSIFIED,
358  9,
359  "OffsetsVector::static_offsets_vector");
360 #endif // V8_INTERPRETED_REGEXP
361  Add(ExternalReference::new_space_start(isolate).address(),
362  UNCLASSIFIED,
363  10,
364  "Heap::NewSpaceStart()");
365  Add(ExternalReference::new_space_mask(isolate).address(),
366  UNCLASSIFIED,
367  11,
368  "Heap::NewSpaceMask()");
369  Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(),
370  UNCLASSIFIED,
371  12,
372  "Heap::always_allocate_scope_depth()");
373  Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
374  UNCLASSIFIED,
375  14,
376  "Heap::NewSpaceAllocationLimitAddress()");
377  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
378  UNCLASSIFIED,
379  15,
380  "Heap::NewSpaceAllocationTopAddress()");
381 #ifdef ENABLE_DEBUGGER_SUPPORT
382  Add(ExternalReference::debug_break(isolate).address(),
383  UNCLASSIFIED,
384  16,
385  "Debug::Break()");
386  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
387  UNCLASSIFIED,
388  17,
389  "Debug::step_in_fp_addr()");
390 #endif
391  Add(ExternalReference::double_fp_operation(Token::ADD, isolate).address(),
392  UNCLASSIFIED,
393  18,
394  "add_two_doubles");
395  Add(ExternalReference::double_fp_operation(Token::SUB, isolate).address(),
396  UNCLASSIFIED,
397  19,
398  "sub_two_doubles");
399  Add(ExternalReference::double_fp_operation(Token::MUL, isolate).address(),
400  UNCLASSIFIED,
401  20,
402  "mul_two_doubles");
403  Add(ExternalReference::double_fp_operation(Token::DIV, isolate).address(),
404  UNCLASSIFIED,
405  21,
406  "div_two_doubles");
407  Add(ExternalReference::double_fp_operation(Token::MOD, isolate).address(),
408  UNCLASSIFIED,
409  22,
410  "mod_two_doubles");
411  Add(ExternalReference::compare_doubles(isolate).address(),
412  UNCLASSIFIED,
413  23,
414  "compare_doubles");
415 #ifndef V8_INTERPRETED_REGEXP
416  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
417  UNCLASSIFIED,
418  24,
419  "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
420  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
421  UNCLASSIFIED,
422  25,
423  "RegExpMacroAssembler*::CheckStackGuardState()");
424  Add(ExternalReference::re_grow_stack(isolate).address(),
425  UNCLASSIFIED,
426  26,
427  "NativeRegExpMacroAssembler::GrowStack()");
428  Add(ExternalReference::re_word_character_map().address(),
429  UNCLASSIFIED,
430  27,
431  "NativeRegExpMacroAssembler::word_character_map");
432 #endif // V8_INTERPRETED_REGEXP
433  // Keyed lookup cache.
434  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
435  UNCLASSIFIED,
436  28,
437  "KeyedLookupCache::keys()");
438  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
439  UNCLASSIFIED,
440  29,
441  "KeyedLookupCache::field_offsets()");
442  Add(ExternalReference::transcendental_cache_array_address(isolate).address(),
443  UNCLASSIFIED,
444  30,
445  "TranscendentalCache::caches()");
446  Add(ExternalReference::handle_scope_next_address().address(),
447  UNCLASSIFIED,
448  31,
449  "HandleScope::next");
450  Add(ExternalReference::handle_scope_limit_address().address(),
451  UNCLASSIFIED,
452  32,
453  "HandleScope::limit");
454  Add(ExternalReference::handle_scope_level_address().address(),
455  UNCLASSIFIED,
456  33,
457  "HandleScope::level");
458  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
459  UNCLASSIFIED,
460  34,
461  "Deoptimizer::New()");
462  Add(ExternalReference::compute_output_frames_function(isolate).address(),
463  UNCLASSIFIED,
464  35,
465  "Deoptimizer::ComputeOutputFrames()");
466  Add(ExternalReference::address_of_min_int().address(),
467  UNCLASSIFIED,
468  36,
469  "LDoubleConstant::min_int");
470  Add(ExternalReference::address_of_one_half().address(),
471  UNCLASSIFIED,
472  37,
473  "LDoubleConstant::one_half");
474  Add(ExternalReference::isolate_address().address(),
475  UNCLASSIFIED,
476  38,
477  "isolate");
478  Add(ExternalReference::address_of_minus_zero().address(),
479  UNCLASSIFIED,
480  39,
481  "LDoubleConstant::minus_zero");
482  Add(ExternalReference::address_of_negative_infinity().address(),
483  UNCLASSIFIED,
484  40,
485  "LDoubleConstant::negative_infinity");
486  Add(ExternalReference::power_double_double_function(isolate).address(),
487  UNCLASSIFIED,
488  41,
489  "power_double_double_function");
490  Add(ExternalReference::power_double_int_function(isolate).address(),
491  UNCLASSIFIED,
492  42,
493  "power_double_int_function");
494  Add(ExternalReference::store_buffer_top(isolate).address(),
495  UNCLASSIFIED,
496  43,
497  "store_buffer_top");
498  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
499  UNCLASSIFIED,
500  44,
501  "canonical_nan");
502  Add(ExternalReference::address_of_the_hole_nan().address(),
503  UNCLASSIFIED,
504  45,
505  "the_hole_nan");
506  Add(ExternalReference::get_date_field_function(isolate).address(),
507  UNCLASSIFIED,
508  46,
509  "JSDate::GetField");
510  Add(ExternalReference::date_cache_stamp(isolate).address(),
511  UNCLASSIFIED,
512  47,
513  "date_cache_stamp");
514  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
515  UNCLASSIFIED,
516  48,
517  "address_of_pending_message_obj");
518  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
519  UNCLASSIFIED,
520  49,
521  "address_of_has_pending_message");
522  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
523  UNCLASSIFIED,
524  50,
525  "pending_message_script");
526 }
527 
528 
529 ExternalReferenceEncoder::ExternalReferenceEncoder()
530  : encodings_(Match),
531  isolate_(Isolate::Current()) {
532  ExternalReferenceTable* external_references =
533  ExternalReferenceTable::instance(isolate_);
534  for (int i = 0; i < external_references->size(); ++i) {
535  Put(external_references->address(i), i);
536  }
537 }
538 
539 
540 uint32_t ExternalReferenceEncoder::Encode(Address key) const {
541  int index = IndexOf(key);
542  ASSERT(key == NULL || index >= 0);
543  return index >=0 ?
544  ExternalReferenceTable::instance(isolate_)->code(index) : 0;
545 }
546 
547 
548 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
549  int index = IndexOf(key);
550  return index >= 0 ?
551  ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
552 }
553 
554 
555 int ExternalReferenceEncoder::IndexOf(Address key) const {
556  if (key == NULL) return -1;
557  HashMap::Entry* entry =
558  const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
559  return entry == NULL
560  ? -1
561  : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
562 }
563 
564 
565 void ExternalReferenceEncoder::Put(Address key, int index) {
566  HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
567  entry->value = reinterpret_cast<void*>(index);
568 }
569 
570 
571 ExternalReferenceDecoder::ExternalReferenceDecoder()
572  : encodings_(NewArray<Address*>(kTypeCodeCount)),
573  isolate_(Isolate::Current()) {
574  ExternalReferenceTable* external_references =
575  ExternalReferenceTable::instance(isolate_);
576  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
577  int max = external_references->max_id(type) + 1;
578  encodings_[type] = NewArray<Address>(max + 1);
579  }
580  for (int i = 0; i < external_references->size(); ++i) {
581  Put(external_references->code(i), external_references->address(i));
582  }
583 }
584 
585 
586 ExternalReferenceDecoder::~ExternalReferenceDecoder() {
587  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
588  DeleteArray(encodings_[type]);
589  }
590  DeleteArray(encodings_);
591 }
592 
593 
594 bool Serializer::serialization_enabled_ = false;
595 bool Serializer::too_late_to_enable_now_ = false;
596 
597 
598 Deserializer::Deserializer(SnapshotByteSource* source)
599  : isolate_(NULL),
600  source_(source),
601  external_reference_decoder_(NULL) {
602  for (int i = 0; i < LAST_SPACE + 1; i++) {
603  reservations_[i] = kUninitializedReservation;
604  }
605 }
606 
607 
608 void Deserializer::Deserialize() {
609  isolate_ = Isolate::Current();
610  ASSERT(isolate_ != NULL);
611  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
612  // No active threads.
613  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
614  // No active handles.
615  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
616  ASSERT_EQ(NULL, external_reference_decoder_);
617  external_reference_decoder_ = new ExternalReferenceDecoder();
618  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
619  isolate_->heap()->RepairFreeListsAfterBoot();
620  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
621 
622  isolate_->heap()->set_native_contexts_list(
623  isolate_->heap()->undefined_value());
624 
625  // Update data pointers to the external strings containing natives sources.
626  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
627  Object* source = isolate_->heap()->natives_source_cache()->get(i);
628  if (!source->IsUndefined()) {
629  ExternalAsciiString::cast(source)->update_data_cache();
630  }
631  }
632 
633  // Issue code events for newly deserialized code objects.
634  LOG_CODE_EVENT(isolate_, LogCodeObjects());
635  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
636 }
637 
638 
639 void Deserializer::DeserializePartial(Object** root) {
640  isolate_ = Isolate::Current();
641  for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) {
642  ASSERT(reservations_[i] != kUninitializedReservation);
643  }
644  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
645  if (external_reference_decoder_ == NULL) {
646  external_reference_decoder_ = new ExternalReferenceDecoder();
647  }
648 
649  // Keep track of the code space start and end pointers in case new
650  // code objects were deserialized.
651  OldSpace* code_space = isolate_->heap()->code_space();
652  Address start_address = code_space->top();
653  VisitPointer(root);
654 
655  // There's no code deserialized here. If this assert fires
656  // then that's changed and logging should be added to notify
657  // the profiler et al of the new code.
658  CHECK_EQ(start_address, code_space->top());
659 }
660 
661 
662 Deserializer::~Deserializer() {
663  ASSERT(source_->AtEOF());
664  if (external_reference_decoder_) {
665  delete external_reference_decoder_;
666  external_reference_decoder_ = NULL;
667  }
668 }
669 
670 
671 // This is called on the roots. It is the driver of the deserialization
672 // process. It is also called on the body of each function.
673 void Deserializer::VisitPointers(Object** start, Object** end) {
674  // The space must be new space. Any other space would cause ReadChunk to try
675  // to update the remembered set using NULL as the address.
676  ReadChunk(start, end, NEW_SPACE, NULL);
677 }
678 
679 
680 // This routine writes the new object into the pointer provided and then
681 // returns true if the new object was in young space and false otherwise.
682 // The reason for this strange interface is that otherwise the object is
683 // written very late, which means the FreeSpace map is not set up by the
684 // time we need to use it to mark the space at the end of a page free.
685 void Deserializer::ReadObject(int space_number,
686  Object** write_back) {
687  int size = source_->GetInt() << kObjectAlignmentBits;
688  Address address = Allocate(space_number, size);
689  *write_back = HeapObject::FromAddress(address);
690  Object** current = reinterpret_cast<Object**>(address);
691  Object** limit = current + (size >> kPointerSizeLog2);
692  if (FLAG_log_snapshot_positions) {
693  LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
694  }
695  ReadChunk(current, limit, space_number, address);
696 #ifdef DEBUG
697  bool is_codespace = (space_number == CODE_SPACE);
698  ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
699 #endif
700 }
701 
702 void Deserializer::ReadChunk(Object** current,
703  Object** limit,
704  int source_space,
705  Address current_object_address) {
706  Isolate* const isolate = isolate_;
707  // Write barrier support costs around 1% in startup time. In fact there
708  // are no new space objects in current boot snapshots, so it's not needed,
709  // but that may change.
710  bool write_barrier_needed = (current_object_address != NULL &&
711  source_space != NEW_SPACE &&
712  source_space != CELL_SPACE &&
713  source_space != CODE_SPACE &&
714  source_space != OLD_DATA_SPACE);
715  while (current < limit) {
716  int data = source_->Get();
717  switch (data) {
718 #define CASE_STATEMENT(where, how, within, space_number) \
719  case where + how + within + space_number: \
720  ASSERT((where & ~kPointedToMask) == 0); \
721  ASSERT((how & ~kHowToCodeMask) == 0); \
722  ASSERT((within & ~kWhereToPointMask) == 0); \
723  ASSERT((space_number & ~kSpaceMask) == 0);
724 
725 #define CASE_BODY(where, how, within, space_number_if_any) \
726  { \
727  bool emit_write_barrier = false; \
728  bool current_was_incremented = false; \
729  int space_number = space_number_if_any == kAnyOldSpace ? \
730  (data & kSpaceMask) : space_number_if_any; \
731  if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
732  ReadObject(space_number, current); \
733  emit_write_barrier = (space_number == NEW_SPACE); \
734  } else { \
735  Object* new_object = NULL; /* May not be a real Object pointer. */ \
736  if (where == kNewObject) { \
737  ReadObject(space_number, &new_object); \
738  } else if (where == kRootArray) { \
739  int root_id = source_->GetInt(); \
740  new_object = isolate->heap()->roots_array_start()[root_id]; \
741  emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
742  } else if (where == kPartialSnapshotCache) { \
743  int cache_index = source_->GetInt(); \
744  new_object = isolate->serialize_partial_snapshot_cache() \
745  [cache_index]; \
746  emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
747  } else if (where == kExternalReference) { \
748  int skip = source_->GetInt(); \
749  current = reinterpret_cast<Object**>(reinterpret_cast<Address>( \
750  current) + skip); \
751  int reference_id = source_->GetInt(); \
752  Address address = external_reference_decoder_-> \
753  Decode(reference_id); \
754  new_object = reinterpret_cast<Object*>(address); \
755  } else if (where == kBackref) { \
756  emit_write_barrier = (space_number == NEW_SPACE); \
757  new_object = GetAddressFromEnd(data & kSpaceMask); \
758  } else { \
759  ASSERT(where == kBackrefWithSkip); \
760  int skip = source_->GetInt(); \
761  current = reinterpret_cast<Object**>( \
762  reinterpret_cast<Address>(current) + skip); \
763  emit_write_barrier = (space_number == NEW_SPACE); \
764  new_object = GetAddressFromEnd(data & kSpaceMask); \
765  } \
766  if (within == kInnerPointer) { \
767  if (space_number != CODE_SPACE || new_object->IsCode()) { \
768  Code* new_code_object = reinterpret_cast<Code*>(new_object); \
769  new_object = reinterpret_cast<Object*>( \
770  new_code_object->instruction_start()); \
771  } else { \
772  ASSERT(space_number == CODE_SPACE); \
773  JSGlobalPropertyCell* cell = \
774  JSGlobalPropertyCell::cast(new_object); \
775  new_object = reinterpret_cast<Object*>( \
776  cell->ValueAddress()); \
777  } \
778  } \
779  if (how == kFromCode) { \
780  Address location_of_branch_data = \
781  reinterpret_cast<Address>(current); \
782  Assembler::deserialization_set_special_target_at( \
783  location_of_branch_data, \
784  reinterpret_cast<Address>(new_object)); \
785  location_of_branch_data += Assembler::kSpecialTargetSize; \
786  current = reinterpret_cast<Object**>(location_of_branch_data); \
787  current_was_incremented = true; \
788  } else { \
789  *current = new_object; \
790  } \
791  } \
792  if (emit_write_barrier && write_barrier_needed) { \
793  Address current_address = reinterpret_cast<Address>(current); \
794  isolate->heap()->RecordWrite( \
795  current_object_address, \
796  static_cast<int>(current_address - current_object_address)); \
797  } \
798  if (!current_was_incremented) { \
799  current++; \
800  } \
801  break; \
802  } \
803 
804 // This generates a case and a body for the new space (which has to do extra
805 // write barrier handling) and handles the other spaces with 8 fall-through
806 // cases and one body.
807 #define ALL_SPACES(where, how, within) \
808  CASE_STATEMENT(where, how, within, NEW_SPACE) \
809  CASE_BODY(where, how, within, NEW_SPACE) \
810  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
811  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
812  CASE_STATEMENT(where, how, within, CODE_SPACE) \
813  CASE_STATEMENT(where, how, within, CELL_SPACE) \
814  CASE_STATEMENT(where, how, within, MAP_SPACE) \
815  CASE_BODY(where, how, within, kAnyOldSpace)
816 
817 #define FOUR_CASES(byte_code) \
818  case byte_code: \
819  case byte_code + 1: \
820  case byte_code + 2: \
821  case byte_code + 3:
822 
823 #define SIXTEEN_CASES(byte_code) \
824  FOUR_CASES(byte_code) \
825  FOUR_CASES(byte_code + 4) \
826  FOUR_CASES(byte_code + 8) \
827  FOUR_CASES(byte_code + 12)
828 
829 #define COMMON_RAW_LENGTHS(f) \
830  f(1) \
831  f(2) \
832  f(3) \
833  f(4) \
834  f(5) \
835  f(6) \
836  f(7) \
837  f(8) \
838  f(9) \
839  f(10) \
840  f(11) \
841  f(12) \
842  f(13) \
843  f(14) \
844  f(15) \
845  f(16) \
846  f(17) \
847  f(18) \
848  f(19) \
849  f(20) \
850  f(21) \
851  f(22) \
852  f(23) \
853  f(24) \
854  f(25) \
855  f(26) \
856  f(27) \
857  f(28) \
858  f(29) \
859  f(30) \
860  f(31)
861 
862  // We generate 15 cases and bodies that process special tags that combine
863  // the raw data tag and the length into one byte.
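// (Illustrative note: a single byte equal to kRawData + 3, for example, makes the
// deserializer copy 3 * kPointerSize bytes verbatim and advance |current| by
// three words, as the RAW_CASE macro below shows.)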
864 #define RAW_CASE(index) \
865  case kRawData + index: { \
866  byte* raw_data_out = reinterpret_cast<byte*>(current); \
867  source_->CopyRaw(raw_data_out, index * kPointerSize); \
868  current = \
869  reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \
870  break; \
871  }
872  COMMON_RAW_LENGTHS(RAW_CASE)
873 #undef RAW_CASE
874 
875  // Deserialize a chunk of raw data that doesn't have one of the popular
876  // lengths.
877  case kRawData: {
878  int size = source_->GetInt();
879  byte* raw_data_out = reinterpret_cast<byte*>(current);
880  source_->CopyRaw(raw_data_out, size);
881  break;
882  }
883 
884  SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance)
885  SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance + 16) {
886  int root_id = RootArrayConstantFromByteCode(data);
887  Object* object = isolate->heap()->roots_array_start()[root_id];
888  ASSERT(!isolate->heap()->InNewSpace(object));
889  *current++ = object;
890  break;
891  }
892 
893  SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance)
894  SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance + 16) {
895  int root_id = RootArrayConstantFromByteCode(data);
896  int skip = source_->GetInt();
897  current = reinterpret_cast<Object**>(
898  reinterpret_cast<intptr_t>(current) + skip);
899  Object* object = isolate->heap()->roots_array_start()[root_id];
900  ASSERT(!isolate->heap()->InNewSpace(object));
901  *current++ = object;
902  break;
903  }
904 
905  case kRepeat: {
906  int repeats = source_->GetInt();
907  Object* object = current[-1];
908  ASSERT(!isolate->heap()->InNewSpace(object));
909  for (int i = 0; i < repeats; i++) current[i] = object;
910  current += repeats;
911  break;
912  }
913 
914  STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
915  Heap::kOldSpaceRoots);
916  STATIC_ASSERT(kMaxRepeats == 13);
917  case kConstantRepeat:
918  FOUR_CASES(kConstantRepeat + 1)
919  FOUR_CASES(kConstantRepeat + 5)
920  FOUR_CASES(kConstantRepeat + 9) {
921  int repeats = RepeatsForCode(data);
922  Object* object = current[-1];
923  ASSERT(!isolate->heap()->InNewSpace(object));
924  for (int i = 0; i < repeats; i++) current[i] = object;
925  current += repeats;
926  break;
927  }
928 
929  // Deserialize a new object and write a pointer to it to the current
930  // object.
931  ALL_SPACES(kNewObject, kPlain, kStartOfObject)
932  // Support for direct instruction pointers in functions. It's an inner
933  // pointer because it points at the entry point, not at the start of the
934  // code object.
935  CASE_STATEMENT(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
936  CASE_BODY(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
937  // Deserialize a new code object and write a pointer to its first
938  // instruction to the current code object.
939  ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
940  // Find a recently deserialized object using its offset from the current
941  // allocation point and write a pointer to it to the current object.
942  ALL_SPACES(kBackref, kPlain, kStartOfObject)
943  ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
944 #if V8_TARGET_ARCH_MIPS
945  // Deserialize a new object from pointer found in code and write
946  // a pointer to it to the current object. Required only for MIPS, and
947  // omitted on the other architectures because it is fully unrolled and
948  // would cause bloat.
949  ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
950  // Find a recently deserialized code object using its offset from the
951  // current allocation point and write a pointer to it to the current
952  // object. Required only for MIPS.
953  ALL_SPACES(kBackref, kFromCode, kStartOfObject)
954  ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
955 #endif
956  // Find a recently deserialized code object using its offset from the
957  // current allocation point and write a pointer to its first instruction
958  // to the current code object or the instruction pointer in a function
959  // object.
960  ALL_SPACES(kBackref, kFromCode, kInnerPointer)
961  ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
962  ALL_SPACES(kBackref, kPlain, kInnerPointer)
963  ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer)
964  // Find an object in the roots array and write a pointer to it to the
965  // current object.
966  CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
967  CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
968  // Find an object in the partial snapshots cache and write a pointer to it
969  // to the current object.
970  CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
971  CASE_BODY(kPartialSnapshotCache,
972  kPlain,
973  kStartOfObject,
974  0)
975  // Find a code entry in the partial snapshots cache and
976  // write a pointer to it to the current object.
977  CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
978  CASE_BODY(kPartialSnapshotCache,
979  kPlain,
980  kInnerPointer,
981  0)
982  // Find an external reference and write a pointer to it to the current
983  // object.
984  CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
985  CASE_BODY(kExternalReference,
986  kPlain,
987  kStartOfObject,
988  0)
989  // Find an external reference and write a pointer to it in the current
990  // code object.
991  CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
992  CASE_BODY(kExternalReference,
993  kFromCode,
994  kStartOfObject,
995  0)
996 
997 #undef CASE_STATEMENT
998 #undef CASE_BODY
999 #undef ALL_SPACES
1000 
1001  case kSkip: {
1002  int size = source_->GetInt();
1003  current = reinterpret_cast<Object**>(
1004  reinterpret_cast<intptr_t>(current) + size);
1005  break;
1006  }
1007 
1008  case kNativesStringResource: {
1009  int index = source_->Get();
1010  Vector<const char> source_vector = Natives::GetRawScriptSource(index);
1011  NativesExternalStringResource* resource =
1012  new NativesExternalStringResource(isolate->bootstrapper(),
1013  source_vector.start(),
1014  source_vector.length());
1015  *current++ = reinterpret_cast<Object*>(resource);
1016  break;
1017  }
1018 
1019  case kSynchronize: {
1020  // If we get here then that indicates that you have a mismatch between
1021  // the number of GC roots when serializing and deserializing.
1022  UNREACHABLE();
1023  }
1024 
1025  default:
1026  UNREACHABLE();
1027  }
1028  }
1029  ASSERT_EQ(limit, current);
1030 }
1031 
1032 
1033 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
1034  ASSERT(integer < 1 << 22);
1035  integer <<= 2;
1036  int bytes = 1;
1037  if (integer > 0xff) bytes = 2;
1038  if (integer > 0xffff) bytes = 3;
1039  integer |= bytes;
1040  Put(static_cast<int>(integer & 0xff), "IntPart1");
1041  if (bytes > 1) Put(static_cast<int>((integer >> 8) & 0xff), "IntPart2");
1042  if (bytes > 2) Put(static_cast<int>((integer >> 16) & 0xff), "IntPart3");
1043 }
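// Worked example (illustrative, not from the original source): PutInt(0x1234, ...)
// shifts the value to 0x48d0, needs two bytes, ORs in the byte count to get 0x48d2,
// and emits 0xd2 followed by 0x48; the low two bits of the first byte tell the
// reader how many bytes the integer occupies.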
1044 
1045 
1046 Serializer::Serializer(SnapshotByteSink* sink)
1047  : sink_(sink),
1048  current_root_index_(0),
1049  external_reference_encoder_(new ExternalReferenceEncoder),
1050  root_index_wave_front_(0) {
1051  isolate_ = Isolate::Current();
1052  // The serializer is meant to be used only to generate initial heap images
1053  // from a context in which there is only one isolate.
1054  ASSERT(isolate_->IsDefaultIsolate());
1055  for (int i = 0; i <= LAST_SPACE; i++) {
1056  fullness_[i] = 0;
1057  }
1058 }
1059 
1060 
1061 Serializer::~Serializer() {
1062  delete external_reference_encoder_;
1063 }
1064 
1065 
1066 void StartupSerializer::SerializeStrongReferences() {
1067  Isolate* isolate = Isolate::Current();
1068  // No active threads.
1069  CHECK_EQ(NULL, Isolate::Current()->thread_manager()->FirstThreadStateInUse());
1070  // No active or weak handles.
1071  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
1072  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
1073  // We don't support serializing installed extensions.
1074  CHECK(!isolate->has_installed_extensions());
1075 
1076  HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
1077 }
1078 
1079 
1080 void PartialSerializer::Serialize(Object** object) {
1081  this->VisitPointer(object);
1082  Pad();
1083 }
1084 
1085 
1086 void Serializer::VisitPointers(Object** start, Object** end) {
1087  Isolate* isolate = Isolate::Current();
1088 
1089  for (Object** current = start; current < end; current++) {
1090  if (start == isolate->heap()->roots_array_start()) {
1091  root_index_wave_front_ =
1092  Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
1093  }
1094  if (reinterpret_cast<Address>(current) ==
1095  isolate->heap()->store_buffer()->TopAddress()) {
1096  sink_->Put(kSkip, "Skip");
1097  sink_->PutInt(kPointerSize, "SkipOneWord");
1098  } else if ((*current)->IsSmi()) {
1099  sink_->Put(kRawData + 1, "Smi");
1100  for (int i = 0; i < kPointerSize; i++) {
1101  sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
1102  }
1103  } else {
1104  SerializeObject(*current, kPlain, kStartOfObject, 0);
1105  }
1106  }
1107 }
1108 
1109 
1110 // This ensures that the partial snapshot cache keeps things alive during GC and
1111 // tracks their movement. When it is called during serialization of the startup
1112 // snapshot nothing happens. When the partial (context) snapshot is created,
1113 // this array is populated with the pointers that the partial snapshot will
1114 // need. As that happens we emit serialized objects to the startup snapshot
1115 // that correspond to the elements of this cache array. On deserialization we
1116 // therefore need to visit the cache array. This fills it up with pointers to
1117 // deserialized objects.
1118 void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
1119  if (Serializer::enabled()) return;
1120  Isolate* isolate = Isolate::Current();
1121  for (int i = 0; ; i++) {
1122  if (isolate->serialize_partial_snapshot_cache_length() <= i) {
1123  // Extend the array ready to get a value from the visitor when
1124  // deserializing.
1125  isolate->PushToPartialSnapshotCache(Smi::FromInt(0));
1126  }
1127  Object** cache = isolate->serialize_partial_snapshot_cache();
1128  visitor->VisitPointers(&cache[i], &cache[i + 1]);
1129  // Sentinel is the undefined object, which is a root so it will not normally
1130  // be found in the cache.
1131  if (cache[i] == isolate->heap()->undefined_value()) {
1132  break;
1133  }
1134  }
1135 }
1136 
1137 
1138 int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
1139  Isolate* isolate = Isolate::Current();
1140 
1141  for (int i = 0;
1142  i < isolate->serialize_partial_snapshot_cache_length();
1143  i++) {
1144  Object* entry = isolate->serialize_partial_snapshot_cache()[i];
1145  if (entry == heap_object) return i;
1146  }
1147 
1148  // We didn't find the object in the cache. So we add it to the cache and
1149  // then visit the pointer so that it becomes part of the startup snapshot
1150  // and we can refer to it from the partial snapshot.
1151  int length = isolate->serialize_partial_snapshot_cache_length();
1152  isolate->PushToPartialSnapshotCache(heap_object);
1153  startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
1154  // We don't recurse from the startup snapshot generator into the partial
1155  // snapshot generator.
1156  ASSERT(length == isolate->serialize_partial_snapshot_cache_length() - 1);
1157  return length;
1158 }
1159 
1160 
1161 int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
1162  Heap* heap = HEAP;
1163  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
1164  for (int i = 0; i < root_index_wave_front_; i++) {
1165  Object* root = heap->roots_array_start()[i];
1166  if (!root->IsSmi() && root == heap_object) {
1167 #if V8_TARGET_ARCH_MIPS
1168  if (from == kFromCode) {
1169  // In order to avoid code bloat in the deserializer we don't have
1170  // support for the encoding that specifies a particular root should
1171  // be written into the lui/ori instructions on MIPS. Therefore we
1172  // should not generate such serialization data for MIPS.
1173  return kInvalidRootIndex;
1174  }
1175 #endif
1176  return i;
1177  }
1178  }
1179  return kInvalidRootIndex;
1180 }
1181 
1182 
1183 // Encode the location of an already deserialized object in order to write its
1184 // location into a later object. We can encode the location as an offset from
1185 // the start of the deserialized objects or as an offset backwards from the
1186 // current allocation pointer.
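// (Illustration: for an object previously mapped at offset A in a space whose
// current allocation address is C, the stream records kBackref or
// kBackrefWithSkip plus the word-scaled distance (C - A) >> kObjectAlignmentBits.)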
1187 void Serializer::SerializeReferenceToPreviousObject(
1188  int space,
1189  int address,
1190  HowToCode how_to_code,
1191  WhereToPoint where_to_point,
1192  int skip) {
1193  int offset = CurrentAllocationAddress(space) - address;
1194  // Shift out the bits that are always 0.
1195  offset >>= kObjectAlignmentBits;
1196  if (skip == 0) {
1197  sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
1198  } else {
1199  sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
1200  "BackRefSerWithSkip");
1201  sink_->PutInt(skip, "BackRefSkipDistance");
1202  }
1203  sink_->PutInt(offset, "offset");
1204 }
1205 
1206 
1207 void StartupSerializer::SerializeObject(
1208  Object* o,
1209  HowToCode how_to_code,
1210  WhereToPoint where_to_point,
1211  int skip) {
1212  CHECK(o->IsHeapObject());
1213  HeapObject* heap_object = HeapObject::cast(o);
1214 
1215  int root_index;
1216  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1217  PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
1218  return;
1219  }
1220 
1221  if (address_mapper_.IsMapped(heap_object)) {
1222  int space = SpaceOfObject(heap_object);
1223  int address = address_mapper_.MappedTo(heap_object);
1224  SerializeReferenceToPreviousObject(space,
1225  address,
1226  how_to_code,
1227  where_to_point,
1228  skip);
1229  } else {
1230  if (skip != 0) {
1231  sink_->Put(kSkip, "FlushPendingSkip");
1232  sink_->PutInt(skip, "SkipDistance");
1233  }
1234 
1235  // Object has not yet been serialized. Serialize it here.
1236  ObjectSerializer object_serializer(this,
1237  heap_object,
1238  sink_,
1239  how_to_code,
1240  where_to_point);
1241  object_serializer.Serialize();
1242  }
1243 }
1244 
1245 
1246 void StartupSerializer::SerializeWeakReferences() {
1247  // This phase comes right after the partial serialization (of the snapshot).
1248  // After we have done the partial serialization the partial snapshot cache
1249  // will contain some references needed to decode the partial snapshot. We
1250  // add one entry with 'undefined' which is the sentinel that the deserializer
1251  // uses to know it is done deserializing the array.
1252  Isolate* isolate = Isolate::Current();
1253  Object* undefined = isolate->heap()->undefined_value();
1254  VisitPointer(&undefined);
1255  HEAP->IterateWeakRoots(this, VISIT_ALL);
1256  Pad();
1257 }
1258 
1259 
1260 void Serializer::PutRoot(int root_index,
1261  HeapObject* object,
1262  SerializerDeserializer::HowToCode how_to_code,
1263  SerializerDeserializer::WhereToPoint where_to_point,
1264  int skip) {
1265  if (how_to_code == kPlain &&
1266  where_to_point == kStartOfObject &&
1267  root_index < kRootArrayNumberOfConstantEncodings &&
1268  !HEAP->InNewSpace(object)) {
1269  if (skip == 0) {
1270  sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index,
1271  "RootConstant");
1272  } else {
1273  sink_->Put(kRootArrayConstants + kHasSkipDistance + root_index,
1274  "RootConstant");
1275  sink_->PutInt(skip, "SkipInPutRoot");
1276  }
1277  } else {
1278  if (skip != 0) {
1279  sink_->Put(kSkip, "SkipFromPutRoot");
1280  sink_->PutInt(skip, "SkipFromPutRootDistance");
1281  }
1282  sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1283  sink_->PutInt(root_index, "root_index");
1284  }
1285 }
1286 
1287 
1288 void PartialSerializer::SerializeObject(
1289  Object* o,
1290  HowToCode how_to_code,
1291  WhereToPoint where_to_point,
1292  int skip) {
1293  CHECK(o->IsHeapObject());
1294  HeapObject* heap_object = HeapObject::cast(o);
1295 
1296  if (heap_object->IsMap()) {
1297  // The code-caches link to context-specific code objects, which
1298  // the startup and context serializers cannot currently handle.
1299  ASSERT(Map::cast(heap_object)->code_cache() ==
1300  heap_object->GetHeap()->raw_unchecked_empty_fixed_array());
1301  }
1302 
1303  int root_index;
1304  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
1305  PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
1306  return;
1307  }
1308 
1309  if (ShouldBeInThePartialSnapshotCache(heap_object)) {
1310  if (skip != 0) {
1311  sink_->Put(kSkip, "SkipFromSerializeObject");
1312  sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
1313  }
1314 
1315  int cache_index = PartialSnapshotCacheIndex(heap_object);
1316  sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
1317  "PartialSnapshotCache");
1318  sink_->PutInt(cache_index, "partial_snapshot_cache_index");
1319  return;
1320  }
1321 
1322  // Pointers from the partial snapshot to the objects in the startup snapshot
1323  // should go through the root array or through the partial snapshot cache.
1324  // If this is not the case you may have to add something to the root array.
1325  ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object));
1326  // All the symbols that the partial snapshot needs should be either in the
1327  // root table or in the partial snapshot cache.
1328  ASSERT(!heap_object->IsSymbol());
1329 
1330  if (address_mapper_.IsMapped(heap_object)) {
1331  int space = SpaceOfObject(heap_object);
1332  int address = address_mapper_.MappedTo(heap_object);
1333  SerializeReferenceToPreviousObject(space,
1334  address,
1335  how_to_code,
1336  where_to_point,
1337  skip);
1338  } else {
1339  if (skip != 0) {
1340  sink_->Put(kSkip, "SkipFromSerializeObject");
1341  sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
1342  }
1343  // Object has not yet been serialized. Serialize it here.
1344  ObjectSerializer serializer(this,
1345  heap_object,
1346  sink_,
1347  how_to_code,
1348  where_to_point);
1349  serializer.Serialize();
1350  }
1351 }
1352 
1353 
1354 void Serializer::ObjectSerializer::Serialize() {
1355  int space = Serializer::SpaceOfObject(object_);
1356  int size = object_->Size();
1357 
1358  sink_->Put(kNewObject + reference_representation_ + space,
1359  "ObjectSerialization");
1360  sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
1361 
1362  LOG(i::Isolate::Current(),
1363  SnapshotPositionEvent(object_->address(), sink_->Position()));
1364 
1365  // Mark this object as already serialized.
1366  int offset = serializer_->Allocate(space, size);
1367  serializer_->address_mapper()->AddMapping(object_, offset);
1368 
1369  // Serialize the map (first word of the object).
1370  serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject, 0);
1371 
1372  // Serialize the rest of the object.
1373  CHECK_EQ(0, bytes_processed_so_far_);
1374  bytes_processed_so_far_ = kPointerSize;
1375  object_->IterateBody(object_->map()->instance_type(), size, this);
1376  OutputRawData(object_->address() + size);
1377 }
1378 
1379 
1380 void Serializer::ObjectSerializer::VisitPointers(Object** start,
1381  Object** end) {
1382  Object** current = start;
1383  while (current < end) {
1384  while (current < end && (*current)->IsSmi()) current++;
1385  if (current < end) OutputRawData(reinterpret_cast<Address>(current));
1386 
1387  while (current < end && !(*current)->IsSmi()) {
1388  HeapObject* current_contents = HeapObject::cast(*current);
1389  int root_index = serializer_->RootIndex(current_contents, kPlain);
1390  // Repeats are not subject to the write barrier so there are only some
1391  // objects that can be used in a repeat encoding. These are the early
1392  // ones in the root array that are never in new space.
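// (For instance, a run of identical old-space pointers of length at most
// kMaxRepeats is emitted as a single CodeForRepeats() byte, while a longer run
// uses kRepeat followed by an explicit count; see the encoding below.)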
1393  if (current != start &&
1394  root_index != kInvalidRootIndex &&
1395  root_index < kRootArrayNumberOfConstantEncodings &&
1396  current_contents == current[-1]) {
1397  ASSERT(!HEAP->InNewSpace(current_contents));
1398  int repeat_count = 1;
1399  while (current < end - 1 && current[repeat_count] == current_contents) {
1400  repeat_count++;
1401  }
1402  current += repeat_count;
1403  bytes_processed_so_far_ += repeat_count * kPointerSize;
1404  if (repeat_count > kMaxRepeats) {
1405  sink_->Put(kRepeat, "SerializeRepeats");
1406  sink_->PutInt(repeat_count, "SerializeRepeats");
1407  } else {
1408  sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
1409  }
1410  } else {
1411  serializer_->SerializeObject(
1412  current_contents, kPlain, kStartOfObject, 0);
1413  bytes_processed_so_far_ += kPointerSize;
1414  current++;
1415  }
1416  }
1417  }
1418 }
1419 
1420 
1421 void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
1422  Object** current = rinfo->target_object_address();
1423 
1424  int skip = OutputRawData(rinfo->target_address_address(),
1425  kCanReturnSkipInsteadOfSkipping);
1426  HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
1427  serializer_->SerializeObject(*current, representation, kStartOfObject, skip);
1428  bytes_processed_so_far_ += rinfo->target_address_size();
1429 }
1430 
1431 
1432 void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
1433  Address* end) {
1434  Address references_start = reinterpret_cast<Address>(start);
1435  int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping);
1436 
1437  for (Address* current = start; current < end; current++) {
1438  sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
1439  sink_->PutInt(skip, "SkipB4ExternalRef");
1440  skip = 0;
1441  int reference_id = serializer_->EncodeExternalReference(*current);
1442  sink_->PutInt(reference_id, "reference id");
1443  }
1444  bytes_processed_so_far_ += static_cast<int>((end - start) * kPointerSize);
1445 }
1446 
1447 
1448 void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
1449  Address references_start = rinfo->target_address_address();
1450  int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping);
1451 
1452  Address* current = rinfo->target_reference_address();
1453  int representation = rinfo->IsCodedSpecially() ?
1454  kFromCode + kStartOfObject : kPlain + kStartOfObject;
1455  sink_->Put(kExternalReference + representation, "ExternalRef");
1456  sink_->PutInt(skip, "SkipB4ExternalRef");
1457  int reference_id = serializer_->EncodeExternalReference(*current);
1458  sink_->PutInt(reference_id, "reference id");
1459  bytes_processed_so_far_ += rinfo->target_address_size();
1460 }
1461 
1462 
1463 void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
1464  Address target_start = rinfo->target_address_address();
1465  int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping);
1466  Address target = rinfo->target_address();
1467  uint32_t encoding = serializer_->EncodeExternalReference(target);
1468  CHECK(target == NULL ? encoding == 0 : encoding != 0);
1469  int representation;
1470  // Can't use a ternary operator because of gcc.
1471  if (rinfo->IsCodedSpecially()) {
1472  representation = kStartOfObject + kFromCode;
1473  } else {
1474  representation = kStartOfObject + kPlain;
1475  }
1476  sink_->Put(kExternalReference + representation, "ExternalReference");
1477  sink_->PutInt(skip, "SkipB4ExternalRef");
1478  sink_->PutInt(encoding, "reference id");
1479  bytes_processed_so_far_ += rinfo->target_address_size();
1480 }
1481 
1482 
1483 void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
1484  CHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
1485  Address target_start = rinfo->target_address_address();
1486  int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping);
1487  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1488  serializer_->SerializeObject(target, kFromCode, kInnerPointer, skip);
1489  bytes_processed_so_far_ += rinfo->target_address_size();
1490 }
1491 
1492 
1493 void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
1494  Code* target = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1495  int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
1496  serializer_->SerializeObject(target, kPlain, kInnerPointer, skip);
1497  bytes_processed_so_far_ += kPointerSize;
1498 }
1499 
1500 
1501 void Serializer::ObjectSerializer::VisitGlobalPropertyCell(RelocInfo* rinfo) {
1502  ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
1503  JSGlobalPropertyCell* cell =
1504  JSGlobalPropertyCell::cast(rinfo->target_cell());
1505  int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
1506  serializer_->SerializeObject(cell, kPlain, kInnerPointer, skip);
1507 }
1508 
1509 
1510 void Serializer::ObjectSerializer::VisitExternalAsciiString(
1511  v8::String::ExternalAsciiStringResource** resource_pointer) {
1512  Address references_start = reinterpret_cast<Address>(resource_pointer);
1513  OutputRawData(references_start);
1514  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
1515  Object* source = HEAP->natives_source_cache()->get(i);
1516  if (!source->IsUndefined()) {
1517  ExternalAsciiString* string = ExternalAsciiString::cast(source);
1518  typedef v8::String::ExternalAsciiStringResource Resource;
1519  const Resource* resource = string->resource();
1520  if (resource == *resource_pointer) {
1521  sink_->Put(kNativesStringResource, "NativesStringResource");
1522  sink_->PutSection(i, "NativesStringResourceEnd");
1523  bytes_processed_so_far_ += sizeof(resource);
1524  return;
1525  }
1526  }
1527  }
1528  // One of the strings in the natives cache should match the resource. We
1529  // can't serialize any other kinds of external strings.
1530  UNREACHABLE();
1531 }
1532 
1533 
1534 int Serializer::ObjectSerializer::OutputRawData(
1535  Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
1536  Address object_start = object_->address();
1537  Address base = object_start + bytes_processed_so_far_;
1538  int up_to_offset = static_cast<int>(up_to - object_start);
1539  int to_skip = up_to_offset - bytes_processed_so_far_;
1540  int bytes_to_output = to_skip;
1541  bytes_processed_so_far_ += to_skip;
1542  // This assert will fail if the reloc info gives us the target_address_address
1543  // locations in a non-ascending order. Luckily that doesn't happen.
1544  ASSERT(to_skip >= 0);
1545  bool outputting_code = false;
1546  if (to_skip != 0 && code_object_ && !code_has_been_output_) {
1547  // Output the code all at once and fix later.
1548  bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
1549  outputting_code = true;
1550  code_has_been_output_ = true;
1551  }
1552  if (bytes_to_output != 0 &&
1553  (!code_object_ || outputting_code)) {
1554 #define RAW_CASE(index) \
1555  if (!outputting_code && bytes_to_output == index * kPointerSize && \
1556  index * kPointerSize == to_skip) { \
1557  sink_->PutSection(kRawData + index, "RawDataFixed"); \
1558  to_skip = 0; /* This insn already skips. */ \
1559  } else /* NOLINT */
1560  COMMON_RAW_LENGTHS(RAW_CASE)
1561 #undef RAW_CASE
1562  { /* NOLINT */
1563  // We always end up here if we are outputting the code of a code object.
1564  sink_->Put(kRawData, "RawData");
1565  sink_->PutInt(bytes_to_output, "length");
1566  }
1567  for (int i = 0; i < bytes_to_output; i++) {
1568  unsigned int data = base[i];
1569  sink_->PutSection(data, "Byte");
1570  }
1571  }
1572  if (to_skip != 0 && return_skip == kIgnoringReturn) {
1573  sink_->Put(kSkip, "Skip");
1574  sink_->PutInt(to_skip, "SkipDistance");
1575  to_skip = 0;
1576  }
1577  return to_skip;
1578 }
1579 
1580 
1581 int Serializer::SpaceOfObject(HeapObject* object) {
1582  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
1583  AllocationSpace s = static_cast<AllocationSpace>(i);
1584  if (HEAP->InSpace(object, s)) {
1585  ASSERT(i < kNumberOfSpaces);
1586  return i;
1587  }
1588  }
1589  UNREACHABLE();
1590  return 0;
1591 }
1592 
1593 
1594 int Serializer::Allocate(int space, int size) {
1595  CHECK(space >= 0 && space < kNumberOfSpaces);
1596  int allocation_address = fullness_[space];
1597  fullness_[space] = allocation_address + size;
1598  return allocation_address;
1599 }
1600 
1601 
1602 int Serializer::SpaceAreaSize(int space) {
1603  if (space == CODE_SPACE) {
1604  return isolate_->memory_allocator()->CodePageAreaSize();
1605  } else {
1606  return Page::kPageSize - Page::kObjectStartOffset;
1607  }
1608 }
1609 
1610 
1611 void Serializer::Pad() {
1612  // The non-branching GetInt will read up to 3 bytes too far, so we need
1613  // to pad the snapshot to make sure we don't read over the end.
1614  for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
1615  sink_->Put(kNop, "Padding");
1616  }
1617 }
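// (Note: sizeof(int32_t) - 1 == 3 trailing kNop bytes are emitted, matching the
// worst case in which GetInt reads three bytes beyond the data it actually uses.)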
1618 
1619 
1620 bool SnapshotByteSource::AtEOF() {
1621  if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
1622  for (int x = position_; x < length_; x++) {
1623  if (data_[x] != SerializerDeserializer::nop()) return false;
1624  }
1625  return true;
1626 }
1627 
1628 } } // namespace v8::internal
bool has_installed_extensions()
Definition: isolate.h:930
List< internal::Object ** > * blocks()
Definition: api.h:451
static const int kObjectStartOffset
Definition: spaces.h:516
void PushToPartialSnapshotCache(Object *obj)
Definition: isolate.cc:1640
#define ACCESSOR_DESCRIPTOR_LIST(V)
Definition: accessors.h:39
#define BUILTIN_LIST_DEBUG_A(V)
Definition: builtins.h:229
#define IC_UTIL_LIST(ICU)
Definition: ic.h:40
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)
Definition: isolate.h:138
static JSGlobalPropertyCell * cast(Object *obj)
const int kObjectAlignmentBits
Definition: v8globals.h:43
#define CASE_STATEMENT(where, how, within, space_number)
#define HEAP
Definition: isolate.h:1433
void VisitExternalAsciiString(v8::String::ExternalAsciiStringResource **resource)
Definition: serialize.cc:1510
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
virtual void SerializeObject(Object *o, HowToCode how_to_code, WhereToPoint where_to_point, int skip)
Definition: serialize.cc:1207
InstanceType instance_type()
Definition: objects-inl.h:3009
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1171
const int kFirstTypeCode
Definition: serialize.h:54
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
#define BUILD_NAME_LITERAL(CamelName, hacker_name)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
static const int kNativesStringResource
Definition: serialize.h:269
#define IC_ENTRY(name)
int RootIndex(HeapObject *heap_object, HowToCode from)
Definition: serialize.cc:1161
void DeleteArray(T *array)
Definition: allocation.h:91
#define DEF_ENTRY_C(name, ignored)
virtual void Put(int byte, const char *description)=0
#define LOG_CODE_EVENT(isolate, Call)
Definition: log.h:89
void VisitGlobalPropertyCell(RelocInfo *rinfo)
Definition: serialize.cc:1501
static ExternalReferenceTable * instance(Isolate *isolate)
Definition: serialize.cc:66
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject *o)
Definition: serialize.h:609
#define BUILTIN_LIST_C(V)
Definition: builtins.h:42
ThreadState * FirstThreadStateInUse()
Definition: v8threads.cc:287
#define ARRAY_SIZE(a)
Definition: globals.h:281
#define STATS_COUNTER_LIST_1(SC)
Definition: v8-counters.h:89
#define BUILTIN_LIST_A(V)
Definition: builtins.h:66
virtual void PutSection(int byte, const char *description)
Definition: serialize.h:396
void VisitExternalReference(RelocInfo *rinfo)
Definition: serialize.cc:1448
void VisitEmbeddedPointer(RelocInfo *target)
Definition: serialize.cc:1421
void set_native_contexts_list(Object *object)
Definition: heap.h:1186
const char * NameOfAddress(Address key) const
Definition: serialize.cc:548