static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
  return static_cast<uint32_t>(type) << 16 | id;
}
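// Editorial sketch (not in the original file): EncodeExternal packs the
// TypeCode into the high 16 bits and the id into the low 16 bits, so the
// inverse mapping could be written as below. DecodeType and DecodeId are
// hypothetical names for illustration only.
static TypeCode DecodeType(uint32_t code) {
  return static_cast<TypeCode>(code >> 16);
}
static uint16_t DecodeId(uint32_t code) {
  return static_cast<uint16_t>(code & 0xffff);
}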
static int* GetInternalPointer(StatsCounter* counter) {
  // All counters refer to dummy_counter, if deserializing happens without
  // setting up counters.
  static int dummy_counter = 0;
  return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
}
ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
  ExternalReferenceTable* external_reference_table =
      isolate->external_reference_table();
  if (external_reference_table == NULL) {
    external_reference_table = new ExternalReferenceTable(isolate);
    isolate->set_external_reference_table(external_reference_table);
  }
  return external_reference_table;
}
void ExternalReferenceTable::AddFromId(TypeCode type,
                                       uint16_t id,
                                       const char* name,
                                       Isolate* isolate) {
  Address address;
  switch (type) {
    case C_BUILTIN: {
      ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
      address = ref.address();
      break;
    }
    case BUILTIN: {
      ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
      address = ref.address();
      break;
    }
    case RUNTIME_FUNCTION: {
      ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
      address = ref.address();
      break;
    }
    case IC_UTILITY: {
      ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
                            isolate);
      address = ref.address();
      break;
    }
    default:
      UNREACHABLE();
      return;
  }
  Add(address, type, id, name);
}
void ExternalReferenceTable::Add(Address address,
                                 TypeCode type,
                                 uint16_t id,
                                 const char* name) {
  ASSERT_NE(NULL, address);
  ExternalReferenceEntry entry;
  entry.address = address;
  entry.code = EncodeExternal(type, id);
  entry.name = name;
  refs_.Add(entry);
  if (id > max_id_[type]) max_id_[type] = id;
}
void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
    max_id_[type_code] = 0;
  }
  struct RefTableEntry { TypeCode type; uint16_t id; const char* name; };

  static const RefTableEntry ref_table[] = {
    // Builtins
#define DEF_ENTRY_C(name, ignored) \
    { C_BUILTIN, Builtins::c_##name, "Builtins::" #name },
    BUILTIN_LIST_C(DEF_ENTRY_C)
#undef DEF_ENTRY_C
#define DEF_ENTRY_C(name, ignored) \
    { BUILTIN, Builtins::k##name, "Builtins::" #name },
#define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)
    BUILTIN_LIST_C(DEF_ENTRY_C)
    BUILTIN_LIST_A(DEF_ENTRY_A)
    BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
#undef DEF_ENTRY_C
#undef DEF_ENTRY_A
#define RUNTIME_ENTRY(name, nargs, ressize) \
    { RUNTIME_FUNCTION, Runtime::k##name, "Runtime::" #name },
    RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
#undef RUNTIME_ENTRY

#define RUNTIME_HIDDEN_ENTRY(name, nargs, ressize) \
    { RUNTIME_FUNCTION, Runtime::kHidden##name, "Runtime::Hidden" #name },
    RUNTIME_HIDDEN_FUNCTION_LIST(RUNTIME_HIDDEN_ENTRY)
#undef RUNTIME_HIDDEN_ENTRY

#define INLINE_OPTIMIZED_ENTRY(name, nargs, ressize) \
    { RUNTIME_FUNCTION, Runtime::kInlineOptimized##name, "Runtime::" #name },
    INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_OPTIMIZED_ENTRY)
#undef INLINE_OPTIMIZED_ENTRY

#define IC_ENTRY(name) \
    { IC_UTILITY, IC::k##name, "IC::" #name },
    IC_UTIL_LIST(IC_ENTRY)
#undef IC_ENTRY
  };
  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
    AddFromId(ref_table[i].type,
              ref_table[i].id,
              ref_table[i].name,
              isolate);
  }
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug addresses
  Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
      DEBUG_ADDRESS, Debug::k_after_break_target_address << kDebugIdShift,
      "Debug::after_break_target_address()");
  Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
      DEBUG_ADDRESS, Debug::k_debug_break_slot_address << kDebugIdShift,
      "Debug::debug_break_slot_address()");
  Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
      DEBUG_ADDRESS, Debug::k_debug_break_return_address << kDebugIdShift,
      "Debug::debug_break_return_address()");
  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
      DEBUG_ADDRESS, Debug::k_restarter_frame_function_pointer << kDebugIdShift,
      "Debug::restarter_frame_function_pointer_address()");
#endif
  // Stat counters
  struct StatsRefTableEntry {
    StatsCounter* (Counters::*counter)();
    uint16_t id;
    const char* name;
  };

  const StatsRefTableEntry stats_ref_table[] = {
#define COUNTER_ENTRY(name, caption) \
    { &Counters::name, Counters::k_##name, "Counters::" #name },
    STATS_COUNTER_LIST_1(COUNTER_ENTRY)
    STATS_COUNTER_LIST_2(COUNTER_ENTRY)
#undef COUNTER_ENTRY
  };

  Counters* counters = isolate->counters();
  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
    Add(reinterpret_cast<Address>(GetInternalPointer(
            (counters->*(stats_ref_table[i].counter))())),
        STATS_COUNTER,
        stats_ref_table[i].id,
        stats_ref_table[i].name);
  }
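  // Editorial note: because GetInternalPointer (above) returns the shared
  // dummy_counter slot for any counter that is not Enabled(), all disabled
  // counters intentionally serialize to the same external address.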
  // Top addresses
  const char* AddressNames[] = {
#define BUILD_NAME_LITERAL(CamelName, hacker_name) \
    "Isolate::" #hacker_name "_address",
    FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
    NULL
#undef BUILD_NAME_LITERAL
  };
  // Accessors
#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
  Add((Address)&Accessors::name, \
      ACCESSOR, \
      Accessors::k##name, \
      "Accessors::" #name);
  ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
#undef ACCESSOR_DESCRIPTOR_DECLARATION
  StubCache* stub_cache = isolate->stub_cache();
  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE, 1, "StubCache::primary_->key");
  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE, 2, "StubCache::primary_->value");
  Add(stub_cache->map_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE, 3, "StubCache::primary_->map");
  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE, 4, "StubCache::secondary_->key");
  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE, 5, "StubCache::secondary_->value");
  Add(stub_cache->map_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE, 6, "StubCache::secondary_->map");
  Add(ExternalReference::perform_gc_function(isolate).address(),
      "Runtime::PerformGC");
  Add(ExternalReference::out_of_memory_function(isolate).address(),
      "Runtime::OutOfMemory");
  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
      "HandleScope::DeleteExtensions");
  Add(ExternalReference::incremental_marking_record_write_function(isolate)
          .address(),
      "IncrementalMarking::RecordWrite");
  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
      "StoreBuffer::StoreBufferOverflow");

  Add(ExternalReference::roots_array_start(isolate).address(),
      "Heap::roots_array_start()");
  Add(ExternalReference::address_of_stack_limit(isolate).address(),
      "StackGuard::address_of_jslimit()");
  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
      "StackGuard::address_of_real_jslimit()");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
      "RegExpStack::limit_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_address(isolate)
          .address(),
      "RegExpStack::memory_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate)
          .address(),
      "RegExpStack::memory_size()");
  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
      "OffsetsVector::static_offsets_vector");
#endif  // V8_INTERPRETED_REGEXP
  Add(ExternalReference::new_space_start(isolate).address(),
      "Heap::NewSpaceStart()");
  Add(ExternalReference::new_space_mask(isolate).address(),
      "Heap::NewSpaceMask()");
  Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(),
      "Heap::always_allocate_scope_depth()");
  Add(ExternalReference::new_space_allocation_limit_address(isolate)
          .address(),
      "Heap::NewSpaceAllocationLimitAddress()");
  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
      "Heap::NewSpaceAllocationTopAddress()");
#ifdef ENABLE_DEBUGGER_SUPPORT
  Add(ExternalReference::debug_break(isolate).address(),
      "Debug::debug_break()");
  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
      "Debug::step_in_fp_addr()");
#endif
  Add(ExternalReference::mod_two_doubles_operation(isolate).address(),
      "mod_two_doubles");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
      "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
      "RegExpMacroAssembler*::CheckStackGuardState()");
  Add(ExternalReference::re_grow_stack(isolate).address(),
      "NativeRegExpMacroAssembler::GrowStack()");
  Add(ExternalReference::re_word_character_map().address(),
      "NativeRegExpMacroAssembler::word_character_map");
#endif  // V8_INTERPRETED_REGEXP
  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
      "KeyedLookupCache::keys()");
  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
      "KeyedLookupCache::field_offsets()");
  Add(ExternalReference::handle_scope_next_address(isolate).address(),
      "HandleScope::next");
  Add(ExternalReference::handle_scope_limit_address(isolate).address(),
      "HandleScope::limit");
  Add(ExternalReference::handle_scope_level_address(isolate).address(),
      "HandleScope::level");
  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
      "Deoptimizer::New()");
  Add(ExternalReference::compute_output_frames_function(isolate).address(),
      "Deoptimizer::ComputeOutputFrames()");
  Add(ExternalReference::address_of_min_int().address(),
      "LDoubleConstant::min_int");
  Add(ExternalReference::address_of_one_half().address(),
      "LDoubleConstant::one_half");
  Add(ExternalReference::isolate_address(isolate).address(),
      "isolate");
  Add(ExternalReference::address_of_minus_zero().address(),
      "LDoubleConstant::minus_zero");
  Add(ExternalReference::address_of_negative_infinity().address(),
      "LDoubleConstant::negative_infinity");
  Add(ExternalReference::power_double_double_function(isolate).address(),
      "power_double_double_function");
  Add(ExternalReference::power_double_int_function(isolate).address(),
      "power_double_int_function");
  Add(ExternalReference::store_buffer_top(isolate).address(),
      "store_buffer_top");
  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
      "canonical_nan");
  Add(ExternalReference::address_of_the_hole_nan().address(),
      "the_hole_nan");
  Add(ExternalReference::get_date_field_function(isolate).address(),
      "JSDate::GetField");
  Add(ExternalReference::date_cache_stamp(isolate).address(),
      "date_cache_stamp");
  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
      "address_of_pending_message_obj");
  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
      "address_of_has_pending_message");
  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
      "pending_message_script");
  Add(ExternalReference::get_make_code_young_function(isolate).address(),
      "Code::MakeCodeYoung");
  Add(ExternalReference::cpu_features().address(),
      "cpu_features");
  Add(ExternalReference(Runtime::kHiddenAllocateInNewSpace, isolate).address(),
      "Runtime::AllocateInNewSpace");
  Add(ExternalReference(Runtime::kHiddenAllocateInTargetSpace, isolate)
          .address(),
      "Runtime::AllocateInTargetSpace");
  Add(ExternalReference::old_pointer_space_allocation_top_address(isolate)
          .address(),
      "Heap::OldPointerSpaceAllocationTopAddress");
  Add(ExternalReference::old_pointer_space_allocation_limit_address(isolate)
          .address(),
      "Heap::OldPointerSpaceAllocationLimitAddress");
  Add(ExternalReference::old_data_space_allocation_top_address(isolate)
          .address(),
      "Heap::OldDataSpaceAllocationTopAddress");
  Add(ExternalReference::old_data_space_allocation_limit_address(isolate)
          .address(),
      "Heap::OldDataSpaceAllocationLimitAddress");
  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate)
          .address(),
      "Heap::NewSpaceAllocationLimitAddress");
  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
      "Heap::allocation_sites_list_address()");
  Add(ExternalReference::address_of_uint32_bias().address(),
      "uint32_bias");
  Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
      "Code::MarkCodeAsExecuted");
HandleScope scope(isolate);
for (int i = 0; i < external_references->size(); ++i) {
  Put(external_references->address(i), i);
}
int index = IndexOf(key);
int index = IndexOf(key);
int ExternalReferenceEncoder::IndexOf(Address key) const {
  if (key == NULL) return -1;
  HashMap::Entry* entry =
      const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
  return entry == NULL
      ? -1
      : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
}
void ExternalReferenceEncoder::Put(Address key, int index) {
  HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
  entry->value = reinterpret_cast<void*>(index);
}
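// Editorial sketch (not in the original file): Put and IndexOf form a simple
// address-to-index map, smuggling the int through the HashMap's void* value
// slot. Assuming an encoder instance, the round trip would be:
//
//   encoder.Put(addr, 42);
//   int index = encoder.IndexOf(addr);  // == 42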
int max = external_references->max_id(type) + 1;
encodings_[type] = NewArray<Address>(max + 1);
for (int i = 0; i < external_references->size(); ++i) {
  Put(external_references->code(i), external_references->address(i));
}
    : isolate_(isolate) {
address_to_name_map_.Move(from, to);
address_to_name_map_.Remove(from);
return address_to_name_map_.Lookup(address);
NameMap() : impl_(&PointerEquals) {}
for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) {
void Insert(Address code_address, const char* name, int name_size) {
  HashMap::Entry* entry = FindOrCreateEntry(code_address);
  if (entry->value == NULL) {
    entry->value = CopyName(name, name_size);
  }
}
const char* Lookup(Address code_address) {
  HashMap::Entry* entry = FindEntry(code_address);
  return (entry != NULL) ? static_cast<const char*>(entry->value) : NULL;
}
void Remove(Address code_address) {
  HashMap::Entry* entry = FindEntry(code_address);
  if (entry != NULL) {
    DeleteArray(static_cast<char*>(entry->value));
    RemoveEntry(entry);
  }
}
void Move(Address from, Address to) {
  if (from == to) return;
  HashMap::Entry* from_entry = FindEntry(from);
  ASSERT(from_entry != NULL);
  void* value = from_entry->value;
  RemoveEntry(from_entry);
  HashMap::Entry* to_entry = FindOrCreateEntry(to);
  ASSERT(to_entry->value == NULL);
  to_entry->value = value;
}
static bool PointerEquals(void* lhs, void* rhs) {
  return lhs == rhs;
}
static char* CopyName(const char* name, int name_size) {
  char* result = NewArray<char>(name_size + 1);
  for (int i = 0; i < name_size; ++i) {
    char c = name[i];
    // Flatten embedded NUL characters so the stored name is a C string.
    if (c == '\0') c = ' ';
    result[i] = c;
  }
  result[name_size] = '\0';
  return result;
}
HashMap::Entry* FindOrCreateEntry(Address code_address) {
  return impl_.Lookup(code_address, ComputePointerHash(code_address), true);
}

HashMap::Entry* FindEntry(Address code_address) {
  return impl_.Lookup(code_address, ComputePointerHash(code_address), false);
}

void RemoveEntry(HashMap::Entry* entry) {
  impl_.Remove(entry->key, entry->hash);
}
virtual void LogRecordedBuffer(Code* code,
                               SharedFunctionInfo*,
                               const char* name,
                               int length) {
  address_to_name_map_.Insert(code->address(), name, length);
}
NameMap address_to_name_map_;
CodeAddressMap* Serializer::code_address_map_ = NULL;
delete code_address_map_;
code_address_map_ = NULL;
      external_reference_decoder_(NULL) {
  for (int i = 0; i < LAST_SPACE + 1; i++) {
    reservations_[i] = kUninitializedReservation;
  }
void Deserializer::FlushICacheForNewCodeObjects() {
  PageIterator it(isolate_->heap()->code_space());
  while (it.has_next()) {
isolate_->heap()->set_native_contexts_list(
    isolate_->heap()->undefined_value());
isolate_->heap()->set_array_buffers_list(
    isolate_->heap()->undefined_value());
isolate_->heap()->set_allocation_sites_list(
    isolate_->heap()->undefined_value());
Object* source = isolate_->heap()->natives_source_cache()->get(i);
if (!source->IsUndefined()) {
FlushICacheForNewCodeObjects();
ASSERT(reservations_[i] != kUninitializedReservation);
if (external_reference_decoder_ == NULL) {
if (external_reference_decoder_) {
  delete external_reference_decoder_;
  external_reference_decoder_ = NULL;
}
void Deserializer::VisitPointers(Object** start, Object** end) {
void Deserializer::RelinkAllocationSite(AllocationSite* site) {
  site->set_weak_next(isolate_->heap()->undefined_value());
void Deserializer::ReadObject(int space_number, Object** write_back) {
  int size = source_->GetInt() << kObjectAlignmentBits;
  Address address = Allocate(space_number, size);
  HeapObject* obj = HeapObject::FromAddress(address);
  *write_back = obj;
  Object** current = reinterpret_cast<Object**>(address);
  Object** limit = current + (size >> kPointerSizeLog2);
  if (FLAG_log_snapshot_positions) {
    LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
  }
  ReadChunk(current, limit, space_number, address);

  if (obj->IsAllocationSite()) {
    RelinkAllocationSite(AllocationSite::cast(obj));
  }

  bool is_codespace = (space_number == CODE_SPACE);
  ASSERT(obj->IsCode() == is_codespace);
}
void Deserializer::ReadChunk(Object** current,
                             Object** limit,
                             int space_number,
                             Address current_object_address) {
  Isolate* const isolate = isolate_;
  bool write_barrier_needed = (current_object_address != NULL &&
                               space_number != CODE_SPACE &&
                               space_number != OLD_DATA_SPACE);
while (current < limit) {
  int data = source_->Get();
#define CASE_STATEMENT(where, how, within, space_number)  \
  case where + how + within + space_number:               \
    ASSERT((where & ~kPointedToMask) == 0);               \
    ASSERT((how & ~kHowToCodeMask) == 0);                 \
    ASSERT((within & ~kWhereToPointMask) == 0);           \
    ASSERT((space_number & ~kSpaceMask) == 0);
#define CASE_BODY(where, how, within, space_number_if_any)                   \
  {                                                                          \
    bool emit_write_barrier = false;                                         \
    bool current_was_incremented = false;                                    \
    int space_number = space_number_if_any == kAnyOldSpace ?                 \
                       (data & kSpaceMask) : space_number_if_any;            \
    if (where == kNewObject && how == kPlain && within == kStartOfObject) {  \
      ReadObject(space_number, current);                                     \
      emit_write_barrier = (space_number == NEW_SPACE);                      \
    } else {                                                                 \
      Object* new_object = NULL;                                             \
      if (where == kNewObject) {                                             \
        ReadObject(space_number, &new_object);                               \
      } else if (where == kRootArray) {                                      \
        int root_id = source_->GetInt();                                     \
        new_object = isolate->heap()->roots_array_start()[root_id];          \
        emit_write_barrier = isolate->heap()->InNewSpace(new_object);        \
      } else if (where == kPartialSnapshotCache) {                           \
        int cache_index = source_->GetInt();                                 \
        new_object = isolate->serialize_partial_snapshot_cache()             \
            [cache_index];                                                   \
        emit_write_barrier = isolate->heap()->InNewSpace(new_object);        \
      } else if (where == kExternalReference) {                              \
        int skip = source_->GetInt();                                        \
        current = reinterpret_cast<Object**>(reinterpret_cast<Address>(      \
            current) + skip);                                                \
        int reference_id = source_->GetInt();                                \
        Address address = external_reference_decoder_->                      \
            Decode(reference_id);                                            \
        new_object = reinterpret_cast<Object*>(address);                     \
      } else if (where == kBackref) {                                        \
        emit_write_barrier = (space_number == NEW_SPACE);                    \
        new_object = GetAddressFromEnd(data & kSpaceMask);                   \
      } else {                                                               \
        ASSERT(where == kBackrefWithSkip);                                   \
        int skip = source_->GetInt();                                        \
        current = reinterpret_cast<Object**>(                                \
            reinterpret_cast<Address>(current) + skip);                      \
        emit_write_barrier = (space_number == NEW_SPACE);                    \
        new_object = GetAddressFromEnd(data & kSpaceMask);                   \
      }                                                                      \
      if (within == kInnerPointer) {                                         \
        if (space_number != CODE_SPACE || new_object->IsCode()) {            \
          Code* new_code_object = reinterpret_cast<Code*>(new_object);       \
          new_object = reinterpret_cast<Object*>(                            \
              new_code_object->instruction_start());                         \
        } else {                                                             \
          ASSERT(space_number == CODE_SPACE);                                \
          Cell* cell = Cell::cast(new_object);                               \
          new_object = reinterpret_cast<Object*>(                            \
              cell->ValueAddress());                                         \
        }                                                                    \
      }                                                                      \
      if (how == kFromCode) {                                                \
        Address location_of_branch_data =                                    \
            reinterpret_cast<Address>(current);                              \
        Assembler::deserialization_set_special_target_at(                    \
            location_of_branch_data,                                         \
            Code::cast(HeapObject::FromAddress(current_object_address)),     \
            reinterpret_cast<Address>(new_object));                          \
        location_of_branch_data += Assembler::kSpecialTargetSize;            \
        current = reinterpret_cast<Object**>(location_of_branch_data);       \
        current_was_incremented = true;                                      \
      } else {                                                               \
        *current = new_object;                                               \
      }                                                                      \
    }                                                                        \
    if (emit_write_barrier && write_barrier_needed) {                        \
      Address current_address = reinterpret_cast<Address>(current);          \
      isolate->heap()->RecordWrite(                                          \
          current_object_address,                                            \
          static_cast<int>(current_address - current_object_address));       \
    }                                                                        \
    if (!current_was_incremented) {                                          \
      current++;                                                             \
    }                                                                        \
    break;                                                                   \
  }
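// Editorial note: each serialized opcode byte is the arithmetic sum of four
// disjoint bit fields, e.g. kNewObject + kPlain + kStartOfObject + NEW_SPACE.
// CASE_STATEMENT reassembles the same constant for the case label, and
// CASE_BODY recovers the space with (data & kSpaceMask) when kAnyOldSpace
// is passed.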
#define ALL_SPACES(where, how, within)                     \
  CASE_STATEMENT(where, how, within, NEW_SPACE)            \
  CASE_BODY(where, how, within, NEW_SPACE)                 \
  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)       \
  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE)    \
  CASE_STATEMENT(where, how, within, CODE_SPACE)           \
  CASE_STATEMENT(where, how, within, CELL_SPACE)           \
  CASE_STATEMENT(where, how, within, PROPERTY_CELL_SPACE)  \
  CASE_STATEMENT(where, how, within, MAP_SPACE)            \
  CASE_BODY(where, how, within, kAnyOldSpace)
#define FOUR_CASES(byte_code)  \
  case byte_code:              \
  case byte_code + 1:          \
  case byte_code + 2:          \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code)  \
  FOUR_CASES(byte_code)           \
  FOUR_CASES(byte_code + 4)       \
  FOUR_CASES(byte_code + 8)       \
  FOUR_CASES(byte_code + 12)
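// Editorial note: SIXTEEN_CASES(k) expands to the sixteen consecutive labels
// `case k: case k + 1: ... case k + 15:`, letting a run of sixteen related
// byte codes (e.g. the root-array-constant encodings) share one handler.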
#define COMMON_RAW_LENGTHS(f) \
  f(1) f(2) f(3) f(4) f(5) f(6) f(7) f(8) \
  f(9) f(10) f(11) f(12) f(13) f(14) f(15) f(16) \
  f(17) f(18) f(19) f(20) f(21) f(22) f(23) f(24) \
  f(25) f(26) f(27) f(28) f(29) f(30) f(31)
#define RAW_CASE(index)                                                  \
  case kRawData + index: {                                               \
    byte* raw_data_out = reinterpret_cast<byte*>(current);               \
    source_->CopyRaw(raw_data_out, index * kPointerSize);                \
    current =                                                            \
        reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \
    break;                                                               \
  }
int size = source_->GetInt();
byte* raw_data_out = reinterpret_cast<byte*>(current);
source_->CopyRaw(raw_data_out, size);
Object* object = isolate->heap()->roots_array_start()[root_id];
ASSERT(!isolate->heap()->InNewSpace(object));
*current++ = object;
int skip = source_->GetInt();
current = reinterpret_cast<Object**>(
    reinterpret_cast<intptr_t>(current) + skip);
Object* object = isolate->heap()->roots_array_start()[root_id];
ASSERT(!isolate->heap()->InNewSpace(object));
*current++ = object;
int repeats = source_->GetInt();
Object* object = current[-1];
ASSERT(!isolate->heap()->InNewSpace(object));
for (int i = 0; i < repeats; i++) current[i] = object;
Object* object = current[-1];
ASSERT(!isolate->heap()->InNewSpace(object));
for (int i = 0; i < repeats; i++) current[i] = object;
#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer)
      CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
#undef CASE_STATEMENT
int size = source_->GetInt();
current = reinterpret_cast<Object**>(
    reinterpret_cast<intptr_t>(current) + size);
int index = source_->Get();
Vector<const char> source_vector = Natives::GetRawScriptSource(index);
NativesExternalStringResource* resource =
    new NativesExternalStringResource(isolate->bootstrapper(),
                                      source_vector.start(),
                                      source_vector.length());
*current++ = reinterpret_cast<Object*>(resource);
void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
  ASSERT(integer < 1 << 22);
  integer <<= 2;
  int bytes = 1;
  if (integer > 0xff) bytes = 2;
  if (integer > 0xffff) bytes = 3;
  integer |= bytes;
  Put(static_cast<int>(integer & 0xff), "IntPart1");
  if (bytes > 1) Put(static_cast<int>((integer >> 8) & 0xff), "IntPart2");
  if (bytes > 2) Put(static_cast<int>((integer >> 16) & 0xff), "IntPart3");
}
    : isolate_(isolate),
      root_index_wave_front_(0) {
this->VisitPointer(object);
return current == &roots[Heap::kStoreBufferTopRootIndex]
    || current == &roots[Heap::kStackLimitRootIndex]
    || current == &roots[Heap::kRealStackLimitRootIndex];
for (Object** current = start; current < end; current++) {
} else if ((*current)->IsSmi()) {
  sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
void SerializerDeserializer::Iterate(Isolate* isolate,
                                     ObjectVisitor* visitor) {
for (int i = 0; ; i++) {
  if (isolate->serialize_partial_snapshot_cache_length() <= i) {
  Object** cache = isolate->serialize_partial_snapshot_cache();
  visitor->VisitPointers(&cache[i], &cache[i + 1]);
  if (cache[i] == isolate->heap()->undefined_value()) {
for (int i = 0;
     i < isolate->serialize_partial_snapshot_cache_length();
     i++) {
  Object* entry = isolate->serialize_partial_snapshot_cache()[i];
  if (entry == heap_object) return i;
}
int length = isolate->serialize_partial_snapshot_cache_length();
startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
ASSERT(length == isolate->serialize_partial_snapshot_cache_length() - 1);
if (!root->IsSmi() && root == heap_object) {
#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL
  if (from == kFromCode) {
sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
           "BackRefSerWithSkip");
CHECK(o->IsHeapObject());
PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
VisitPointer(&undefined);
if (how_to_code == kPlain &&
    where_to_point == kStartOfObject &&
    root_index < kRootArrayNumberOfConstantEncodings &&
    !isolate()->heap()->InNewSpace(object)) {
sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
CHECK(o->IsHeapObject());
if (heap_object->IsMap()) {
  // The code-caches link to context-specific code objects, which the
  // startup and context serializers cannot currently handle.
  ASSERT(Map::cast(heap_object)->code_cache() ==
         heap_object->GetHeap()->empty_fixed_array());
}
PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
           "PartialSnapshotCache");
sink_->PutInt(cache_index, "partial_snapshot_cache_index");
ASSERT(!heap_object->IsInternalizedString());
sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
int size = object_->Size();
sink_->Put(kNewObject + reference_representation_ + space,
           "ObjectSerialization");
ASSERT(code_address_map_);
const char* code_name = code_address_map_->Lookup(object_->address());
int offset = serializer_->Allocate(space, size);
CHECK_EQ(0, bytes_processed_so_far_);
Object** current = start;
while (current < end) {
  while (current < end && (*current)->IsSmi()) current++;
  if (current < end) OutputRawData(reinterpret_cast<Address>(current));

  while (current < end && !(*current)->IsSmi()) {
    HeapObject* current_contents = HeapObject::cast(*current);
    int root_index = serializer_->RootIndex(current_contents, kPlain);
    // Repeats are not subject to the write barrier so there must be no
    // references to the new space.
    if (current != start &&
        root_index == kInvalidRootIndex &&
        current_contents == current[-1]) {
      ASSERT(!serializer_->isolate()->heap()->InNewSpace(current_contents));
      int repeat_count = 1;
      while (current < end - 1 && current[repeat_count] == current_contents) {
        repeat_count++;
      }
      current += repeat_count;
      bytes_processed_so_far_ += repeat_count * kPointerSize;
serializer_->SerializeObject(current_contents, kPlain, kStartOfObject, 0);
if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

int skip = OutputRawData(rinfo->target_address_address(),
                         kCanReturnSkipInsteadOfSkipping);
HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
Object* object = rinfo->target_object();
serializer_->SerializeObject(object, how_to_code, kStartOfObject, skip);
bytes_processed_so_far_ += rinfo->target_address_size();
int skip = OutputRawData(reinterpret_cast<Address>(p),
                         kCanReturnSkipInsteadOfSkipping);
sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
Address target = *p;
sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
int skip = OutputRawData(rinfo->target_address_address(),
                         kCanReturnSkipInsteadOfSkipping);
HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
Address target = rinfo->target_reference();
sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
bytes_processed_so_far_ += rinfo->target_address_size();
int skip = OutputRawData(rinfo->target_address_address(),
                         kCanReturnSkipInsteadOfSkipping);
HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
Address target = rinfo->target_address();
sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
bytes_processed_so_far_ += rinfo->target_address_size();
if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

int skip = OutputRawData(rinfo->target_address_address(),
                         kCanReturnSkipInsteadOfSkipping);
Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
bytes_processed_so_far_ += rinfo->target_address_size();
int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
Code* object = Code::GetCodeFromTargetAddress(Memory::Address_at(entry_address));
serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
Cell* object = Cell::cast(rinfo->target_cell());
serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
Address references_start = reinterpret_cast<Address>(resource_pointer);
OutputRawData(references_start);
for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
  Object* source =
      serializer_->isolate()->heap()->natives_source_cache()->get(i);
  if (!source->IsUndefined()) {
    ExternalAsciiString* string = ExternalAsciiString::cast(source);
    typedef v8::String::ExternalAsciiStringResource Resource;
    const Resource* resource = string->resource();
    if (resource == *resource_pointer) {
      sink_->Put(kNativesStringResource, "NativesStringResource");
      sink_->PutSection(i, "NativesStringResourceEnd");
      bytes_processed_so_far_ += sizeof(resource);
      return;
    }
  }
}
static void WipeOutRelocations(Code* code) {
  int mode_mask =
      RelocInfo::kCodeTargetMask |
      RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
      RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
      RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    if (!(FLAG_enable_ool_constant_pool && it.rinfo()->IsInConstantPool())) {
      it.rinfo()->WipeOut();
    }
  }
}
int Serializer::ObjectSerializer::OutputRawData(
    Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
  Address object_start = object_->address();
  int base = bytes_processed_so_far_;
  int up_to_offset = static_cast<int>(up_to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  int bytes_to_output = to_skip;
  bytes_processed_so_far_ += to_skip;
bool outputting_code = false;
if (to_skip != 0 && code_object_ && !code_has_been_output_) {
  // Output the code all at once and fix later.
  bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
  outputting_code = true;
  code_has_been_output_ = true;
}
if (bytes_to_output != 0 &&
    (!code_object_ || outputting_code)) {
#define RAW_CASE(index)                                                \
  if (!outputting_code && bytes_to_output == index * kPointerSize &&   \
      index * kPointerSize == to_skip) {                               \
    sink_->PutSection(kRawData + index, "RawDataFixed");               \
    to_skip = 0;  /* This instruction includes skip. */                \
  } else
Code* code = CloneCodeObject(object_);
WipeOutRelocations(code);
// We need to wipe out the header fields *after* wiping out the
// relocations, because some of the fields are needed for the latter.
code->WipeOutHeader();
object_start = code->address();

const char* description = code_object_ ? "Code" : "Byte";
for (int i = 0; i < bytes_to_output; i++) {
  sink_->Put(object_start[base + i], description);
}
if (code_object_) delete[] object_start;
if (to_skip != 0 && return_skip == kIgnoringReturn) {
int allocation_address = fullness_[space];
fullness_[space] = allocation_address + size;
return allocation_address;
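// Editorial note: at serialization time Allocate is a pure bump allocator per
// space; fullness_[space] only simulates the heap layout, which is what keeps
// back-references (offsets within a space) and the space reservations handed
// to the deserializer mutually consistent.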
for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
for (int x = position_; x < length_; x++) {