#if V8_TARGET_ARCH_IA32
#include "ia32/assembler-ia32-inl.h"
#elif V8_TARGET_ARCH_X64
#include "x64/assembler-x64-inl.h"
#elif V8_TARGET_ARCH_ARM64
#include "arm64/assembler-arm64-inl.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/assembler-arm-inl.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/assembler-mips-inl.h"
#else  // Unknown architecture.
#error "Unknown architecture."
#endif  // Target architecture.

// Include native regexp-macro-assembler.
#ifndef V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_IA32
#include "ia32/regexp-macro-assembler-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/regexp-macro-assembler-x64.h"
#elif V8_TARGET_ARCH_ARM64
#include "arm64/regexp-macro-assembler-arm64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/regexp-macro-assembler-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/regexp-macro-assembler-mips.h"
#else  // Unknown architecture.
#error "Unknown architecture."
#endif  // Target architecture.
#endif  // V8_INTERPRETED_REGEXP
static DoubleConstant double_constants;

const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";

// Lazily initialized backing store for the table-driven Math.exp
// implementation (see ExternalReference::InitializeMathExpData below).
static bool math_exp_data_initialized = false;
static Mutex* math_exp_data_mutex = NULL;
static double* math_exp_constants_array = NULL;
static double* math_exp_log_table_array = NULL;
AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
    : isolate_(isolate),
      jit_cookie_(0),
      enabled_cpu_features_(0),
      emit_debug_code_(FLAG_debug_code),
      predictable_code_size_(false) {
  if (FLAG_mask_constants_with_cookie && isolate != NULL) {
    jit_cookie_ = isolate->random_number_generator()->NextInt();
  }

  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;
      // Reuse the spare buffer cached on the isolate when possible.
      if (isolate->assembler_spare_buffer() != NULL) {
        buffer = isolate->assembler_spare_buffer();
        isolate->set_assembler_spare_buffer(NULL);
      }
    }
    if (buffer == NULL) buffer = NewArray<byte>(buffer_size);
    own_buffer_ = true;
  } else {
    // Use the externally provided buffer instead.
    ASSERT(buffer_size > 0);
    own_buffer_ = false;
  }
  buffer_ = static_cast<byte*>(buffer);
  buffer_size_ = buffer_size;

  pc_ = buffer_;
}
PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
                                                   int expected_size)
    : assembler_(assembler),
      expected_size_(expected_size),
      start_offset_(assembler->pc_offset()),
      old_value_(assembler->predictable_code_size()) {
  assembler_->set_predictable_code_size(true);
}


PredictableCodeSizeScope::~PredictableCodeSizeScope() {
  // Check that the code generated within the scope has exactly the size
  // that was promised up front (a negative expected size disables the check).
  if (expected_size_ >= 0) {
    CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
  }
  assembler_->set_predictable_code_size(old_value_);
}
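// Usage sketch (illustrative caller, not from this file; the 6-byte size
// is an assumed value): the scope forces predictable code size while open
// and, on destruction, CHECKs that exactly the promised number of bytes
// was emitted. That is how patchable sites keep a fixed layout.
//
//   {
//     PredictableCodeSizeScope scope(&masm, 6);  // expect 6 bytes
//     __ call(target, RelocInfo::CODE_TARGET);   // must encode to 6 bytes
//   }  // ~PredictableCodeSizeScope verifies pc_offset() advanced by 6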
CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
    : assembler_(assembler) {
  ASSERT(CpuFeatures::IsSafeForSnapshot(f));
  old_enabled_ = assembler_->enabled_cpu_features();
  uint64_t mask = static_cast<uint64_t>(1) << f;
#if V8_TARGET_ARCH_ARM
  // ARMv7 is implied by VFP3.
  if (f == VFP3) {
    mask |= static_cast<uint64_t>(1) << ARMv7;
  }
#endif
  assembler_->set_enabled_cpu_features(old_enabled_ | mask);
}
CpuFeatureScope::~CpuFeatureScope() {
  assembler_->set_enabled_cpu_features(old_enabled_);
}
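// Usage sketch (illustrative, not from this file): temporarily enable an
// instruction-set extension while emitting code; the previous feature
// mask is restored when the scope closes.
//
//   if (CpuFeatures::IsSupported(SSE3)) {
//     CpuFeatureScope scope(&masm, SSE3);
//     __ fisttp_d(Operand(esp, 0));  // SSE3-only instruction
//   } else {
//     // ... fallback sequence ...
//   }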
PlatformFeatureScope::PlatformFeatureScope(CpuFeature f)
    : old_cross_compile_(CpuFeatures::cross_compile_) {
  // CpuFeatures is a global singleton, therefore this is only safe in
  // single threaded code.
  uint64_t mask = static_cast<uint64_t>(1) << f;
  CpuFeatures::cross_compile_ |= mask;
}


PlatformFeatureScope::~PlatformFeatureScope() {
  CpuFeatures::cross_compile_ = old_cross_compile_;
}
int Label::pos() const {
  // pos_ encodes both the state and the position of the label:
  // bound labels store -(position + 1), linked labels store position + 1,
  // and 0 means the label is unused.
  if (pos_ < 0) return -pos_ - 1;
  if (pos_ > 0) return  pos_ - 1;
  UNREACHABLE();
  return 0;
}
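// Worked example of the encoding above (illustrative values): a label
// bound at offset 42 stores pos_ == -43, a label linked at offset 42
// stores pos_ == 43, and both decode back to 42 via pos().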
// Upper bound on the number of standard non-compact RelocInfo modes;
// checked in RelocInfoWriter::Write() below.
const int kMaxStandardNonCompactModes = 14;
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
  // Otherwise write a variable length PC jump for the bits that do
  // not fit in the kSmallPCDeltaBits bits.
  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
  WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
  ASSERT(pc_jump > 0);
  // Write kChunkBits size chunks of the pc_jump.
  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
    byte b = pc_jump & kChunkMask;
    *--pos_ = b << kLastChunkTagBits;
  }
  // Tag the last chunk so it can be identified.
  *pos_ = *pos_ | kLastChunkTag;
  // Return the remaining kSmallPCDeltaBits of the pc_delta.
  return pc_delta & kSmallPCDeltaMask;
}
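// Worked example (using this encoding's constants: kTagBits == 2, so
// kSmallPCDeltaBits == 6, kChunkBits == 7, kLastChunkTagBits == 1):
// pc_delta == 0x1234 does not fit in 6 bits, so the overflow bits
// pc_jump == 0x1234 >> 6 == 0x48 are written as a single 7-bit chunk,
// *--pos_ == 0x48 << 1 == 0x90, tagged as the last chunk: 0x90 | 1 == 0x91.
// The low 6 bits, 0x1234 & 0x3F == 0x34, are returned to the caller to be
// packed into the following tagged byte.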
void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
  // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  *--pos_ = pc_delta << kTagBits | tag;
}


void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
}


void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
  *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
                             extra_tag << kTagBits |
                             kDefaultTag);
}


void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  WriteExtraTag(extra_tag, 0);
  *--pos_ = pc_delta;
}
void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
  WriteExtraTag(kDataJumpExtraTag, top_tag);
  for (int i = 0; i < kIntSize; i++) {
    *--pos_ = static_cast<byte>(data_delta);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data_delta = data_delta >> kBitsPerByte;
  }
}
void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
  WriteExtraTag(kPoolExtraTag, pool_type);
  for (int i = 0; i < kIntSize; i++) {
    *--pos_ = static_cast<byte>(data);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data = data >> kBitsPerByte;
  }
}
void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
  WriteExtraTag(kDataJumpExtraTag, top_tag);
  for (int i = 0; i < kIntptrSize; i++) {
    *--pos_ = static_cast<byte>(data_delta);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data_delta = data_delta >> kBitsPerByte;
  }
}
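// Byte-order note with a small worked trace (kIntSize == 4): the loops
// above emit the payload least-significant byte first through the
// decrementing cursor, relying on arithmetic right shift to sign-extend.
// Writing data_delta == -2 (0xFFFFFFFE) stores 0xFE, 0xFF, 0xFF, 0xFF at
// descending addresses; RelocIterator::AdvanceReadId() below reassembles
// them in the same order.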
void RelocInfoWriter::Write(const RelocInfo* rinfo) {
#ifdef DEBUG
  byte* begin_pos = pos_;
#endif
  ASSERT(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
  ASSERT(rinfo->pc() - last_pc_ >= 0);
  ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
         <= kMaxStandardNonCompactModes);
  // Use unsigned delta-encoding for pc.
  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
  RelocInfo::Mode rmode = rinfo->rmode();

  // The two most common modes are given small tags, and usually fit in a byte.
  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
    WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
  } else if (rmode == RelocInfo::CODE_TARGET) {
    WriteTaggedPC(pc_delta, kCodeTargetTag);
    ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
    // Use signed delta-encoding for id.
    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
    int id_delta = static_cast<int>(rinfo->data()) - last_id_;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(id_delta, kSmallDataBits)) {
      WriteTaggedPC(pc_delta, kLocatableTag);
      WriteTaggedData(id_delta, kCodeWithIdTag);
    } else {
      // Otherwise, use costly encoding.
      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
      WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
    }
    last_id_ = static_cast<int>(rinfo->data());
  } else if (RelocInfo::IsPosition(rmode)) {
    // Use signed delta-encoding for position.
    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
    int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
    int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
                                                      : kStatementPositionTag;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(pos_delta, kSmallDataBits)) {
      WriteTaggedPC(pc_delta, kLocatableTag);
      WriteTaggedData(pos_delta, pos_type_tag);
    } else {
      // Otherwise, use costly encoding.
      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
      WriteExtraTaggedIntData(pos_delta, pos_type_tag);
    }
    last_position_ = static_cast<int>(rinfo->data());
  } else if (RelocInfo::IsComment(rmode)) {
    // Comments are normally not generated, so we use the costly encoding.
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedData(rinfo->data(), kCommentTag);
    ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
  } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
                             RelocInfo::IsConstPool(rmode) ? kConstPoolTag
                                                           : kVeneerPoolTag);
  } else {
    ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
    int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
    // For all other modes we simply use the mode as the extra tag.
    // None of these modes need a data component.
    ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
    WriteExtraTaggedPC(pc_delta, saved_mode);
  }
  last_pc_ = rinfo->pc();
#ifdef DEBUG
  ASSERT(begin_pos - pos_ <= kMaxSize);
#endif
}
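// Encoding cost sketch (illustrative numbers): a CODE_TARGET at
// pc_delta == 3 takes the cheap path above and costs a single byte,
// (3 << kTagBits) | kCodeTargetTag, while an EXTERNAL_REFERENCE at the
// same delta falls through to WriteExtraTaggedPC() and costs two bytes
// plus any pc-jump prefix. This is why the two most common modes get the
// small tags.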
inline int RelocIterator::AdvanceGetTag() {
  return *--pos_ & kTagMask;
}


inline int RelocIterator::GetExtraTag() {
  return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
}


inline int RelocIterator::GetTopTag() {
  return *pos_ >> (kTagBits + kExtraTagBits);
}


inline void RelocIterator::ReadTaggedPC() {
  rinfo_.pc_ += *pos_ >> kTagBits;
}


inline void RelocIterator::AdvanceReadPC() {
  rinfo_.pc_ += *--pos_;
}


void RelocIterator::AdvanceReadId() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_id_ += x;
  rinfo_.data_ = last_id_;
}


void RelocIterator::AdvanceReadPoolData() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  rinfo_.data_ = x;
}


void RelocIterator::AdvanceReadPosition() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_position_ += x;
  rinfo_.data_ = last_position_;
}


void RelocIterator::AdvanceReadData() {
  intptr_t x = 0;
  for (int i = 0; i < kIntptrSize; i++) {
    x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
  }
  rinfo_.data_ = x;
}


void RelocIterator::AdvanceReadVariableLengthPCJump() {
  // Read the 32-kSmallPCDeltaBits most significant bits of the
  // pc jump in kChunkBits bit chunks and shift them into place.
  // Stop when the last chunk is encountered.
  uint32_t pc_jump = 0;
  for (int i = 0; i < kIntSize; i++) {
    byte pc_jump_part = *--pos_;
    pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
    if ((pc_jump_part & kLastChunkTagMask) == 1) break;
  }
  // The least significant kSmallPCDeltaBits bits will be added later.
  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
}


inline int RelocIterator::GetLocatableTypeTag() {
  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
}


inline void RelocIterator::ReadTaggedId() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
  last_id_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_id_;
}


inline void RelocIterator::ReadTaggedPosition() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
  last_position_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_position_;
}


static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
  ASSERT(tag == kNonstatementPositionTag ||
         tag == kStatementPositionTag);
  return (tag == kNonstatementPositionTag) ?
         RelocInfo::POSITION :
         RelocInfo::STATEMENT_POSITION;
}
void RelocIterator::next() {
  ASSERT(!done());
  // Basically, do the opposite of RelocInfoWriter::Write.
  // Reading of data is as far as possible avoided for unwanted modes,
  // but we must always update the pc.
  //
  // We exit this loop by returning when we find a mode we want.
  while (pos_ > end_) {
    int tag = AdvanceGetTag();
    if (tag == kEmbeddedObjectTag) {
      ReadTaggedPC();
      if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
    } else if (tag == kCodeTargetTag) {
      ReadTaggedPC();
      if (SetMode(RelocInfo::CODE_TARGET)) return;
    } else if (tag == kLocatableTag) {
      ReadTaggedPC();
      Advance();
      // Check if the locatable information is interesting.
      int locatable_tag = GetLocatableTypeTag();
      if (locatable_tag == kCodeWithIdTag) {
        if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
          ReadTaggedId();
          return;
        }
      } else {
        // Compact encoding is never used for comments,
        // so it must be a position.
        ASSERT(locatable_tag == kNonstatementPositionTag ||
               locatable_tag == kStatementPositionTag);
        if (mode_mask_ & RelocInfo::kPositionMask) {
          ReadTaggedPosition();
          if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
        }
      }
    } else {
      ASSERT(tag == kDefaultTag);
      int extra_tag = GetExtraTag();
      if (extra_tag == kPCJumpExtraTag) {
        if (GetTopTag() == kVariableLengthPCJumpTopTag) {
          AdvanceReadVariableLengthPCJump();
        } else {
          AdvanceReadPC();
        }
      } else if (extra_tag == kDataJumpExtraTag) {
        // Check if the extra data is interesting.
        int locatable_tag = GetTopTag();
        if (locatable_tag == kCodeWithIdTag) {
          if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
            AdvanceReadId();
            return;
          }
          Advance(kIntSize);
        } else if (locatable_tag != kCommentTag) {
          ASSERT(locatable_tag == kNonstatementPositionTag ||
                 locatable_tag == kStatementPositionTag);
          if (mode_mask_ & RelocInfo::kPositionMask) {
            AdvanceReadPosition();
            if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
          } else {
            Advance(kIntSize);
          }
        } else {
          ASSERT(locatable_tag == kCommentTag);
          if (SetMode(RelocInfo::COMMENT)) {
            AdvanceReadData();
            return;
          }
          Advance(kIntptrSize);
        }
      } else if (extra_tag == kPoolExtraTag) {
        int pool_type = GetTopTag();
        ASSERT(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
        RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
            RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
        if (SetMode(rmode)) {
          AdvanceReadPoolData();
          return;
        }
        Advance(kIntSize);
      } else {
        AdvanceReadPC();
        int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
        if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
      }
    }
  }
  if (code_age_sequence_ != NULL) {
    byte* old_code_age_sequence = code_age_sequence_;
    code_age_sequence_ = NULL;
    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
      rinfo_.data_ = 0;
      rinfo_.pc_ = old_code_age_sequence;
      return;
    }
  }
  done_ = true;
}
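// Usage sketch (hypothetical caller, not from this file): iterate only
// the modes selected by the mask; next() skips everything else while
// still replaying pc deltas so rinfo()->pc() stays correct.
//
//   int mask = RelocInfo::kCodeTargetMask |
//              RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
//   for (RelocIterator it(code, mask); !it.done(); it.next()) {
//     Visit(it.rinfo());  // Visit() is an assumed caller-side helper.
//   }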
RelocIterator::RelocIterator(Code* code, int mode_mask) {
  rinfo_.host_ = code;
  rinfo_.pc_ = code->instruction_start();
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = code->relocation_start() + code->relocation_size();
  end_ = code->relocation_start();
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  byte* sequence = code->FindCodeAgeSequence();
  if (sequence != NULL && !Code::IsYoungSequence(sequence)) {
    code_age_sequence_ = sequence;
  } else {
    code_age_sequence_ = NULL;
  }
  if (mode_mask_ == 0) pos_ = end_;
  next();
}


RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
  rinfo_.pc_ = desc.buffer;
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = desc.buffer + desc.buffer_size;
  end_ = pos_ - desc.reloc_size;
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  code_age_sequence_ = NULL;
  if (mode_mask_ == 0) pos_ = end_;
  next();
}
bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
  // Ensure there are no code targets or embedded objects present in the
  // deoptimization entries; they would require relocation after code
  // generation.
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::kApplyMask;
  RelocIterator it(desc, mode_mask);
  return !it.done();
}
#ifdef ENABLE_DISASSEMBLER
const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
  switch (rmode) {
    case RelocInfo::NONE32:
      return "no reloc 32";
    case RelocInfo::NONE64:
      return "no reloc 64";
    case RelocInfo::EMBEDDED_OBJECT:
      return "embedded object";
    case RelocInfo::CONSTRUCT_CALL:
      return "code target (js construct call)";
    case RelocInfo::DEBUG_BREAK:
#ifndef ENABLE_DEBUGGER_SUPPORT
      UNREACHABLE();
#endif
      return "debug break";
    case RelocInfo::CODE_TARGET:
      return "code target";
    case RelocInfo::CODE_TARGET_WITH_ID:
      return "code target with id";
    case RelocInfo::CELL:
      return "property cell";
    case RelocInfo::RUNTIME_ENTRY:
      return "runtime entry";
    case RelocInfo::JS_RETURN:
      return "js return";
    case RelocInfo::COMMENT:
      return "comment";
    case RelocInfo::POSITION:
      return "position";
    case RelocInfo::STATEMENT_POSITION:
      return "statement position";
    case RelocInfo::EXTERNAL_REFERENCE:
      return "external reference";
    case RelocInfo::INTERNAL_REFERENCE:
      return "internal reference";
    case RelocInfo::CONST_POOL:
      return "constant pool";
    case RelocInfo::VENEER_POOL:
      return "veneer pool";
    case RelocInfo::DEBUG_BREAK_SLOT:
#ifndef ENABLE_DEBUGGER_SUPPORT
      UNREACHABLE();
#endif
      return "debug break slot";
    case RelocInfo::CODE_AGE_SEQUENCE:
      return "code_age_sequence";
    case RelocInfo::NUMBER_OF_MODES:
      UNREACHABLE();
      return "number_of_modes";
  }
  return "unknown relocation type";
}
void RelocInfo::Print(Isolate* isolate, FILE* out) {
  PrintF(out, "%p  %s", pc_, RelocModeName(rmode_));
  if (IsComment(rmode_)) {
    PrintF(out, "  (%s)", reinterpret_cast<char*>(data_));
  } else if (rmode_ == EMBEDDED_OBJECT) {
    PrintF(out, "  (");
    target_object()->ShortPrint(out);
    PrintF(out, ")");
  } else if (rmode_ == EXTERNAL_REFERENCE) {
    ExternalReferenceEncoder ref_encoder(isolate);
    PrintF(out, " (%s)  (%p)",
           ref_encoder.NameOfAddress(target_reference()),
           target_reference());
  } else if (IsCodeTarget(rmode_)) {
    Code* code = Code::GetCodeFromTargetAddress(target_address());
    PrintF(out, " (%s)  (%p)", Code::Kind2String(code->kind()),
           target_address());
    if (rmode_ == CODE_TARGET_WITH_ID) {
      PrintF(out, " (id=%d)", static_cast<int>(data_));
    }
  } else if (IsPosition(rmode_)) {
    PrintF(out, "  (%" V8_PTR_PREFIX "d)", data());
  } else if (IsRuntimeEntry(rmode_) &&
             isolate->deoptimizer_data() != NULL) {
    // Deoptimization bailouts are stored as runtime entries.
    int id = Deoptimizer::GetDeoptimizationId(
        isolate, target_address(), Deoptimizer::EAGER);
    if (id != Deoptimizer::kNotDeoptimizationEntry) {
      PrintF(out, "  (deoptimization bailout %d)", id);
    }
  }
  PrintF(out, "\n");
}
#endif  // ENABLE_DISASSEMBLER
#ifdef VERIFY_HEAP
void RelocInfo::Verify() {
  switch (rmode_) {
    case EMBEDDED_OBJECT:
      Object::VerifyPointer(target_object());
      break;
    case CELL:
      Object::VerifyPointer(target_cell());
      break;
    case DEBUG_BREAK:
#ifndef ENABLE_DEBUGGER_SUPPORT
      UNREACHABLE();
      break;
#endif
    case CONSTRUCT_CALL:
    case CODE_TARGET_WITH_ID:
    case CODE_TARGET: {
      // Convert inline target address to code object.
      Address addr = target_address();
      CHECK(addr != NULL);
      // Check that we can find the right code object.
      Code* code = Code::GetCodeFromTargetAddress(addr);
      Object* found = code->GetIsolate()->FindCodeObject(addr);
      CHECK(found->IsCode());
      CHECK(code->address() == HeapObject::cast(found)->address());
      break;
    }
    case RUNTIME_ENTRY:
    case JS_RETURN:
    case COMMENT:
    case POSITION:
    case STATEMENT_POSITION:
    case EXTERNAL_REFERENCE:
    case INTERNAL_REFERENCE:
    case CONST_POOL:
    case VENEER_POOL:
    case DEBUG_BREAK_SLOT:
    case NONE32:
    case NONE64:
      break;
    case NUMBER_OF_MODES:
      UNREACHABLE();
      break;
    case CODE_AGE_SEQUENCE:
      ASSERT(Code::IsYoungSequence(pc_) || code_age_stub()->IsCode());
      break;
  }
}
#endif  // VERIFY_HEAP
void ExternalReference::SetUp() {
  double_constants.min_int = kMinInt;
  double_constants.one_half = 0.5;
  double_constants.minus_one_half = -0.5;
  double_constants.minus_zero = -0.0;
  double_constants.uint8_max_value = 255;
  double_constants.zero = 0.0;
  double_constants.canonical_non_hole_nan = OS::nan_value();
  double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64);
  double_constants.negative_infinity = -V8_INFINITY;
  double_constants.uint32_bias =
      static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;

  math_exp_data_mutex = new Mutex();
}
void ExternalReference::InitializeMathExpData() {
  // Early return?
  if (math_exp_data_initialized) return;

  LockGuard<Mutex> lock_guard(math_exp_data_mutex);
  if (!math_exp_data_initialized) {
    // If this is changed, generated code must be adapted too.
    const int kTableSizeBits = 11;
    const int kTableSize = 1 << kTableSizeBits;
    const double kTableSizeDouble = static_cast<double>(kTableSize);

    math_exp_constants_array = new double[9];
    // Input values smaller than this always return 0.
    math_exp_constants_array[0] = -708.39641853226408;
    // Input values larger than this always return +Infinity.
    math_exp_constants_array[1] = 709.78271289338397;
    math_exp_constants_array[2] = V8_INFINITY;
    // The rest is black magic. Do not attempt to understand it. It is
    // loosely based on the "expd" function published at:
    // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
    const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
    math_exp_constants_array[3] = constant3;
    math_exp_constants_array[4] =
        static_cast<double>(static_cast<int64_t>(3) << 51);
    math_exp_constants_array[5] = 1 / constant3;
    math_exp_constants_array[6] = 3.0000000027955394;
    math_exp_constants_array[7] = 0.16666666685227835;
    math_exp_constants_array[8] = 1;

    math_exp_log_table_array = new double[kTableSize];
    for (int i = 0; i < kTableSize; i++) {
      double value = std::pow(2, i / kTableSizeDouble);
      uint64_t bits = BitCast<uint64_t, double>(value);
      bits &= (static_cast<uint64_t>(1) << 52) - 1;
      double mantissa = BitCast<double, uint64_t>(bits);
      math_exp_log_table_array[i] = mantissa;
    }

    math_exp_data_initialized = true;
  }
}
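// Sketch of how the data above is consumed (the real consumer is
// architecture-specific generated code; this summary is an informal
// reconstruction, not the shipped routine): exp(x) is evaluated as
// 2^(x / ln 2) by computing y = x * constants[3] + constants[4], where
// adding the shifter constant 3 << 51 rounds the scaled input so that
// its low bits hold an index into the 2048-entry mantissa table; the
// remainder is corrected with the short polynomial whose coefficients
// are constants[6..8]. constants[0..2] clamp the input range: below
// -708.396... the result is 0, above 709.782... it is +Infinity.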
void ExternalReference::TearDownMathExpData() {
  delete[] math_exp_constants_array;
  delete[] math_exp_log_table_array;
  delete math_exp_data_mutex;
}
ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
    : address_(Redirect(isolate, Builtins::c_function_address(id))) {}


ExternalReference::ExternalReference(
    ApiFunction* fun,
    Type type = ExternalReference::BUILTIN_CALL,
    Isolate* isolate = NULL)
    : address_(Redirect(isolate, fun->address(), type)) {}


ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
    : address_(isolate->builtins()->builtin_address(name)) {}


ExternalReference::ExternalReference(Runtime::FunctionId id, Isolate* isolate)
    : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}


ExternalReference::ExternalReference(const Runtime::Function* f,
                                     Isolate* isolate)
    : address_(Redirect(isolate, f->entry)) {}


ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
  return ExternalReference(isolate);
}


ExternalReference::ExternalReference(const IC_Utility& ic_utility,
                                     Isolate* isolate)
    : address_(Redirect(isolate, ic_utility.address())) {}


#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference::ExternalReference(const Debug_Address& debug_address,
                                     Isolate* isolate)
    : address_(debug_address.address(isolate)) {}
#endif


ExternalReference::ExternalReference(StatsCounter* counter)
    : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}


ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
    : address_(isolate->get_address_from_id(id)) {}


ExternalReference::ExternalReference(const SCTableReference& table_ref)
    : address_(table_ref.address()) {}
ExternalReference ExternalReference::
    incremental_marking_record_write_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
}


ExternalReference ExternalReference::
    store_buffer_overflow_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
}


ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(CPU::FlushICache)));
}


ExternalReference ExternalReference::perform_gc_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Runtime::PerformGC)));
}


ExternalReference ExternalReference::out_of_memory_function(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Runtime::OutOfMemory)));
}


ExternalReference ExternalReference::delete_handle_scope_extensions(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(HandleScope::DeleteExtensions)));
}


ExternalReference ExternalReference::get_date_field_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
}


ExternalReference ExternalReference::get_make_code_young_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
}


ExternalReference ExternalReference::get_mark_code_as_executed_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
}


ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
  return ExternalReference(isolate->date_cache()->stamp_address());
}


ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
  return ExternalReference(isolate->stress_deopt_count_address());
}
ExternalReference ExternalReference::new_deoptimizer_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
}


ExternalReference ExternalReference::compute_output_frames_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
}


ExternalReference ExternalReference::log_enter_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
}


ExternalReference ExternalReference::log_leave_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
}
ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
  return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
}


ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
    Isolate* isolate) {
  return ExternalReference(
      isolate->keyed_lookup_cache()->field_offsets_address());
}


ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->roots_array_start());
}


ExternalReference ExternalReference::allocation_sites_list_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->allocation_sites_list_address());
}
ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_jslimit());
}


ExternalReference ExternalReference::address_of_real_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
}


ExternalReference ExternalReference::address_of_regexp_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->limit_address());
}


ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceStart());
}


ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
  return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
}


ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
  return ExternalReference(reinterpret_cast<Address>(
      isolate->heap()->NewSpaceMask()));
}


ExternalReference ExternalReference::new_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::heap_always_allocate_scope_depth(
    Isolate* isolate) {
  Heap* heap = isolate->heap();
  return ExternalReference(heap->always_allocate_scope_depth_address());
}


ExternalReference ExternalReference::new_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldPointerSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldPointerSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::old_data_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldDataSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::old_data_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldDataSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::
    new_space_high_promotion_mode_active_address(Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
}


ExternalReference ExternalReference::handle_scope_level_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_level_address(isolate));
}


ExternalReference ExternalReference::handle_scope_next_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_next_address(isolate));
}


ExternalReference ExternalReference::handle_scope_limit_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_limit_address(isolate));
}


ExternalReference ExternalReference::scheduled_exception_address(
    Isolate* isolate) {
  return ExternalReference(isolate->scheduled_exception_address());
}


ExternalReference ExternalReference::address_of_pending_message_obj(
    Isolate* isolate) {
  return ExternalReference(isolate->pending_message_obj_address());
}


ExternalReference ExternalReference::address_of_has_pending_message(
    Isolate* isolate) {
  return ExternalReference(isolate->has_pending_message_address());
}


ExternalReference ExternalReference::address_of_pending_message_script(
    Isolate* isolate) {
  return ExternalReference(isolate->pending_message_script_address());
}
ExternalReference ExternalReference::address_of_min_int() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}


ExternalReference ExternalReference::address_of_one_half() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
}


ExternalReference ExternalReference::address_of_minus_one_half() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.minus_one_half));
}


ExternalReference ExternalReference::address_of_minus_zero() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.minus_zero));
}


ExternalReference ExternalReference::address_of_zero() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.zero));
}


ExternalReference ExternalReference::address_of_uint8_max_value() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.uint8_max_value));
}


ExternalReference ExternalReference::address_of_negative_infinity() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.negative_infinity));
}


ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
}


ExternalReference ExternalReference::address_of_the_hole_nan() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.the_hole_nan));
}


ExternalReference ExternalReference::address_of_uint32_bias() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.uint32_bias));
}
#ifndef V8_INTERPRETED_REGEXP

ExternalReference ExternalReference::re_check_stack_guard_state(
    Isolate* isolate) {
  Address function;
#if V8_TARGET_ARCH_X64
  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
#elif V8_TARGET_ARCH_IA32
  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM64
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
#elif V8_TARGET_ARCH_MIPS
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
#else
  UNREACHABLE();
#endif
  return ExternalReference(Redirect(isolate, function));
}


ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
}


ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
}


ExternalReference ExternalReference::re_word_character_map() {
  return ExternalReference(
      NativeRegExpMacroAssembler::word_character_map_address());
}


ExternalReference ExternalReference::address_of_static_offsets_vector(
    Isolate* isolate) {
  return ExternalReference(
      reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
}


ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->regexp_stack()->memory_address());
}


ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->memory_size_address());
}

#endif  // V8_INTERPRETED_REGEXP
ExternalReference ExternalReference::math_log_double_function(
    Isolate* isolate) {
  typedef double (*d2d)(double x);
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(static_cast<d2d>(std::log)),
                                    BUILTIN_FP_CALL));
}
ExternalReference ExternalReference::math_exp_constants(int constant_index) {
  ASSERT(math_exp_data_initialized);
  return ExternalReference(
      reinterpret_cast<void*>(math_exp_constants_array + constant_index));
}


ExternalReference ExternalReference::math_exp_log_table() {
  ASSERT(math_exp_data_initialized);
  return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
}
ExternalReference ExternalReference::page_flags(Page* page) {
  return ExternalReference(reinterpret_cast<Address>(page) +
                           MemoryChunk::kFlagsOffset);
}


ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
  return ExternalReference(entry);
}
double power_helper(double x, double y) {
  int y_int = static_cast<int>(y);
  if (y == y_int) {
    return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
  }
  if (y == 0.5) {
    return (std::isinf(x)) ? V8_INFINITY
                           : fast_sqrt(x + 0.0);  // Convert -0 to +0.
  }
  if (y == -0.5) {
    return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0);  // Convert -0 to +0.
  }
  return power_double_double(x, y);
}


// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  unsigned n = (y < 0) ? -y : y;
  double p = 1;
  while (n != 0) {
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}


double power_double_double(double x, double y) {
#if defined(__MINGW64_VERSION_MAJOR) && \
    (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)
  // MinGW64 has a custom implementation for pow.  This handles certain
  // special cases that are different.
  if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
    double f;
    if (std::modf(y, &f) != 0.0) {
      return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
    }
  }

  if (x == 2.0) {
    int y_int = static_cast<int>(y);
    if (y == y_int) {
      return std::ldexp(1.0, y_int);
    }
  }
#endif

  // The checks for special cases can be dropped in ia32 because it has
  // already been done in generated code before bailing out here.
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return OS::nan_value();
  }
  return std::pow(x, y);
}
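// Worked example of the binary decomposition in power_double_int above
// (illustrative values): for y == 13 (binary 1101) the loop consumes two
// exponent bits per iteration, squaring m as it goes and folding m into p
// whenever the current bit is set:
//   iteration 1 (n == 1101b): bit0 set, p = x;   m = x^2; bit1 clear; m = x^4
//   iteration 2 (n ==   11b): bit0 set, p = x^5; m = x^8; bit1 set,  p = x^13
// Seven multiplications instead of the twelve a naive loop would need.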
ExternalReference ExternalReference::power_double_double_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_double),
                                    BUILTIN_FP_FP_CALL));
}


ExternalReference ExternalReference::power_double_int_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_int),
                                    BUILTIN_FP_INT_CALL));
}
bool EvalComparison(Token::Value op, double op1, double op2) {
  ASSERT(Token::IsCompareOp(op));
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT: return (op1 == op2);
    case Token::NE: return (op1 != op2);
    case Token::LT: return (op1 < op2);
    case Token::GT: return (op1 > op2);
    case Token::LTE: return (op1 <= op2);
    case Token::GTE: return (op1 >= op2);
    default:
      UNREACHABLE();
      return false;
  }
}
ExternalReference ExternalReference::mod_two_doubles_operation(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(modulo),
                                    BUILTIN_FP_FP_CALL));
}
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference ExternalReference::debug_break(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
}


ExternalReference ExternalReference::debug_step_in_fp_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->step_in_fp_addr());
}
#endif  // ENABLE_DEBUGGER_SUPPORT
void PositionsRecorder::RecordPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  state_.current_position = pos;
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (gdbjit_lineinfo_ != NULL) {
    gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, false);
  }
#endif
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
                                                 assembler_->pc_offset(),
                                                 pos));
}


void PositionsRecorder::RecordStatementPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  state_.current_statement_position = pos;
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (gdbjit_lineinfo_ != NULL) {
    gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, true);
  }
#endif
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddStatementPositionEvent(
                     jit_handler_data_,
                     assembler_->pc_offset(),
                     pos));
}
bool PositionsRecorder::WriteRecordedPositions() {
  bool written = false;

  // Write the statement position if it is different from what was written
  // last time.
  if (state_.current_statement_position != state_.written_statement_position) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
                                state_.current_statement_position);
    state_.written_statement_position = state_.current_statement_position;
    written = true;
  }

  // Write the position if it is different from what was written last time and
  // also different from the written statement position.
  if (state_.current_position != state_.written_position &&
      state_.current_position != state_.written_statement_position) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
    state_.written_position = state_.current_position;
    written = true;
  }

  // Return whether something was written.
  return written;
}
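// Call pattern sketch (hypothetical caller, not from this file): positions
// are recorded eagerly while the code generator walks the AST, but only
// flushed into the reloc stream at safe points, which deduplicates
// repeated positions.
//
//   positions_recorder()->RecordStatementPosition(stmt_pos);
//   positions_recorder()->RecordPosition(expr_pos);
//   positions_recorder()->WriteRecordedPositions();  // at most two entries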
// Computes the "magic number" multiplier and shift that turn signed
// division by the constant d into a multiply plus shift; see Hacker's
// Delight (Henry S. Warren, Jr.), chapter 10.
MultiplierAndShift::MultiplierAndShift(int32_t d) {
  ASSERT(d <= -2 || 2 <= d);
  const uint32_t two31 = 0x80000000;
  uint32_t ad = Abs(d);
  uint32_t t = two31 + (uint32_t(d) >> 31);
  uint32_t anc = t - 1 - t % ad;   // Absolute value of nc.
  int32_t p = 31;                  // Init. p.
  uint32_t q1 = two31 / anc;       // Init. q1 = 2**p/|nc|.
  uint32_t r1 = two31 - q1 * anc;  // Init. r1 = rem(2**p, |nc|).
  uint32_t q2 = two31 / ad;        // Init. q2 = 2**p/|d|.
  uint32_t r2 = two31 - q2 * ad;   // Init. r2 = rem(2**p, |d|).
  uint32_t delta;
  do {
    p++;
    q1 *= 2;          // Update q1 = 2**p/|nc|.
    r1 *= 2;          // Update r1 = rem(2**p, |nc|).
    if (r1 >= anc) {  // Must be an unsigned comparison here.
      q1++;
      r1 -= anc;
    }
    q2 *= 2;          // Update q2 = 2**p/|d|.
    r2 *= 2;          // Update r2 = rem(2**p, |d|).
    if (r2 >= ad) {   // Must be an unsigned comparison here.
      q2++;
      r2 -= ad;
    }
    delta = ad - r2;
  } while (q1 < delta || (q1 == delta && r1 == 0));
  int32_t mul = static_cast<int32_t>(q2 + 1);
  multiplier_ = (d < 0) ? -mul : mul;
  shift_ = p - 32;
}
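// Worked check (illustrative): for d == 5 this yields
// multiplier_ == 0x66666667 and shift_ == 1, so a compiler can lower
// x / 5 as
//   q = hi32(static_cast<int64_t>(x) * 0x66666667);  // high 32 bits
//   q = (q >> 1) + (x < 0 ? 1 : 0);                  // shift, fix sign
// e.g. x == 37: hi32(37 * 0x66666667) == 14 and 14 >> 1 == 7 == 37 / 5.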