v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
assembler.cc
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #include "assembler.h"
36 
37 #include <math.h> // For cos, log, pow, sin, tan, etc.
38 #include "api.h"
39 #include "builtins.h"
40 #include "counters.h"
41 #include "cpu.h"
42 #include "debug.h"
43 #include "deoptimizer.h"
44 #include "execution.h"
45 #include "ic.h"
46 #include "isolate.h"
47 #include "jsregexp.h"
48 #include "lazy-instance.h"
49 #include "platform.h"
50 #include "regexp-macro-assembler.h"
51 #include "regexp-stack.h"
52 #include "runtime.h"
53 #include "serialize.h"
54 #include "store-buffer-inl.h"
55 #include "stub-cache.h"
56 #include "token.h"
57 
58 #if V8_TARGET_ARCH_IA32
59 #include "ia32/assembler-ia32-inl.h"
60 #elif V8_TARGET_ARCH_X64
61 #include "x64/assembler-x64-inl.h"
62 #elif V8_TARGET_ARCH_ARM
63 #include "arm/assembler-arm-inl.h"
64 #elif V8_TARGET_ARCH_MIPS
65 #include "mips/assembler-mips-inl.h"
66 #else
67 #error "Unknown architecture."
68 #endif
69 
70 // Include native regexp-macro-assembler.
71 #ifndef V8_INTERPRETED_REGEXP
72 #if V8_TARGET_ARCH_IA32
73 #include "ia32/regexp-macro-assembler-ia32.h"
74 #elif V8_TARGET_ARCH_X64
75 #include "x64/regexp-macro-assembler-x64.h"
76 #elif V8_TARGET_ARCH_ARM
77 #include "arm/regexp-macro-assembler-arm.h"
78 #elif V8_TARGET_ARCH_MIPS
79 #include "mips/regexp-macro-assembler-mips.h"
80 #else // Unknown architecture.
81 #error "Unknown architecture."
82 #endif // Target architecture.
83 #endif // V8_INTERPRETED_REGEXP
84 
85 namespace v8 {
86 namespace internal {
87 
88 // -----------------------------------------------------------------------------
89 // Common double constants.
90 
91 struct DoubleConstant BASE_EMBEDDED {
92  double min_int;
93  double one_half;
94  double minus_zero;
95  double zero;
96  double uint8_max_value;
97  double negative_infinity;
98  double canonical_non_hole_nan;
99  double the_hole_nan;
100 };
101 
102 static DoubleConstant double_constants;
103 
104 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
105 
106 // -----------------------------------------------------------------------------
107 // Implementation of AssemblerBase
108 
109 AssemblerBase::AssemblerBase(Isolate* isolate)
110  : isolate_(isolate),
111  jit_cookie_(0) {
112  if (FLAG_mask_constants_with_cookie && isolate != NULL) {
113  jit_cookie_ = V8::RandomPrivate(isolate);
114  }
115 }
116 
117 
118 // -----------------------------------------------------------------------------
119 // Implementation of Label
120 
121 int Label::pos() const {
122  if (pos_ < 0) return -pos_ - 1;
123  if (pos_ > 0) return pos_ - 1;
124  UNREACHABLE();
125  return 0;
126 }
127 
128 
129 // -----------------------------------------------------------------------------
130 // Implementation of RelocInfoWriter and RelocIterator
131 //
132 // Relocation information is written backwards in memory, from high addresses
133 // towards low addresses, byte by byte. Therefore, in the encodings listed
134 // below, the first byte listed is at the highest address, and successive
135 // bytes in the record are at progressively lower addresses.
136 //
137 // Encoding
138 //
139 // The most common modes are given single-byte encodings. Also, it is
140 // easy to identify the type of reloc info and skip unwanted modes in
141 // an iteration.
142 //
143 // The encoding relies on the fact that there are fewer than 14
144 // different non-compactly encoded relocation modes.
145 //
146 // The first byte of a relocation record has a tag in its low 2 bits:
147 // Here are the record schemes, depending on the low tag and optional higher
148 // tags.
149 //
150 // Low tag:
151 // 00: embedded_object: [6-bit pc delta] 00
152 //
153 // 01: code_target: [6-bit pc delta] 01
154 //
155 // 10: short_data_record: [6-bit pc delta] 10 followed by
156 // [6-bit data delta] [2-bit data type tag]
157 //
158 // 11: long_record [2-bit high tag][4 bit middle_tag] 11
159 // followed by variable data depending on type.
160 //
161 // 2-bit data type tags, used in short_data_record and data_jump long_record:
162 // code_target_with_id: 00
163 // position: 01
164 // statement_position: 10
165 // comment: 11 (not used in short_data_record)
166 //
167 // Long record format:
168 // 4-bit middle_tag:
169 // 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
170 // (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
171 // and is between 0000 and 1100)
172 // The format is:
173 // 00 [4 bit middle_tag] 11 followed by
174 // 00 [6 bit pc delta]
175 //
176 // 1101: not used (would allow one more relocation mode to be added)
177 // 1110: long_data_record
178 // The format is: [2-bit data_type_tag] 1110 11
179 // signed intptr_t, lowest byte written first
180 // (except data_type code_target_with_id, which
181 // is followed by a signed int, not intptr_t.)
182 //
183 // 1111: long_pc_jump
184 // The format is:
185 // pc-jump: 00 1111 11,
186 // 00 [6 bits pc delta]
187 // or
188 // pc-jump (variable length):
189 // 01 1111 11,
190 // [7 bits data] 0
191 // ...
192 // [7 bits data] 1
193 // (Bits 6..31 of pc delta, with leading zeroes
194 // dropped, and last non-zero chunk tagged with 1.)
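Editor's note: the tag scheme above is easiest to see on a concrete byte. The worked example below is an illustration added for this listing, not part of assembler.cc; it assumes only the layout described above (2-bit low tag, 6-bit pc delta).

// Worked example (illustration only): the single byte 0x0C
// (binary 000011 00) is a short record. Its low 2 bits are 00
// (embedded_object) and its upper 6 bits are 000011, i.e. a pc delta
// of 3: "an embedded object pointer is located 3 bytes past the pc of
// the previous record". The byte 0x0E (binary 000011 10) would instead
// be a short_data_record with pc delta 3, followed at the next lower
// address by one byte of [6-bit data delta][2-bit data type tag].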
195 
196 
197 const int kMaxRelocModes = 14;
198 
199 const int kTagBits = 2;
200 const int kTagMask = (1 << kTagBits) - 1;
201 const int kExtraTagBits = 4;
202 const int kLocatableTypeTagBits = 2;
203 const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
204 
205 const int kEmbeddedObjectTag = 0;
206 const int kCodeTargetTag = 1;
207 const int kLocatableTag = 2;
208 const int kDefaultTag = 3;
209 
210 const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
211 
212 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
213 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
214 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
215 
216 const int kVariableLengthPCJumpTopTag = 1;
217 const int kChunkBits = 7;
218 const int kChunkMask = (1 << kChunkBits) - 1;
219 const int kLastChunkTagBits = 1;
220 const int kLastChunkTagMask = 1;
221 const int kLastChunkTag = 1;
222 
223 
224 const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
225 
226 const int kCodeWithIdTag = 0;
227 const int kNonstatementPositionTag = 1;
228 const int kStatementPositionTag = 2;
229 const int kCommentTag = 3;
230 
231 
232 uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
233  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
234  // Otherwise write a variable length PC jump for the bits that do
235  // not fit in the kSmallPCDeltaBits bits.
236  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
237  WriteExtraTaggedPC(kSmallPCDeltaMask, kVariableLengthPCJumpTopTag);
238  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
239  ASSERT(pc_jump > 0);
240  // Write kChunkBits size chunks of the pc_jump.
241  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
242  byte b = pc_jump & kChunkMask;
243  *--pos_ = b << kLastChunkTagBits;
244  }
245  // Tag the last chunk so it can be identified.
246  *pos_ = *pos_ | kLastChunkTag;
247  // Return the remaining kSmallPCDeltaBits of the pc_delta.
248  return pc_delta & kSmallPCDeltaMask;
249 }
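Editor's note: a minimal standalone sketch of the chunking arithmetic above, added for illustration only (it is not part of assembler.cc). It hard-codes the assumed values kSmallPCDeltaBits == 6 and kChunkBits == 7 from the constants defined earlier; the hypothetical helper name is invented here.

static int SketchPCJumpChunks(unsigned pc_delta, unsigned char* out) {
  // Emit the bits above the low 6 in 7-bit chunks, least significant
  // chunk first (the same order WriteVariableLengthPCJump writes them).
  int n = 0;
  for (unsigned pc_jump = pc_delta >> 6; pc_jump > 0; pc_jump >>= 7) {
    out[n++] = static_cast<unsigned char>((pc_jump & 0x7F) << 1);
  }
  if (n > 0) out[n - 1] |= 1;  // tag the last (most significant) chunk
  return n;                    // number of chunk bytes produced
}
// Example: pc_delta == 0x12345 keeps its low 6 bits (0x05) for the
// tagged byte; the remaining 0x48D is emitted as chunks 0x1A, 0x13.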
250 
251 
252 void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
253  // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
254  pc_delta = WriteVariableLengthPCJump(pc_delta);
255  *--pos_ = pc_delta << kTagBits | tag;
256 }
257 
258 
259 void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
260  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
261 }
262 
263 
264 void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
265  *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
266  extra_tag << kTagBits |
267  kDefaultTag);
268 }
269 
270 
271 void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
272  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
273  pc_delta = WriteVariableLengthPCJump(pc_delta);
274  WriteExtraTag(extra_tag, 0);
275  *--pos_ = pc_delta;
276 }
277 
278 
279 void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
280  WriteExtraTag(kDataJumpExtraTag, top_tag);
281  for (int i = 0; i < kIntSize; i++) {
282  *--pos_ = static_cast<byte>(data_delta);
283  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
284  data_delta = data_delta >> kBitsPerByte;
285  }
286 }
287 
288 void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
289  WriteExtraTag(kDataJumpExtraTag, top_tag);
290  for (int i = 0; i < kIntptrSize; i++) {
291  *--pos_ = static_cast<byte>(data_delta);
292  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
293  data_delta = data_delta >> kBitsPerByte;
294  }
295 }
296 
297 
298 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
299 #ifdef DEBUG
300  byte* begin_pos = pos_;
301 #endif
302  ASSERT(rinfo->pc() - last_pc_ >= 0);
303  ASSERT(RelocInfo::NUMBER_OF_MODES - RelocInfo::LAST_COMPACT_ENUM <=
304  kMaxRelocModes);
305  // Use unsigned delta-encoding for pc.
306  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
307  RelocInfo::Mode rmode = rinfo->rmode();
308 
309  // The two most common modes are given small tags, and usually fit in a byte.
310  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
311  WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
312  } else if (rmode == RelocInfo::CODE_TARGET) {
313  WriteTaggedPC(pc_delta, kCodeTargetTag);
314  ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
315  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
316  // Use signed delta-encoding for id.
317  ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
318  int id_delta = static_cast<int>(rinfo->data()) - last_id_;
319  // Check if delta is small enough to fit in a tagged byte.
320  if (is_intn(id_delta, kSmallDataBits)) {
321  WriteTaggedPC(pc_delta, kLocatableTag);
322  WriteTaggedData(id_delta, kCodeWithIdTag);
323  } else {
324  // Otherwise, use costly encoding.
325  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
326  WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
327  }
328  last_id_ = static_cast<int>(rinfo->data());
329  } else if (RelocInfo::IsPosition(rmode)) {
330  // Use signed delta-encoding for position.
331  ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
332  int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
333  int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
334  : kStatementPositionTag;
335  // Check if delta is small enough to fit in a tagged byte.
336  if (is_intn(pos_delta, kSmallDataBits)) {
337  WriteTaggedPC(pc_delta, kLocatableTag);
338  WriteTaggedData(pos_delta, pos_type_tag);
339  } else {
340  // Otherwise, use costly encoding.
341  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
342  WriteExtraTaggedIntData(pos_delta, pos_type_tag);
343  }
344  last_position_ = static_cast<int>(rinfo->data());
345  } else if (RelocInfo::IsComment(rmode)) {
346  // Comments are normally not generated, so we use the costly encoding.
347  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
348  WriteExtraTaggedData(rinfo->data(), kCommentTag);
349  ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
350  } else {
351  ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
352  int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
353  // For all other modes we simply use the mode as the extra tag.
354  // None of these modes need a data component.
355  ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
356  WriteExtraTaggedPC(pc_delta, saved_mode);
357  }
358  last_pc_ = rinfo->pc();
359 #ifdef DEBUG
360  ASSERT(begin_pos - pos_ <= kMaxSize);
361 #endif
362 }
363 
364 
365 inline int RelocIterator::AdvanceGetTag() {
366  return *--pos_ & kTagMask;
367 }
368 
369 
370 inline int RelocIterator::GetExtraTag() {
371  return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
372 }
373 
374 
375 inline int RelocIterator::GetTopTag() {
376  return *pos_ >> (kTagBits + kExtraTagBits);
377 }
378 
379 
380 inline void RelocIterator::ReadTaggedPC() {
381  rinfo_.pc_ += *pos_ >> kTagBits;
382 }
383 
384 
385 inline void RelocIterator::AdvanceReadPC() {
386  rinfo_.pc_ += *--pos_;
387 }
388 
389 
390 void RelocIterator::AdvanceReadId() {
391  int x = 0;
392  for (int i = 0; i < kIntSize; i++) {
393  x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
394  }
395  last_id_ += x;
396  rinfo_.data_ = last_id_;
397 }
398 
399 
400 void RelocIterator::AdvanceReadPosition() {
401  int x = 0;
402  for (int i = 0; i < kIntSize; i++) {
403  x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
404  }
405  last_position_ += x;
406  rinfo_.data_ = last_position_;
407 }
408 
409 
410 void RelocIterator::AdvanceReadData() {
411  intptr_t x = 0;
412  for (int i = 0; i < kIntptrSize; i++) {
413  x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
414  }
415  rinfo_.data_ = x;
416 }
417 
418 
419 void RelocIterator::AdvanceReadVariableLengthPCJump() {
420  // Read the 32-kSmallPCDeltaBits most significant bits of the
421  // pc jump in kChunkBits bit chunks and shift them into place.
422  // Stop when the last chunk is encountered.
423  uint32_t pc_jump = 0;
424  for (int i = 0; i < kIntSize; i++) {
425  byte pc_jump_part = *--pos_;
426  pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
427  if ((pc_jump_part & kLastChunkTagMask) == 1) break;
428  }
429  // The least significant kSmallPCDeltaBits bits will be added
430  // later.
431  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
432 }
433 
434 
435 inline int RelocIterator::GetLocatableTypeTag() {
436  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
437 }
438 
439 
440 inline void RelocIterator::ReadTaggedId() {
441  int8_t signed_b = *pos_;
442  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
443  last_id_ += signed_b >> kLocatableTypeTagBits;
444  rinfo_.data_ = last_id_;
445 }
446 
447 
448 inline void RelocIterator::ReadTaggedPosition() {
449  int8_t signed_b = *pos_;
450  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
451  last_position_ += signed_b >> kLocatableTypeTagBits;
452  rinfo_.data_ = last_position_;
453 }
454 
455 
456 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
457  ASSERT(tag == kNonstatementPositionTag ||
458  tag == kStatementPositionTag);
459  return (tag == kNonstatementPositionTag) ?
460  RelocInfo::POSITION :
461  RelocInfo::STATEMENT_POSITION;
462 }
463 
464 
465 void RelocIterator::next() {
466  ASSERT(!done());
467  // Basically, do the opposite of RelocInfoWriter::Write.
468  // Reading of data is as far as possible avoided for unwanted modes,
469  // but we must always update the pc.
470  //
471  // We exit this loop by returning when we find a mode we want.
472  while (pos_ > end_) {
473  int tag = AdvanceGetTag();
474  if (tag == kEmbeddedObjectTag) {
475  ReadTaggedPC();
476  if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
477  } else if (tag == kCodeTargetTag) {
478  ReadTaggedPC();
479  if (SetMode(RelocInfo::CODE_TARGET)) return;
480  } else if (tag == kLocatableTag) {
481  ReadTaggedPC();
482  Advance();
483  int locatable_tag = GetLocatableTypeTag();
484  if (locatable_tag == kCodeWithIdTag) {
485  if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
486  ReadTaggedId();
487  return;
488  }
489  } else {
490  // Compact encoding is never used for comments,
491  // so it must be a position.
492  ASSERT(locatable_tag == kNonstatementPositionTag ||
493  locatable_tag == kStatementPositionTag);
494  if (mode_mask_ & RelocInfo::kPositionMask) {
495  ReadTaggedPosition();
496  if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
497  }
498  }
499  } else {
500  ASSERT(tag == kDefaultTag);
501  int extra_tag = GetExtraTag();
502  if (extra_tag == kPCJumpExtraTag) {
503  int top_tag = GetTopTag();
504  if (top_tag == kVariableLengthPCJumpTopTag) {
505  AdvanceReadVariableLengthPCJump();
506  } else {
507  AdvanceReadPC();
508  }
509  } else if (extra_tag == kDataJumpExtraTag) {
510  int locatable_tag = GetTopTag();
511  if (locatable_tag == kCodeWithIdTag) {
512  if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
513  AdvanceReadId();
514  return;
515  }
516  Advance(kIntSize);
517  } else if (locatable_tag != kCommentTag) {
518  ASSERT(locatable_tag == kNonstatementPositionTag ||
519  locatable_tag == kStatementPositionTag);
520  if (mode_mask_ & RelocInfo::kPositionMask) {
521  AdvanceReadPosition();
522  if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
523  } else {
524  Advance(kIntSize);
525  }
526  } else {
527  ASSERT(locatable_tag == kCommentTag);
528  if (SetMode(RelocInfo::COMMENT)) {
529  AdvanceReadData();
530  return;
531  }
532  Advance(kIntptrSize);
533  }
534  } else {
535  AdvanceReadPC();
536  int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
537  if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
538  }
539  }
540  }
541  done_ = true;
542 }
543 
544 
545 RelocIterator::RelocIterator(Code* code, int mode_mask) {
546  rinfo_.host_ = code;
547  rinfo_.pc_ = code->instruction_start();
548  rinfo_.data_ = 0;
549  // Relocation info is read backwards.
550  pos_ = code->relocation_start() + code->relocation_size();
551  end_ = code->relocation_start();
552  done_ = false;
553  mode_mask_ = mode_mask;
554  last_id_ = 0;
555  last_position_ = 0;
556  if (mode_mask_ == 0) pos_ = end_;
557  next();
558 }
559 
560 
561 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
562  rinfo_.pc_ = desc.buffer;
563  rinfo_.data_ = 0;
564  // Relocation info is read backwards.
565  pos_ = desc.buffer + desc.buffer_size;
566  end_ = pos_ - desc.reloc_size;
567  done_ = false;
568  mode_mask_ = mode_mask;
569  last_id_ = 0;
570  last_position_ = 0;
571  if (mode_mask_ == 0) pos_ = end_;
572  next();
573 }
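Editor's note: a minimal usage sketch of the iterator set up by the constructors above, added for illustration only (not part of assembler.cc); the counting function is a hypothetical example. The mode mask lets next() skip records the caller does not care about without decoding their data.

// Illustrative sketch only.
static int CountCodeTargetsSketch(Code* code) {
  int count = 0;
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
  // The constructor already called next(), so the first matching record
  // (if any) is ready; iterate until done().
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    if (RelocInfo::IsCodeTarget(it.rinfo()->rmode())) count++;
  }
  return count;
}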
574 
575 
576 // -----------------------------------------------------------------------------
577 // Implementation of RelocInfo
578 
579 
580 #ifdef ENABLE_DISASSEMBLER
581 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
582  switch (rmode) {
583  case RelocInfo::NONE:
584  return "no reloc";
585  case RelocInfo::EMBEDDED_OBJECT:
586  return "embedded object";
587  case RelocInfo::CONSTRUCT_CALL:
588  return "code target (js construct call)";
589  case RelocInfo::CODE_TARGET_CONTEXT:
590  return "code target (context)";
591  case RelocInfo::DEBUG_BREAK:
592 #ifndef ENABLE_DEBUGGER_SUPPORT
593  UNREACHABLE();
594 #endif
595  return "debug break";
596  case RelocInfo::CODE_TARGET:
597  return "code target";
598  case RelocInfo::CODE_TARGET_WITH_ID:
599  return "code target with id";
600  case RelocInfo::GLOBAL_PROPERTY_CELL:
601  return "global property cell";
602  case RelocInfo::RUNTIME_ENTRY:
603  return "runtime entry";
604  case RelocInfo::JS_RETURN:
605  return "js return";
606  case RelocInfo::COMMENT:
607  return "comment";
608  case RelocInfo::POSITION:
609  return "position";
610  case RelocInfo::STATEMENT_POSITION:
611  return "statement position";
612  case RelocInfo::EXTERNAL_REFERENCE:
613  return "external reference";
614  case RelocInfo::INTERNAL_REFERENCE:
615  return "internal reference";
616  case RelocInfo::DEBUG_BREAK_SLOT:
617 #ifndef ENABLE_DEBUGGER_SUPPORT
618  UNREACHABLE();
619 #endif
620  return "debug break slot";
621  case RelocInfo::NUMBER_OF_MODES:
622  UNREACHABLE();
623  return "number_of_modes";
624  }
625  return "unknown relocation type";
626 }
627 
628 
629 void RelocInfo::Print(FILE* out) {
630  PrintF(out, "%p %s", pc_, RelocModeName(rmode_));
631  if (IsComment(rmode_)) {
632  PrintF(out, " (%s)", reinterpret_cast<char*>(data_));
633  } else if (rmode_ == EMBEDDED_OBJECT) {
634  PrintF(out, " (");
635  target_object()->ShortPrint(out);
636  PrintF(out, ")");
637  } else if (rmode_ == EXTERNAL_REFERENCE) {
638  ExternalReferenceEncoder ref_encoder;
639  PrintF(out, " (%s) (%p)",
640  ref_encoder.NameOfAddress(*target_reference_address()),
641  *target_reference_address());
642  } else if (IsCodeTarget(rmode_)) {
643  Code* code = Code::GetCodeFromTargetAddress(target_address());
644  PrintF(out, " (%s) (%p)", Code::Kind2String(code->kind()),
645  target_address());
646  if (rmode_ == CODE_TARGET_WITH_ID) {
647  PrintF(out, " (id=%d)", static_cast<int>(data_));
648  }
649  } else if (IsPosition(rmode_)) {
650  PrintF(out, " (%" V8_PTR_PREFIX "d)", data());
651  } else if (rmode_ == RelocInfo::RUNTIME_ENTRY &&
652  Isolate::Current()->deoptimizer_data() != NULL) {
653  // Deoptimization bailouts are stored as runtime entries.
654  int id = Deoptimizer::GetDeoptimizationId(
655  target_address(), Deoptimizer::EAGER);
656  if (id != Deoptimizer::kNotDeoptimizationEntry) {
657  PrintF(out, " (deoptimization bailout %d)", id);
658  }
659  }
660 
661  PrintF(out, "\n");
662 }
663 #endif // ENABLE_DISASSEMBLER
664 
665 
666 #ifdef DEBUG
667 void RelocInfo::Verify() {
668  switch (rmode_) {
669  case EMBEDDED_OBJECT:
670  Object::VerifyPointer(target_object());
671  break;
672  case GLOBAL_PROPERTY_CELL:
673  Object::VerifyPointer(target_cell());
674  break;
675  case DEBUG_BREAK:
676 #ifndef ENABLE_DEBUGGER_SUPPORT
677  UNREACHABLE();
678  break;
679 #endif
680  case CONSTRUCT_CALL:
681  case CODE_TARGET_CONTEXT:
682  case CODE_TARGET_WITH_ID:
683  case CODE_TARGET: {
684  // convert inline target address to code object
685  Address addr = target_address();
686  ASSERT(addr != NULL);
687  // Check that we can find the right code object.
688  Code* code = Code::GetCodeFromTargetAddress(addr);
689  Object* found = HEAP->FindCodeObject(addr);
690  ASSERT(found->IsCode());
691  ASSERT(code->address() == HeapObject::cast(found)->address());
692  break;
693  }
694  case RUNTIME_ENTRY:
695  case JS_RETURN:
696  case COMMENT:
697  case POSITION:
698  case STATEMENT_POSITION:
699  case EXTERNAL_REFERENCE:
700  case INTERNAL_REFERENCE:
701  case DEBUG_BREAK_SLOT:
702  case NONE:
703  break;
704  case NUMBER_OF_MODES:
705  UNREACHABLE();
706  break;
707  }
708 }
709 #endif // DEBUG
710 
711 
712 // -----------------------------------------------------------------------------
713 // Implementation of ExternalReference
714 
715 void ExternalReference::SetUp() {
716  double_constants.min_int = kMinInt;
717  double_constants.one_half = 0.5;
718  double_constants.minus_zero = -0.0;
719  double_constants.uint8_max_value = 255;
720  double_constants.zero = 0.0;
721  double_constants.canonical_non_hole_nan = OS::nan_value();
722  double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64);
723  double_constants.negative_infinity = -V8_INFINITY;
724 }
725 
726 
727 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
728  : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
729 
730 
731 ExternalReference::ExternalReference(
732  ApiFunction* fun,
733  Type type = ExternalReference::BUILTIN_CALL,
734  Isolate* isolate = NULL)
735  : address_(Redirect(isolate, fun->address(), type)) {}
736 
737 
738 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
739  : address_(isolate->builtins()->builtin_address(name)) {}
740 
741 
742 ExternalReference::ExternalReference(Runtime::FunctionId id,
743  Isolate* isolate)
744  : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
745 
746 
747 ExternalReference::ExternalReference(const Runtime::Function* f,
748  Isolate* isolate)
749  : address_(Redirect(isolate, f->entry)) {}
750 
751 
752 ExternalReference ExternalReference::isolate_address() {
753  return ExternalReference(Isolate::Current());
754 }
755 
756 
757 ExternalReference::ExternalReference(const IC_Utility& ic_utility,
758  Isolate* isolate)
759  : address_(Redirect(isolate, ic_utility.address())) {}
760 
761 #ifdef ENABLE_DEBUGGER_SUPPORT
762 ExternalReference::ExternalReference(const Debug_Address& debug_address,
763  Isolate* isolate)
764  : address_(debug_address.address(isolate)) {}
765 #endif
766 
767 ExternalReference::ExternalReference(StatsCounter* counter)
768  : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
769 
770 
771 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
772  : address_(isolate->get_address_from_id(id)) {}
773 
774 
775 ExternalReference::ExternalReference(const SCTableReference& table_ref)
776  : address_(table_ref.address()) {}
777 
778 
779 ExternalReference ExternalReference::
780  incremental_marking_record_write_function(Isolate* isolate) {
781  return ExternalReference(Redirect(
782  isolate,
783  FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
784 }
785 
786 
787 ExternalReference ExternalReference::
788  incremental_evacuation_record_write_function(Isolate* isolate) {
789  return ExternalReference(Redirect(
790  isolate,
791  FUNCTION_ADDR(IncrementalMarking::RecordWriteForEvacuationFromCode)));
792 }
793 
794 
795 ExternalReference ExternalReference::
796  store_buffer_overflow_function(Isolate* isolate) {
797  return ExternalReference(Redirect(
798  isolate,
799  FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
800 }
801 
802 
803 ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
804  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(CPU::FlushICache)));
805 }
806 
807 
808 ExternalReference ExternalReference::perform_gc_function(Isolate* isolate) {
809  return
810  ExternalReference(Redirect(isolate, FUNCTION_ADDR(Runtime::PerformGC)));
811 }
812 
813 
814 ExternalReference ExternalReference::fill_heap_number_with_random_function(
815  Isolate* isolate) {
816  return ExternalReference(Redirect(
817  isolate,
818  FUNCTION_ADDR(V8::FillHeapNumberWithRandom)));
819 }
820 
821 
822 ExternalReference ExternalReference::delete_handle_scope_extensions(
823  Isolate* isolate) {
824  return ExternalReference(Redirect(
825  isolate,
826  FUNCTION_ADDR(HandleScope::DeleteExtensions)));
827 }
828 
829 
830 ExternalReference ExternalReference::random_uint32_function(
831  Isolate* isolate) {
832  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(V8::Random)));
833 }
834 
835 
836 ExternalReference ExternalReference::get_date_field_function(
837  Isolate* isolate) {
838  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
839 }
840 
841 
842 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
843  return ExternalReference(isolate->date_cache()->stamp_address());
844 }
845 
846 
847 ExternalReference ExternalReference::transcendental_cache_array_address(
848  Isolate* isolate) {
849  return ExternalReference(
850  isolate->transcendental_cache()->cache_array_address());
851 }
852 
853 
854 ExternalReference ExternalReference::new_deoptimizer_function(
855  Isolate* isolate) {
856  return ExternalReference(
857  Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
858 }
859 
860 
861 ExternalReference ExternalReference::compute_output_frames_function(
862  Isolate* isolate) {
863  return ExternalReference(
864  Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
865 }
866 
867 
868 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
869  return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
870 }
871 
872 
873 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
874  Isolate* isolate) {
875  return ExternalReference(
876  isolate->keyed_lookup_cache()->field_offsets_address());
877 }
878 
879 
880 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
881  return ExternalReference(isolate->heap()->roots_array_start());
882 }
883 
884 
885 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
886  return ExternalReference(isolate->stack_guard()->address_of_jslimit());
887 }
888 
889 
890 ExternalReference ExternalReference::address_of_real_stack_limit(
891  Isolate* isolate) {
892  return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
893 }
894 
895 
896 ExternalReference ExternalReference::address_of_regexp_stack_limit(
897  Isolate* isolate) {
898  return ExternalReference(isolate->regexp_stack()->limit_address());
899 }
900 
901 
902 ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
903  return ExternalReference(isolate->heap()->NewSpaceStart());
904 }
905 
906 
907 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
908  return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
909 }
910 
911 
912 ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
913  return ExternalReference(reinterpret_cast<Address>(
914  isolate->heap()->NewSpaceMask()));
915 }
916 
917 
918 ExternalReference ExternalReference::new_space_allocation_top_address(
919  Isolate* isolate) {
920  return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
921 }
922 
923 
924 ExternalReference ExternalReference::heap_always_allocate_scope_depth(
925  Isolate* isolate) {
926  Heap* heap = isolate->heap();
927  return ExternalReference(heap->always_allocate_scope_depth_address());
928 }
929 
930 
931 ExternalReference ExternalReference::new_space_allocation_limit_address(
932  Isolate* isolate) {
933  return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
934 }
935 
936 
937 ExternalReference ExternalReference::handle_scope_level_address() {
938  return ExternalReference(HandleScope::current_level_address());
939 }
940 
941 
942 ExternalReference ExternalReference::handle_scope_next_address() {
943  return ExternalReference(HandleScope::current_next_address());
944 }
945 
946 
947 ExternalReference ExternalReference::handle_scope_limit_address() {
948  return ExternalReference(HandleScope::current_limit_address());
949 }
950 
951 
952 ExternalReference ExternalReference::scheduled_exception_address(
953  Isolate* isolate) {
954  return ExternalReference(isolate->scheduled_exception_address());
955 }
956 
957 
958 ExternalReference ExternalReference::address_of_pending_message_obj(
959  Isolate* isolate) {
960  return ExternalReference(isolate->pending_message_obj_address());
961 }
962 
963 
964 ExternalReference ExternalReference::address_of_has_pending_message(
965  Isolate* isolate) {
966  return ExternalReference(isolate->has_pending_message_address());
967 }
968 
969 
970 ExternalReference ExternalReference::address_of_pending_message_script(
971  Isolate* isolate) {
972  return ExternalReference(isolate->pending_message_script_address());
973 }
974 
975 
976 ExternalReference ExternalReference::address_of_min_int() {
977  return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
978 }
979 
980 
981 ExternalReference ExternalReference::address_of_one_half() {
982  return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
983 }
984 
985 
986 ExternalReference ExternalReference::address_of_minus_zero() {
987  return ExternalReference(
988  reinterpret_cast<void*>(&double_constants.minus_zero));
989 }
990 
991 
992 ExternalReference ExternalReference::address_of_zero() {
993  return ExternalReference(reinterpret_cast<void*>(&double_constants.zero));
994 }
995 
996 
997 ExternalReference ExternalReference::address_of_uint8_max_value() {
998  return ExternalReference(
999  reinterpret_cast<void*>(&double_constants.uint8_max_value));
1000 }
1001 
1002 
1003 ExternalReference ExternalReference::address_of_negative_infinity() {
1004  return ExternalReference(
1005  reinterpret_cast<void*>(&double_constants.negative_infinity));
1006 }
1007 
1008 
1009 ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
1010  return ExternalReference(
1011  reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
1012 }
1013 
1014 
1015 ExternalReference ExternalReference::address_of_the_hole_nan() {
1016  return ExternalReference(
1017  reinterpret_cast<void*>(&double_constants.the_hole_nan));
1018 }
1019 
1020 
1021 #ifndef V8_INTERPRETED_REGEXP
1022 
1023 ExternalReference ExternalReference::re_check_stack_guard_state(
1024  Isolate* isolate) {
1025  Address function;
1026 #ifdef V8_TARGET_ARCH_X64
1027  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1028 #elif V8_TARGET_ARCH_IA32
1029  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1030 #elif V8_TARGET_ARCH_ARM
1031  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1032 #elif V8_TARGET_ARCH_MIPS
1033  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1034 #else
1035  UNREACHABLE();
1036 #endif
1037  return ExternalReference(Redirect(isolate, function));
1038 }
1039 
1040 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1041  return ExternalReference(
1042  Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1043 }
1044 
1045 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1046  Isolate* isolate) {
1047  return ExternalReference(Redirect(
1048  isolate,
1049  FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1050 }
1051 
1052 ExternalReference ExternalReference::re_word_character_map() {
1053  return ExternalReference(
1054  NativeRegExpMacroAssembler::word_character_map_address());
1055 }
1056 
1057 ExternalReference ExternalReference::address_of_static_offsets_vector(
1058  Isolate* isolate) {
1059  return ExternalReference(
1060  OffsetsVector::static_offsets_vector_address(isolate));
1061 }
1062 
1063 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1064  Isolate* isolate) {
1065  return ExternalReference(
1066  isolate->regexp_stack()->memory_address());
1067 }
1068 
1069 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1070  Isolate* isolate) {
1071  return ExternalReference(isolate->regexp_stack()->memory_size_address());
1072 }
1073 
1074 #endif // V8_INTERPRETED_REGEXP
1075 
1076 
1077 static double add_two_doubles(double x, double y) {
1078  return x + y;
1079 }
1080 
1081 
1082 static double sub_two_doubles(double x, double y) {
1083  return x - y;
1084 }
1085 
1086 
1087 static double mul_two_doubles(double x, double y) {
1088  return x * y;
1089 }
1090 
1091 
1092 static double div_two_doubles(double x, double y) {
1093  return x / y;
1094 }
1095 
1096 
1097 static double mod_two_doubles(double x, double y) {
1098  return modulo(x, y);
1099 }
1100 
1101 
1102 static double math_sin_double(double x) {
1103  return sin(x);
1104 }
1105 
1106 
1107 static double math_cos_double(double x) {
1108  return cos(x);
1109 }
1110 
1111 
1112 static double math_tan_double(double x) {
1113  return tan(x);
1114 }
1115 
1116 
1117 static double math_log_double(double x) {
1118  return log(x);
1119 }
1120 
1121 
1122 ExternalReference ExternalReference::math_sin_double_function(
1123  Isolate* isolate) {
1124  return ExternalReference(Redirect(isolate,
1125  FUNCTION_ADDR(math_sin_double),
1126  BUILTIN_FP_CALL));
1127 }
1128 
1129 
1130 ExternalReference ExternalReference::math_cos_double_function(
1131  Isolate* isolate) {
1132  return ExternalReference(Redirect(isolate,
1133  FUNCTION_ADDR(math_cos_double),
1134  BUILTIN_FP_CALL));
1135 }
1136 
1137 
1138 ExternalReference ExternalReference::math_tan_double_function(
1139  Isolate* isolate) {
1140  return ExternalReference(Redirect(isolate,
1141  FUNCTION_ADDR(math_tan_double),
1142  BUILTIN_FP_CALL));
1143 }
1144 
1145 
1146 ExternalReference ExternalReference::math_log_double_function(
1147  Isolate* isolate) {
1148  return ExternalReference(Redirect(isolate,
1149  FUNCTION_ADDR(math_log_double),
1150  BUILTIN_FP_CALL));
1151 }
1152 
1153 
1154 ExternalReference ExternalReference::page_flags(Page* page) {
1155  return ExternalReference(reinterpret_cast<Address>(page) +
1156  MemoryChunk::kFlagsOffset);
1157 }
1158 
1159 
1160 // Helper function to compute x^y, where y is known to be an
1161 // integer. Uses binary decomposition to limit the number of
1162 // multiplications; see the discussion in "Hacker's Delight" by Henry
1163 // S. Warren, Jr., figure 11-6, page 213.
1164 double power_double_int(double x, int y) {
1165  double m = (y < 0) ? 1 / x : x;
1166  unsigned n = (y < 0) ? -y : y;
1167  double p = 1;
1168  while (n != 0) {
1169  if ((n & 1) != 0) p *= m;
1170  m *= m;
1171  if ((n & 2) != 0) p *= m;
1172  m *= m;
1173  n >>= 2;
1174  }
1175  return p;
1176 }
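Editor's note: a worked instance of the binary decomposition above, added for illustration (not part of assembler.cc).

// For y == 11 (binary 1011) and x > 0, the loop runs twice:
//   n == 11: bit 0 set -> p = x;          m = x^2;
//            bit 1 set -> p = x * x^2;    m = x^4;   n = 2
//   n == 2:  bit 0 clear;                 m = x^8;
//            bit 1 set -> p = x^3 * x^8;  m = x^16;  n = 0
// giving p == x^11 with three multiplications into p plus four
// squarings, instead of ten repeated multiplications.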
1177 
1178 
1179 double power_double_double(double x, double y) {
1180 #ifdef __MINGW64_VERSION_MAJOR
1181  // MinGW64 has a custom implementation for pow. This handles certain
1182  // special cases that are different.
1183  if ((x == 0.0 || isinf(x)) && isfinite(y)) {
1184  double f;
1185  if (modf(y, &f) != 0.0) return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
1186  }
1187 
1188  if (x == 2.0) {
1189  int y_int = static_cast<int>(y);
1190  if (y == y_int) return ldexp(1.0, y_int);
1191  }
1192 #endif
1193 
1194  // The checks for special cases can be dropped in ia32 because they have
1195  // already been done in generated code before bailing out here.
1196  if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) return OS::nan_value();
1197  return pow(x, y);
1198 }
1199 
1200 
1201 ExternalReference ExternalReference::power_double_double_function(
1202  Isolate* isolate) {
1203  return ExternalReference(Redirect(isolate,
1204  FUNCTION_ADDR(power_double_double),
1205  BUILTIN_FP_FP_CALL));
1206 }
1207 
1208 
1209 ExternalReference ExternalReference::power_double_int_function(
1210  Isolate* isolate) {
1211  return ExternalReference(Redirect(isolate,
1212  FUNCTION_ADDR(power_double_int),
1213  BUILTIN_FP_INT_CALL));
1214 }
1215 
1216 
1217 static int native_compare_doubles(double y, double x) {
1218  if (x == y) return EQUAL;
1219  return x < y ? LESS : GREATER;
1220 }
1221 
1222 
1223 bool EvalComparison(Token::Value op, double op1, double op2) {
1224  ASSERT(Token::IsCompareOp(op));
1225  switch (op) {
1226  case Token::EQ:
1227  case Token::EQ_STRICT: return (op1 == op2);
1228  case Token::NE: return (op1 != op2);
1229  case Token::LT: return (op1 < op2);
1230  case Token::GT: return (op1 > op2);
1231  case Token::LTE: return (op1 <= op2);
1232  case Token::GTE: return (op1 >= op2);
1233  default:
1234  UNREACHABLE();
1235  return false;
1236  }
1237 }
1238 
1239 
1240 ExternalReference ExternalReference::double_fp_operation(
1241  Token::Value operation, Isolate* isolate) {
1242  typedef double BinaryFPOperation(double x, double y);
1243  BinaryFPOperation* function = NULL;
1244  switch (operation) {
1245  case Token::ADD:
1246  function = &add_two_doubles;
1247  break;
1248  case Token::SUB:
1249  function = &sub_two_doubles;
1250  break;
1251  case Token::MUL:
1252  function = &mul_two_doubles;
1253  break;
1254  case Token::DIV:
1255  function = &div_two_doubles;
1256  break;
1257  case Token::MOD:
1258  function = &mod_two_doubles;
1259  break;
1260  default:
1261  UNREACHABLE();
1262  }
1263  return ExternalReference(Redirect(isolate,
1264  FUNCTION_ADDR(function),
1265  BUILTIN_FP_FP_CALL));
1266 }
1267 
1268 
1269 ExternalReference ExternalReference::compare_doubles(Isolate* isolate) {
1270  return ExternalReference(Redirect(isolate,
1271  FUNCTION_ADDR(native_compare_doubles),
1272  BUILTIN_COMPARE_CALL));
1273 }
1274 
1275 
1276 #ifdef ENABLE_DEBUGGER_SUPPORT
1277 ExternalReference ExternalReference::debug_break(Isolate* isolate) {
1278  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
1279 }
1280 
1281 
1282 ExternalReference ExternalReference::debug_step_in_fp_address(
1283  Isolate* isolate) {
1284  return ExternalReference(isolate->debug()->step_in_fp_addr());
1285 }
1286 #endif
1287 
1288 
1289 void PositionsRecorder::RecordPosition(int pos) {
1290  ASSERT(pos != RelocInfo::kNoPosition);
1291  ASSERT(pos >= 0);
1292  state_.current_position = pos;
1293 #ifdef ENABLE_GDB_JIT_INTERFACE
1294  if (gdbjit_lineinfo_ != NULL) {
1295  gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, false);
1296  }
1297 #endif
1298 }
1299 
1300 
1301 void PositionsRecorder::RecordStatementPosition(int pos) {
1302  ASSERT(pos != RelocInfo::kNoPosition);
1303  ASSERT(pos >= 0);
1304  state_.current_statement_position = pos;
1305 #ifdef ENABLE_GDB_JIT_INTERFACE
1306  if (gdbjit_lineinfo_ != NULL) {
1307  gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, true);
1308  }
1309 #endif
1310 }
1311 
1312 
1313 bool PositionsRecorder::WriteRecordedPositions() {
1314  bool written = false;
1315 
1316  // Write the statement position if it is different from what was written last
1317  // time.
1318  if (state_.current_statement_position != state_.written_statement_position) {
1319  EnsureSpace ensure_space(assembler_);
1320  assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1321  state_.current_statement_position);
1322  state_.written_statement_position = state_.current_statement_position;
1323  written = true;
1324  }
1325 
1326  // Write the position if it is different from what was written last time and
1327  // also different from the written statement position.
1328  if (state_.current_position != state_.written_position &&
1329  state_.current_position != state_.written_statement_position) {
1330  EnsureSpace ensure_space(assembler_);
1331  assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1332  state_.written_position = state_.current_position;
1333  written = true;
1334  }
1335 
1336  // Return whether something was written.
1337  return written;
1338 }
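Editor's note: a short worked example of the de-duplication above, added for illustration (not part of assembler.cc).

// If RecordStatementPosition(10) and then RecordPosition(10) are called
// before WriteRecordedPositions(), only one STATEMENT_POSITION record
// (position 10) is emitted: the plain position equals the statement
// position just written, so the second branch is skipped. A later
// RecordPosition(12) followed by WriteRecordedPositions() then emits a
// single POSITION record for 12.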
1339 
1340 } } // namespace v8::internal
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
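The DEFINE_bool / DEFINE_int / DEFINE_string lines above are V8's command-line flag declarations as they appear after macro expansion in flags.cc. As a rough orientation only, the sketch below shows the general shape of such a flag-macro pattern: a macro that creates a FLAG_<name> variable and registers a descriptor for it. The Flag struct layout, the flag_list registry, and the tiny --flag / --noflag parser are assumptions made for this illustration; they are not V8's actual flags.cc machinery.

// Hypothetical, simplified illustration of a DEFINE_bool-style flag macro.
// Everything here is an assumption for the sketch, not V8's implementation.
#include <cstdio>
#include <cstring>
#include <vector>

struct Flag {
  enum FlagType { TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS };
  FlagType type;
  const char* name;
  void* value;          // points at the backing FLAG_* variable
  const char* comment;  // help text printed by --help
};

static std::vector<Flag>& flag_list() {
  static std::vector<Flag> flags;  // initialized on first use
  return flags;
}

// Each DEFINE_BOOL use creates a FLAG_<name> variable plus a registrar that
// appends a descriptor to flag_list() before main() runs.
#define DEFINE_BOOL(nam, default_value, cmt)                              \
  bool FLAG_##nam = default_value;                                        \
  static const bool registered_##nam = [] {                               \
    flag_list().push_back(Flag{Flag::TYPE_BOOL, #nam, &FLAG_##nam, cmt}); \
    return true;                                                          \
  }()

DEFINE_BOOL(trace_gc, false, "print one trace line following each garbage collection");
DEFINE_BOOL(expose_gc, false, "expose gc extension");

int main(int argc, char** argv) {
  // Minimal parser: accept --<name> and --no<name> for boolean flags.
  for (int i = 1; i < argc; ++i) {
    const char* arg = argv[i];
    if (std::strncmp(arg, "--", 2) != 0) continue;
    arg += 2;
    bool value = true;
    if (std::strncmp(arg, "no", 2) == 0) { value = false; arg += 2; }
    for (Flag& f : flag_list()) {
      if (f.type == Flag::TYPE_BOOL && std::strcmp(arg, f.name) == 0) {
        *static_cast<bool*>(f.value) = value;
      }
    }
  }
  for (const Flag& f : flag_list()) {
    std::printf("--%s = %s  (%s)\n", f.name,
                *static_cast<bool*>(f.value) ? "true" : "false", f.comment);
  }
  return 0;
}

Run as, for example, ./flags_demo --trace_gc --noexpose_gc: the registered booleans flip much the way --flag / --noflag pairs behave for real V8 flags, while the comment strings feed the --help output.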
static HeapObject * cast(Object *obj)
static bool IsCompareOp(Value op)
Definition: token.h:214
static Object * FillHeapNumberWithRandom(Object *heap_number, Context *context)
Definition: v8.cc:236
static void RecordWriteForEvacuationFromCode(HeapObject *obj, Object **slot, Isolate *isolate)
static Address current_next_address()
Definition: handles.cc:121
#define ASSERT(condition)
Definition: checks.h:270
v8::Handle< v8::Value > Print(const v8::Arguments &args)
static void StoreBufferOverflow(Isolate *isolate)
static const int kFlagsOffset
Definition: spaces.h:564
const int kTagBits
Definition: assembler.cc:199
static void RecordWriteFromCode(HeapObject *obj, Object *value, Isolate *isolate)
const int kLocatableTypeTagBits
Definition: assembler.cc:202
const int kCommentTag
Definition: assembler.cc:229
int isnan(double x)
const int kIntSize
Definition: globals.h:231
static Address current_limit_address()
Definition: handles.cc:127
#define V8_INFINITY
Definition: globals.h:32
const int kTagMask
Definition: assembler.cc:200
uint8_t byte
Definition: globals.h:171
const int kStatementPositionTag
Definition: assembler.cc:228
const uint64_t kHoleNanInt64
Definition: v8globals.h:480
static int CheckStackGuardState(Address *return_address, Code *re_code, Address re_frame)
#define UNREACHABLE()
Definition: checks.h:50
const int kSmallPCDeltaBits
Definition: assembler.cc:212
const int kMaxRelocModes
Definition: assembler.cc:197
const int kExtraTagBits
Definition: assembler.cc:201
byte * instruction_start()
Definition: objects-inl.h:4376
const int kSmallDataBits
Definition: assembler.cc:203
const int kChunkMask
Definition: assembler.cc:218
static int CaseInsensitiveCompareUC16(Address byte_offset1, Address byte_offset2, size_t byte_length, Isolate *isolate)
double power_double_double(double x, double y)
Definition: assembler.cc:1179
const int kChunkBits
Definition: assembler.cc:217
double modulo(double x, double y)
int isinf(double x)
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:3380
double power_double_int(double x, int y)
Definition: assembler.cc:1164
#define V8_PTR_PREFIX
Definition: globals.h:196
const int kBitsPerByte
Definition: globals.h:251
byte * relocation_start()
Definition: objects-inl.h:4402
#define BASE_EMBEDDED
Definition: allocation.h:68
static Object * GetField(Object *date, Smi *index)
Definition: objects.cc:13076
static int CheckStackGuardState(Address *return_address, Code *re_code, Address re_frame)
AssemblerBase(Isolate *isolate)
Definition: assembler.cc:109
const int kDataJumpExtraTag
Definition: assembler.cc:224
static int CheckStackGuardState(Address *return_address, Code *re_code, Address re_frame)
static double nan_value()
const int kSmallPCDeltaMask
Definition: assembler.cc:213
static uint32_t RandomPrivate(Isolate *isolate)
Definition: v8.cc:178
const int kCodeWithIdTag
Definition: assembler.cc:226
static const int kNotDeoptimizationEntry
Definition: deoptimizer.h:230
#define HEAP
Definition: isolate.h:1408
const int kLastChunkTag
Definition: assembler.cc:221
Help text for the harmony, smi-only-arrays, crankshaft, and self-optimization flag group (concatenated flag descriptions)
Definition: flags.cc:274
static Address GrowStack(Address stack_pointer, Address *stack_top, Isolate *isolate)
const int kLastChunkTagMask
Definition: assembler.cc:220
#define RUNTIME_ENTRY(name, nargs, ressize)
static void FlushICache(void *start, size_t size)
const int kDefaultTag
Definition: assembler.cc:208
#define FUNCTION_ADDR(f)
Definition: globals.h:307
const int kLocatableTag
Definition: assembler.cc:207
static int GetDeoptimizationId(Address addr, BailoutType type)
Definition: deoptimizer.cc:466
static int CheckStackGuardState(Address *return_address, Code *re_code, Address re_frame)
bool EvalComparison(Token::Value op, double op1, double op2)
Definition: assembler.cc:1223
FlagType type() const
Definition: flags.cc:1358
int isfinite(double x)
const int kIntptrSize
Definition: globals.h:233
const int kCodeTargetTag
Definition: assembler.cc:206
bool is_uintn(int x, int n)
Definition: assembler.h:835
static void DeleteExtensions(Isolate *isolate)
Definition: handles.cc:99
static Deoptimizer * New(JSFunction *function, BailoutType type, unsigned bailout_id, Address from, int fp_to_sp_delta, Isolate *isolate)
Definition: deoptimizer.cc:78
const int kEmbeddedObjectTag
Definition: assembler.cc:205
const int kVariableLengthPCJumpTopTag
Definition: assembler.cc:216
static uint32_t Random(Context *context)
Definition: v8.cc:168
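Several of the constants indexed above (kTagBits, kTagMask, kExtraTagBits, kLocatableTypeTagBits, kSmallDataBits, and the various *Tag values defined around assembler.cc:197-229) belong to the compact relocation-info encoding implemented in this file, which stores a small tag in the low-order bits of each recorded value. The sketch below illustrates only that low-bit tagging idiom; the constant values, the numeric tag assignments, and the Pack/Unpack helpers are assumptions for this example and do not reproduce V8's actual RelocInfoWriter format.

// Illustrative low-bit tagging in the spirit of kTagBits / kTagMask.
// Constants and tag values are chosen arbitrarily for this sketch.
#include <cassert>
#include <cstdint>
#include <cstdio>

const int kTagBits = 2;                    // low bits reserved for the tag
const int kTagMask = (1 << kTagBits) - 1;  // 0b11

enum Tag {
  kDefaultTag = 0,
  kCodeTargetTag = 1,
  kEmbeddedObjectTag = 2,
  kLocatableTag = 3
};

// Pack a small pc-delta together with a 2-bit tag into one integer.
static uint32_t Pack(uint32_t pc_delta, Tag tag) {
  assert((tag & ~kTagMask) == 0);
  return (pc_delta << kTagBits) | tag;
}

static Tag UnpackTag(uint32_t packed) {
  return static_cast<Tag>(packed & kTagMask);
}

static uint32_t UnpackDelta(uint32_t packed) {
  return packed >> kTagBits;
}

int main() {
  uint32_t packed = Pack(13, kCodeTargetTag);
  std::printf("tag=%d delta=%u\n", UnpackTag(packed), UnpackDelta(packed));
  return 0;
}

The design point the real encoder shares with this sketch is that a consumer can dispatch on the cheap-to-extract low bits first and only then decode the rest of the record.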