V8 3.11.10 (as bundled with Node.js 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
deoptimizer-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 #include "safepoint-table.h"
36 
37 namespace v8 {
38 namespace internal {
39 
// Size in bytes of a single entry in the deoptimization entry table for
// this architecture (file is guarded by V8_TARGET_ARCH_IA32 above).
40 const int Deoptimizer::table_entry_size_ = 10;
41 
42 
45 }
46 
47 
// NOTE(review): the opening declaration of this function (original line 48)
// is missing from this extraction. Judging by the body — which grows the
// relocation info of a Code object so that Deoptimizer::DeoptimizeFunction
// can later patch it in place — this is presumably
// Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
// TODO confirm against the full source.
49  Isolate* isolate = code->GetIsolate();
50  HandleScope scope(isolate);
51 
52  // Compute the size of relocation information needed for the code
53  // patching in Deoptimizer::DeoptimizeFunction.
54  int min_reloc_size = 0;
55  int prev_pc_offset = 0;
56  DeoptimizationInputData* deopt_data =
57  DeoptimizationInputData::cast(code->deoptimization_data());
58  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
59  int pc_offset = deopt_data->Pc(i)->value();
// A pc offset of -1 marks an entry without a lazy-deopt patch site; skip it.
60  if (pc_offset == -1) continue;
61  ASSERT_GE(pc_offset, prev_pc_offset);
62  int pc_delta = pc_offset - prev_pc_offset;
63  // We use RUNTIME_ENTRY reloc info which has a size of 2 bytes
64  // if encodable with small pc delta encoding and up to 6 bytes
65  // otherwise.
66  if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
67  min_reloc_size += 2;
68  } else {
69  min_reloc_size += 6;
70  }
71  prev_pc_offset = pc_offset;
72  }
73 
74  // If the relocation information is not big enough we create a new
75  // relocation info object that is padded with comments to make it
76  // big enough for lazy deoptimization.
77  int reloc_length = code->relocation_info()->length();
78  if (min_reloc_size > reloc_length) {
79  int comment_reloc_size = RelocInfo::kMinRelocCommentSize;
80  // Padding needed.
81  int min_padding = min_reloc_size - reloc_length;
82  // Number of comments needed to take up at least that much space.
83  int additional_comments =
84  (min_padding + comment_reloc_size - 1) / comment_reloc_size;
85  // Actual padding size.
86  int padding = additional_comments * comment_reloc_size;
87  // Allocate new relocation info and copy old relocation to the end
88  // of the new relocation info array because relocation info is
89  // written and read backwards.
90  Factory* factory = isolate->factory();
91  Handle<ByteArray> new_reloc =
92  factory->NewByteArray(reloc_length + padding, TENURED);
93  memcpy(new_reloc->GetDataStartAddress() + padding,
94  code->relocation_info()->GetDataStartAddress(),
95  reloc_length);
96  // Create a relocation writer to write the comments in the padding
97  // space. Use position 0 for everything to ensure short encoding.
98  RelocInfoWriter reloc_info_writer(
99  new_reloc->GetDataStartAddress() + padding, 0);
100  intptr_t comment_string
101  = reinterpret_cast<intptr_t>(RelocInfo::kFillerCommentString);
102  RelocInfo rinfo(0, RelocInfo::COMMENT, comment_string, NULL);
103  for (int i = 0; i < additional_comments; ++i) {
104 #ifdef DEBUG
105  byte* pos_before = reloc_info_writer.pos();
106 #endif
107  reloc_info_writer.Write(&rinfo);
// In debug builds, verify each filler comment occupies exactly the
// reserved kMinRelocCommentSize bytes.
108  ASSERT(RelocInfo::kMinRelocCommentSize ==
109  pos_before - reloc_info_writer.pos());
110  }
111  // Replace relocation information on the code object.
112  code->set_relocation_info(*new_reloc);
113  }
114 }
115 
116 
// Force-deoptimizes an optimized JSFunction: patches a call to the lazy
// deoptimization entry at every recorded lazy-deopt pc, rewrites the code
// object's relocation info in place, queues the code on the isolate's
// deoptimizing-code list, invalidates the code for the GC, and reverts the
// function to its unoptimized (shared) code.
117 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
118  if (!function->IsOptimized()) return;
119 
120  Isolate* isolate = function->GetIsolate();
121  HandleScope scope(isolate);
122  AssertNoAllocation no_allocation;
123 
124  // Get the optimized code.
125  Code* code = function->code();
126  Address code_start_address = code->instruction_start();
127 
128  // We will overwrite the code's relocation info in-place. Relocation info
129  // is written backward. The relocation info is the payload of a byte
130  // array. Later on we will slide this to the start of the byte array and
131  // create a filler object in the remaining space.
132  ByteArray* reloc_info = code->relocation_info();
133  Address reloc_end_address = reloc_info->address() + reloc_info->Size();
134  RelocInfoWriter reloc_info_writer(reloc_end_address, code_start_address);
135 
136  // For each LLazyBailout instruction insert a call to the corresponding
137  // deoptimization entry.
138 
139  // Since the call is a relative encoding, write new
140  // reloc info. We do not need any of the existing reloc info because the
141  // existing code will not be used again (we zap it in debug builds).
142  //
143  // Emit call to lazy deoptimization at all lazy deopt points.
144  DeoptimizationInputData* deopt_data =
145  DeoptimizationInputData::cast(code->deoptimization_data());
146 #ifdef DEBUG
147  Address prev_call_address = NULL;
148 #endif
149  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
// A pc of -1 marks an entry without a lazy-deopt patch site; skip it.
150  if (deopt_data->Pc(i)->value() == -1) continue;
151  // Patch lazy deoptimization entry.
152  Address call_address = code_start_address + deopt_data->Pc(i)->value();
153  CodePatcher patcher(call_address, patch_size());
154  Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
155  patcher.masm()->call(deopt_entry, RelocInfo::NONE);
156  // We use RUNTIME_ENTRY for deoptimization bailouts.
157  RelocInfo rinfo(call_address + 1, // 1 after the call opcode.
// NOTE(review): original line 158 — presumably the RelocInfo mode argument
// (RelocInfo::RUNTIME_ENTRY per the comment above) — is missing from this
// extraction; confirm against the full source.
159  reinterpret_cast<intptr_t>(deopt_entry),
160  NULL);
161  reloc_info_writer.Write(&rinfo);
// The backward-growing reloc writer must never run below the payload start.
162  ASSERT_GE(reloc_info_writer.pos(),
163  reloc_info->address() + ByteArray::kHeaderSize);
164  ASSERT(prev_call_address == NULL ||
165  call_address >= prev_call_address + patch_size());
166  ASSERT(call_address + patch_size() <= code->instruction_end());
167 #ifdef DEBUG
168  prev_call_address = call_address;
169 #endif
170  }
171 
172  // Move the relocation info to the beginning of the byte array.
173  int new_reloc_size = reloc_end_address - reloc_info_writer.pos();
174  memmove(code->relocation_start(), reloc_info_writer.pos(), new_reloc_size);
175 
176  // The relocation info is in place, update the size.
177  reloc_info->set_length(new_reloc_size);
178 
179  // Handle the junk part after the new relocation info. We will create
180  // a non-live object in the extra space at the end of the former reloc info.
181  Address junk_address = reloc_info->address() + reloc_info->Size();
182  ASSERT(junk_address <= reloc_end_address);
183  isolate->heap()->CreateFillerObjectAt(junk_address,
184  reloc_end_address - junk_address);
185 
186  // Add the deoptimizing code to the list.
// NOTE(review): original line 187 — the declaration of `node` used on the
// next two lines (presumably a list node wrapping `code`) — is missing
// from this extraction; confirm against the full source.
188  DeoptimizerData* data = isolate->deoptimizer_data();
189  node->set_next(data->deoptimizing_code_list_);
190  data->deoptimizing_code_list_ = node;
191 
192  // We might be in the middle of incremental marking with compaction.
193  // Tell collector to treat this code object in a special way and
194  // ignore all slots that might have been recorded on it.
195  isolate->heap()->mark_compact_collector()->InvalidateCode(code);
196 
197  // Set the code for the function to non-optimized version.
198  function->ReplaceCode(function->shared()->code());
199 
200  if (FLAG_trace_deopt) {
201  PrintF("[forced deoptimization: ");
202  function->PrintName();
203  PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
204  }
205 }
206 
207 
// Instruction byte patterns used by PatchStackCheckCodeAt /
// RevertStackCheckCodeAt below to rewrite the interrupt/stack check.
// 0x79 is the x86 short-form `jns rel8` opcode; kJnsOffset is its
// 8-bit displacement at the patched site.
208 static const byte kJnsInstruction = 0x79;
209 static const byte kJnsOffset = 0x13;
// 0x73 is the x86 short-form `jae rel8` opcode; kJaeOffset is its
// 8-bit displacement at the patched site.
210 static const byte kJaeInstruction = 0x73;
211 static const byte kJaeOffset = 0x07;
// 0xe8 is the x86 `call rel32` opcode.
212 static const byte kCallInstruction = 0xe8;
// 0x66 0x90 encodes a two-byte nop (operand-size prefix + nop), used to
// overwrite the two-byte conditional jump.
213 static const byte kNopByteOne = 0x66;
214 static const byte kNopByteTwo = 0x90;
215 
216 
// Patches the stack-check sequence in unoptimized code for on-stack
// replacement: the two-byte conditional branch that normally skips the
// stack-guard call is overwritten with a two-byte nop, and the call
// target is redirected from `check_code` to `replacement_code`, making
// the call unconditional.
217 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
218  Address pc_after,
219  Code* check_code,
220  Code* replacement_code) {
// pc_after points just past the call; its 32-bit target occupies the
// preceding kIntSize bytes.
221  Address call_target_address = pc_after - kIntSize;
222  ASSERT_EQ(check_code->entry(),
223  Assembler::target_address_at(call_target_address));
224  // The stack check code matches the pattern:
225  //
226  // cmp esp, <limit>
227  // jae ok
228  // call <stack guard>
229  // test eax, <loop nesting depth>
230  // ok: ...
231  //
232  // We will patch away the branch so the code is:
233  //
234  // cmp esp, <limit> ;; Not changed
235  // nop
236  // nop
237  // call <on-stack replacement>
238  // test eax, <loop nesting depth>
239  // ok:
240 
// With count-based interrupts the branch at the site is `jns`;
// otherwise it is the `jae` of the stack-limit compare shown above.
241  if (FLAG_count_based_interrupts) {
242  ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
243  ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
244  } else {
245  ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
246  ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
247  }
248  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
249  *(call_target_address - 3) = kNopByteOne;
250  *(call_target_address - 2) = kNopByteTwo;
251  Assembler::set_target_address_at(call_target_address,
252  replacement_code->entry());
253 
// Record the patched-in code target with incremental marking so the GC
// sees the new reference.
254  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
255  unoptimized_code, call_target_address, replacement_code);
256 }
257 
258 
// Reverses PatchStackCheckCodeAt: restores the two-byte conditional
// branch over the nops and points the guard call back at `check_code`.
259 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
260  Address pc_after,
261  Code* check_code,
262  Code* replacement_code) {
263  Address call_target_address = pc_after - kIntSize;
264  ASSERT_EQ(replacement_code->entry(),
265  Assembler::target_address_at(call_target_address));
266 
267  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
268  // restore the conditional branch.
269  ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
270  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
271  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
// Restore `jns` (count-based interrupts) or `jae` (stack-limit check),
// mirroring the choice made when patching.
272  if (FLAG_count_based_interrupts) {
273  *(call_target_address - 3) = kJnsInstruction;
274  *(call_target_address - 2) = kJnsOffset;
275  } else {
276  *(call_target_address - 3) = kJaeInstruction;
277  *(call_target_address - 2) = kJaeOffset;
278  }
279  Assembler::set_target_address_at(call_target_address,
280  check_code->entry());
281 
// Record the restored code target with incremental marking.
282  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
283  unoptimized_code, call_target_address, check_code);
284 }
285 
286 
287 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
288  ByteArray* translations = data->TranslationByteArray();
289  int length = data->DeoptCount();
290  for (int i = 0; i < length; i++) {
291  if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
292  TranslationIterator it(translations, data->TranslationIndex(i)->value());
293  int value = it.Next();
294  ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
295  // Read the number of frames.
296  value = it.Next();
297  if (value == 1) return i;
298  }
299  }
300  UNREACHABLE();
301  return -1;
302 }
303 
304 
// Builds the single output frame used when entering optimized code via
// on-stack replacement (OSR): translates the unoptimized input frame into
// the optimized frame layout, records dynamic 8-byte alignment padding,
// and falls back to reusing the input frame if any translation command
// fails.
305 void Deoptimizer::DoComputeOsrOutputFrame() {
306  DeoptimizationInputData* data = DeoptimizationInputData::cast(
307  optimized_code_->deoptimization_data());
308  unsigned ast_id = data->OsrAstId()->value();
309  // TODO(kasperl): This should not be the bailout_id_. It should be
310  // the ast id. Confusing.
311  ASSERT(bailout_id_ == ast_id);
312 
313  int bailout_id = LookupBailoutId(data, ast_id);
314  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
315  ByteArray* translations = data->TranslationByteArray();
316 
317  TranslationIterator iterator(translations, translation_index);
// The translation stream must start with BEGIN and describe exactly one
// frame (LookupBailoutId already selected such an entry).
318  Translation::Opcode opcode =
319  static_cast<Translation::Opcode>(iterator.Next());
320  ASSERT(Translation::BEGIN == opcode);
321  USE(opcode);
322  int count = iterator.Next();
323  iterator.Next(); // Drop JS frames count.
324  ASSERT(count == 1);
325  USE(count);
326 
327  opcode = static_cast<Translation::Opcode>(iterator.Next());
328  USE(opcode);
329  ASSERT(Translation::JS_FRAME == opcode);
330  unsigned node_id = iterator.Next();
331  USE(node_id);
332  ASSERT(node_id == ast_id);
333  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
334  USE(function);
335  ASSERT(function == function_);
336  unsigned height = iterator.Next();
337  unsigned height_in_bytes = height * kPointerSize;
338  USE(height_in_bytes);
339 
340  unsigned fixed_size = ComputeFixedSize(function_);
341  unsigned input_frame_size = input_->GetFrameSize();
342  ASSERT(fixed_size + height_in_bytes == input_frame_size);
343 
344  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
345  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
346  unsigned outgoing_size = outgoing_height * kPointerSize;
347  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
348  ASSERT(outgoing_size == 0); // OSR does not happen in the middle of a call.
349 
350  if (FLAG_trace_osr) {
351  PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
352  reinterpret_cast<intptr_t>(function_));
353  function_->PrintName();
354  PrintF(" => node=%u, frame=%d->%d, ebp:esp=0x%08x:0x%08x]\n",
355  ast_id,
356  input_frame_size,
357  output_frame_size,
358  input_->GetRegister(ebp.code()),
359  input_->GetRegister(esp.code()));
360  }
361 
362  // There's only one output frame in the OSR case.
363  output_count_ = 1;
364  output_ = new FrameDescription*[1];
365  output_[0] = new(output_frame_size) FrameDescription(
366  output_frame_size, function_);
367  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
368 
369  // Clear the incoming parameters in the optimized frame to avoid
370  // confusing the garbage collector.
371  unsigned output_offset = output_frame_size - kPointerSize;
372  int parameter_count = function_->shared()->formal_parameter_count() + 1;
373  for (int i = 0; i < parameter_count; ++i) {
374  output_[0]->SetFrameSlot(output_offset, 0);
375  output_offset -= kPointerSize;
376  }
377 
378  // Translate the incoming parameters. This may overwrite some of the
379  // incoming argument slots we've just cleared.
380  int input_offset = input_frame_size - kPointerSize;
381  bool ok = true;
382  int limit = input_offset - (parameter_count * kPointerSize);
383  while (ok && input_offset > limit) {
384  ok = DoOsrTranslateCommand(&iterator, &input_offset);
385  }
386 
387  // There are no translation commands for the caller's pc and fp, the
388  // context, and the function. Set them up explicitly.
// NOTE(review): original lines 389-390 — the header of the loop that this
// line closes (presumably iterating `i` over the fixed-frame slot
// offsets) — are missing from this extraction; confirm against the full
// source.
391  i -= kPointerSize) {
392  uint32_t input_value = input_->GetFrameSlot(input_offset);
393  if (FLAG_trace_osr) {
394  const char* name = "UNKNOWN";
395  switch (i) {
// NOTE(review): the `case` labels of this switch (original lines 396,
// 399, 402 and 405) are missing from this extraction; the assignments
// below name the caller's pc, fp, context and function slots.
397  name = "caller's pc";
398  break;
400  name = "fp";
401  break;
403  name = "context";
404  break;
406  name = "function";
407  break;
408  }
409  PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
410  output_offset,
411  input_value,
412  input_offset,
413  name);
414  }
415  output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
416  input_offset -= kPointerSize;
417  output_offset -= kPointerSize;
418  }
419 
420  // All OSR stack frames are dynamically aligned to an 8-byte boundary.
421  int frame_pointer = input_->GetRegister(ebp.code());
422  if ((frame_pointer & kPointerSize) != 0) {
423  frame_pointer -= kPointerSize;
424  has_alignment_padding_ = 1;
425  }
426 
427  int32_t alignment_state = (has_alignment_padding_ == 1) ?
// NOTE(review): original lines 428-429 — the two operands of this
// conditional expression (presumably the padded/unpadded alignment
// marker constants) — are missing from this extraction; confirm against
// the full source.
430  if (FLAG_trace_osr) {
431  PrintF(" [sp + %d] <- 0x%08x ; (alignment state)\n",
432  output_offset,
433  alignment_state);
434  }
435  output_[0]->SetFrameSlot(output_offset, alignment_state);
436  output_offset -= kPointerSize;
437 
438  // Translate the rest of the frame.
439  while (ok && input_offset >= 0) {
440  ok = DoOsrTranslateCommand(&iterator, &input_offset);
441  }
442 
443  // If translation of any command failed, continue using the input frame.
444  if (!ok) {
445  delete output_[0];
446  output_[0] = input_;
447  output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
448  } else {
449  // Set up the frame pointer and the context pointer.
450  output_[0]->SetRegister(ebp.code(), frame_pointer);
451  output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code()));
452 
// Resume in the optimized code at its recorded OSR pc offset.
453  unsigned pc_offset = data->OsrPcOffset()->value();
454  uint32_t pc = reinterpret_cast<uint32_t>(
455  optimized_code_->entry() + pc_offset);
456  output_[0]->SetPc(pc);
457  }
458  Code* continuation =
459  function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
460  output_[0]->SetContinuation(
461  reinterpret_cast<uint32_t>(continuation->entry()));
462 
463  if (FLAG_trace_osr) {
464  PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
465  ok ? "finished" : "aborted",
466  reinterpret_cast<intptr_t>(function));
467  function->PrintName();
468  PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
469  }
470 }
471 
472 
// Builds an ARGUMENTS_ADAPTOR output frame from the translation stream:
// translated parameters, caller pc/fp, a context sentinel, the function,
// and the argument count, then sets the pc to resume in the adaptor
// trampoline at its recorded deopt pc offset.
473 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
474  int frame_index) {
475  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
476  unsigned height = iterator->Next();
477  unsigned height_in_bytes = height * kPointerSize;
478  if (FLAG_trace_deopt) {
479  PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
480  }
481 
482  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
483  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
484 
485  // Allocate and store the output frame description.
486  FrameDescription* output_frame =
487  new(output_frame_size) FrameDescription(output_frame_size, function);
488  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
489 
490  // Arguments adaptor can not be topmost or bottommost.
491  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
492  ASSERT(output_[frame_index] == NULL);
493  output_[frame_index] = output_frame;
494 
495  // The top address of the frame is computed from the previous
496  // frame's top and this frame's size.
497  uint32_t top_address;
498  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
499  output_frame->SetTop(top_address);
500 
501  // Compute the incoming parameter translation.
502  int parameter_count = height;
503  unsigned output_offset = output_frame_size;
504  for (int i = 0; i < parameter_count; ++i) {
505  output_offset -= kPointerSize;
506  DoTranslateCommand(iterator, frame_index, output_offset);
507  }
508 
509  // Read caller's PC from the previous frame.
510  output_offset -= kPointerSize;
511  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
512  output_frame->SetFrameSlot(output_offset, callers_pc);
513  if (FLAG_trace_deopt) {
514  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
515  top_address + output_offset, output_offset, callers_pc);
516  }
517 
518  // Read caller's FP from the previous frame, and set this frame's FP.
519  output_offset -= kPointerSize;
520  intptr_t value = output_[frame_index - 1]->GetFp();
521  output_frame->SetFrameSlot(output_offset, value);
522  intptr_t fp_value = top_address + output_offset;
523  output_frame->SetFp(fp_value);
524  if (FLAG_trace_deopt) {
525  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
526  fp_value, output_offset, value);
527  }
528 
529  // A marker value is used in place of the context.
530  output_offset -= kPointerSize;
531  intptr_t context = reinterpret_cast<intptr_t>(
// NOTE(review): original line 532 — the operand of this cast (per the
// trace string below, the arguments-adaptor context sentinel) — is
// missing from this extraction; confirm against the full source.
533  output_frame->SetFrameSlot(output_offset, context);
534  if (FLAG_trace_deopt) {
535  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
536  top_address + output_offset, output_offset, context);
537  }
538 
539  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
540  output_offset -= kPointerSize;
541  value = reinterpret_cast<intptr_t>(function);
542  output_frame->SetFrameSlot(output_offset, value);
543  if (FLAG_trace_deopt) {
544  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
545  top_address + output_offset, output_offset, value);
546  }
547 
548  // Number of incoming arguments.
// height - 1 excludes the receiver from the argument count.
549  output_offset -= kPointerSize;
550  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
551  output_frame->SetFrameSlot(output_offset, value);
552  if (FLAG_trace_deopt) {
553  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
554  top_address + output_offset, output_offset, value, height - 1);
555  }
556 
557  ASSERT(0 == output_offset);
558 
559  Builtins* builtins = isolate_->builtins();
560  Code* adaptor_trampoline =
561  builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
// Resume in the adaptor trampoline at its recorded deopt pc offset.
562  uint32_t pc = reinterpret_cast<uint32_t>(
563  adaptor_trampoline->instruction_start() +
564  isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
565  output_frame->SetPc(pc);
566 }
567 
568 
// Builds a CONSTRUCT output frame reflecting JSConstructStubGeneric:
// translated parameters, caller pc/fp, the caller's context, a construct
// sentinel in the function slot, the stub's code object, the argument
// count, and the allocated receiver; sets the pc to resume in the
// construct stub at its recorded deopt pc offset.
569 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
570  int frame_index) {
571  Builtins* builtins = isolate_->builtins();
572  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
573  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
574  unsigned height = iterator->Next();
575  unsigned height_in_bytes = height * kPointerSize;
576  if (FLAG_trace_deopt) {
577  PrintF(" translating construct stub => height=%d\n", height_in_bytes);
578  }
579 
// Seven fixed slots: caller pc, caller fp, context, construct sentinel,
// code object, argc, and the allocated receiver (written below in that
// order).
580  unsigned fixed_frame_size = 7 * kPointerSize;
581  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
582 
583  // Allocate and store the output frame description.
584  FrameDescription* output_frame =
585  new(output_frame_size) FrameDescription(output_frame_size, function);
586  output_frame->SetFrameType(StackFrame::CONSTRUCT);
587 
588  // Construct stub can not be topmost or bottommost.
589  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
590  ASSERT(output_[frame_index] == NULL);
591  output_[frame_index] = output_frame;
592 
593  // The top address of the frame is computed from the previous
594  // frame's top and this frame's size.
595  uint32_t top_address;
596  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
597  output_frame->SetTop(top_address);
598 
599  // Compute the incoming parameter translation.
600  int parameter_count = height;
601  unsigned output_offset = output_frame_size;
602  for (int i = 0; i < parameter_count; ++i) {
603  output_offset -= kPointerSize;
604  DoTranslateCommand(iterator, frame_index, output_offset);
605  }
606 
607  // Read caller's PC from the previous frame.
608  output_offset -= kPointerSize;
609  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
610  output_frame->SetFrameSlot(output_offset, callers_pc);
611  if (FLAG_trace_deopt) {
612  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
613  top_address + output_offset, output_offset, callers_pc);
614  }
615 
616  // Read caller's FP from the previous frame, and set this frame's FP.
617  output_offset -= kPointerSize;
618  intptr_t value = output_[frame_index - 1]->GetFp();
619  output_frame->SetFrameSlot(output_offset, value);
620  intptr_t fp_value = top_address + output_offset;
621  output_frame->SetFp(fp_value);
622  if (FLAG_trace_deopt) {
623  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
624  fp_value, output_offset, value);
625  }
626 
627  // The context can be gotten from the previous frame.
628  output_offset -= kPointerSize;
629  value = output_[frame_index - 1]->GetContext();
630  output_frame->SetFrameSlot(output_offset, value);
631  if (FLAG_trace_deopt) {
632  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
633  top_address + output_offset, output_offset, value);
634  }
635 
636  // A marker value is used in place of the function.
637  output_offset -= kPointerSize;
638  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
639  output_frame->SetFrameSlot(output_offset, value);
640  if (FLAG_trace_deopt) {
641  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
642  top_address + output_offset, output_offset, value);
643  }
644 
645  // The output frame reflects a JSConstructStubGeneric frame.
646  output_offset -= kPointerSize;
647  value = reinterpret_cast<intptr_t>(construct_stub);
648  output_frame->SetFrameSlot(output_offset, value);
649  if (FLAG_trace_deopt) {
650  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; code object\n",
651  top_address + output_offset, output_offset, value);
652  }
653 
654  // Number of incoming arguments.
// height - 1 excludes the receiver from the argument count.
655  output_offset -= kPointerSize;
656  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
657  output_frame->SetFrameSlot(output_offset, value);
658  if (FLAG_trace_deopt) {
659  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
660  top_address + output_offset, output_offset, value, height - 1);
661  }
662 
663  // The newly allocated object was passed as receiver in the artificial
664  // constructor stub environment created by HEnvironment::CopyForInlining().
665  output_offset -= kPointerSize;
666  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
667  output_frame->SetFrameSlot(output_offset, value);
668  if (FLAG_trace_deopt) {
669  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
670  top_address + output_offset, output_offset, value);
671  }
672 
673  ASSERT(0 == output_offset);
674 
// Resume in the construct stub at its recorded deopt pc offset.
675  uint32_t pc = reinterpret_cast<uint32_t>(
676  construct_stub->instruction_start() +
677  isolate_->heap()->construct_stub_deopt_pc_offset()->value());
678  output_frame->SetPc(pc);
679 }
680 
681 
682 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
683  int frame_index) {
684  int node_id = iterator->Next();
685  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
686  unsigned height = iterator->Next();
687  unsigned height_in_bytes = height * kPointerSize;
688  if (FLAG_trace_deopt) {
689  PrintF(" translating ");
690  function->PrintName();
691  PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
692  }
693 
694  // The 'fixed' part of the frame consists of the incoming parameters and
695  // the part described by JavaScriptFrameConstants.
696  unsigned fixed_frame_size = ComputeFixedSize(function);
697  unsigned input_frame_size = input_->GetFrameSize();
698  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
699 
700  // Allocate and store the output frame description.
701  FrameDescription* output_frame =
702  new(output_frame_size) FrameDescription(output_frame_size, function);
703  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
704 
705  bool is_bottommost = (0 == frame_index);
706  bool is_topmost = (output_count_ - 1 == frame_index);
707  ASSERT(frame_index >= 0 && frame_index < output_count_);
708  ASSERT(output_[frame_index] == NULL);
709  output_[frame_index] = output_frame;
710 
711  // Compute the incoming parameter translation.
712  int parameter_count = function->shared()->formal_parameter_count() + 1;
713  unsigned output_offset = output_frame_size;
714  unsigned input_offset = input_frame_size;
715 
716  unsigned alignment_state_offset =
717  input_offset - parameter_count * kPointerSize -
719  kPointerSize;
722 
723  // The top address for the bottommost output frame can be computed from
724  // the input frame pointer and the output frame's height. For all
725  // subsequent output frames, it can be computed from the previous one's
726  // top address and the current frame's size.
727  uint32_t top_address;
728  if (is_bottommost) {
729  int32_t alignment_state = input_->GetFrameSlot(alignment_state_offset);
730  has_alignment_padding_ =
731  (alignment_state == kAlignmentPaddingPushed) ? 1 : 0;
732  // 2 = context and function in the frame.
733  // If the optimized frame had alignment padding, adjust the frame pointer
734  // to point to the new position of the old frame pointer after padding
735  // is removed. Subtract 2 * kPointerSize for the context and function slots.
736  top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
737  height_in_bytes + has_alignment_padding_ * kPointerSize;
738  } else {
739  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
740  }
741  output_frame->SetTop(top_address);
742 
743  for (int i = 0; i < parameter_count; ++i) {
744  output_offset -= kPointerSize;
745  DoTranslateCommand(iterator, frame_index, output_offset);
746  }
747  input_offset -= (parameter_count * kPointerSize);
748 
749  // There are no translation commands for the caller's pc and fp, the
750  // context, and the function. Synthesize their values and set them up
751  // explicitly.
752  //
753  // The caller's pc for the bottommost output frame is the same as in the
754  // input frame. For all subsequent output frames, it can be read from the
755  // previous one. This frame's pc can be computed from the non-optimized
756  // function code and AST id of the bailout.
757  output_offset -= kPointerSize;
758  input_offset -= kPointerSize;
759  intptr_t value;
760  if (is_bottommost) {
761  value = input_->GetFrameSlot(input_offset);
762  } else {
763  value = output_[frame_index - 1]->GetPc();
764  }
765  output_frame->SetFrameSlot(output_offset, value);
766  if (FLAG_trace_deopt) {
767  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
768  top_address + output_offset, output_offset, value);
769  }
770 
771  // The caller's frame pointer for the bottommost output frame is the same
772  // as in the input frame. For all subsequent output frames, it can be
773  // read from the previous one. Also compute and set this frame's frame
774  // pointer.
775  output_offset -= kPointerSize;
776  input_offset -= kPointerSize;
777  if (is_bottommost) {
778  value = input_->GetFrameSlot(input_offset);
779  } else {
780  value = output_[frame_index - 1]->GetFp();
781  }
782  output_frame->SetFrameSlot(output_offset, value);
783  intptr_t fp_value = top_address + output_offset;
784  ASSERT(!is_bottommost ||
785  (input_->GetRegister(ebp.code()) + has_alignment_padding_ * kPointerSize) ==
786  fp_value);
787  output_frame->SetFp(fp_value);
788  if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value);
789  if (FLAG_trace_deopt) {
790  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
791  fp_value, output_offset, value);
792  }
793  ASSERT(!is_bottommost || !has_alignment_padding_ ||
794  (fp_value & kPointerSize) != 0);
795 
796  // For the bottommost output frame the context can be gotten from the input
797  // frame. For all subsequent output frames it can be gotten from the function
798  // so long as we don't inline functions that need local contexts.
799  output_offset -= kPointerSize;
800  input_offset -= kPointerSize;
801  if (is_bottommost) {
802  value = input_->GetFrameSlot(input_offset);
803  } else {
804  value = reinterpret_cast<uint32_t>(function->context());
805  }
806  output_frame->SetFrameSlot(output_offset, value);
807  output_frame->SetContext(value);
808  if (is_topmost) output_frame->SetRegister(esi.code(), value);
809  if (FLAG_trace_deopt) {
810  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
811  top_address + output_offset, output_offset, value);
812  }
813 
814  // The function was mentioned explicitly in the BEGIN_FRAME.
815  output_offset -= kPointerSize;
816  input_offset -= kPointerSize;
817  value = reinterpret_cast<uint32_t>(function);
818  // The function for the bottommost output frame should also agree with the
819  // input frame.
820  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
821  output_frame->SetFrameSlot(output_offset, value);
822  if (FLAG_trace_deopt) {
823  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
824  top_address + output_offset, output_offset, value);
825  }
826 
827  // Translate the rest of the frame.
828  for (unsigned i = 0; i < height; ++i) {
829  output_offset -= kPointerSize;
830  DoTranslateCommand(iterator, frame_index, output_offset);
831  }
832  ASSERT(0 == output_offset);
833 
834  // Compute this frame's PC, state, and continuation.
835  Code* non_optimized_code = function->shared()->code();
836  FixedArray* raw_data = non_optimized_code->deoptimization_data();
837  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
838  Address start = non_optimized_code->instruction_start();
839  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
840  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
841  uint32_t pc_value = reinterpret_cast<uint32_t>(start + pc_offset);
842  output_frame->SetPc(pc_value);
843 
846  output_frame->SetState(Smi::FromInt(state));
847 
848  // Set the continuation for the topmost frame.
849  if (is_topmost && bailout_type_ != DEBUGGER) {
850  Builtins* builtins = isolate_->builtins();
851  Code* continuation = (bailout_type_ == EAGER)
852  ? builtins->builtin(Builtins::kNotifyDeoptimized)
853  : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
854  output_frame->SetContinuation(
855  reinterpret_cast<uint32_t>(continuation->entry()));
856  }
857 }
858 
859 
860 void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
861  // Set the register values. The values are not important as there are no
862  // callee saved registers in JavaScript frames, so all registers are
863  // spilled. Registers ebp and esp are set to the correct values though.
864 
865  for (int i = 0; i < Register::kNumRegisters; i++) {
866  input_->SetRegister(i, i * 4);
867  }
868  input_->SetRegister(esp.code(), reinterpret_cast<intptr_t>(frame->sp()));
869  input_->SetRegister(ebp.code(), reinterpret_cast<intptr_t>(frame->fp()));
870  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
871  input_->SetDoubleRegister(i, 0.0);
872  }
873 
874  // Fill the frame content from the actual data on the frame.
875  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
876  input_->SetFrameSlot(i, Memory::uint32_at(tos + i));
877  }
878 }
879 
880 
881 #define __ masm()->
882 
883 void Deoptimizer::EntryGenerator::Generate() {
884  GeneratePrologue();
885  CpuFeatures::Scope scope(SSE2);
886 
887  Isolate* isolate = masm()->isolate();
888 
889  // Save all general purpose registers before messing with them.
890  const int kNumberOfRegisters = Register::kNumRegisters;
891 
892  const int kDoubleRegsSize = kDoubleSize *
894  __ sub(esp, Immediate(kDoubleRegsSize));
895  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
896  XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
897  int offset = i * kDoubleSize;
898  __ movdbl(Operand(esp, offset), xmm_reg);
899  }
900 
901  __ pushad();
902 
903  const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
904  kDoubleRegsSize;
905 
906  // Get the bailout id from the stack.
907  __ mov(ebx, Operand(esp, kSavedRegistersAreaSize));
908 
909  // Get the address of the location in the code object if possible
910  // and compute the fp-to-sp delta in register edx.
911  if (type() == EAGER) {
912  __ Set(ecx, Immediate(0));
913  __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kPointerSize));
914  } else {
915  __ mov(ecx, Operand(esp, kSavedRegistersAreaSize + 1 * kPointerSize));
916  __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 2 * kPointerSize));
917  }
918  __ sub(edx, ebp);
919  __ neg(edx);
920 
921  // Allocate a new deoptimizer object.
922  __ PrepareCallCFunction(6, eax);
924  __ mov(Operand(esp, 0 * kPointerSize), eax); // Function.
925  __ mov(Operand(esp, 1 * kPointerSize), Immediate(type())); // Bailout type.
926  __ mov(Operand(esp, 2 * kPointerSize), ebx); // Bailout id.
927  __ mov(Operand(esp, 3 * kPointerSize), ecx); // Code address or 0.
928  __ mov(Operand(esp, 4 * kPointerSize), edx); // Fp-to-sp delta.
929  __ mov(Operand(esp, 5 * kPointerSize),
930  Immediate(ExternalReference::isolate_address()));
931  {
932  AllowExternalCallThatCantCauseGC scope(masm());
933  __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
934  }
935 
936  // Preserve deoptimizer object in register eax and get the input
937  // frame descriptor pointer.
938  __ mov(ebx, Operand(eax, Deoptimizer::input_offset()));
939 
940  // Fill in the input registers.
941  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
942  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
943  __ pop(Operand(ebx, offset));
944  }
945 
946  // Fill in the double input registers.
947  int double_regs_offset = FrameDescription::double_registers_offset();
948  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
949  int dst_offset = i * kDoubleSize + double_regs_offset;
950  int src_offset = i * kDoubleSize;
951  __ movdbl(xmm0, Operand(esp, src_offset));
952  __ movdbl(Operand(ebx, dst_offset), xmm0);
953  }
954 
955  // Remove the bailout id and the double registers from the stack.
956  if (type() == EAGER) {
957  __ add(esp, Immediate(kDoubleRegsSize + kPointerSize));
958  } else {
959  __ add(esp, Immediate(kDoubleRegsSize + 2 * kPointerSize));
960  }
961 
962  // Compute a pointer to the unwinding limit in register ecx; that is
963  // the first stack slot not part of the input frame.
964  __ mov(ecx, Operand(ebx, FrameDescription::frame_size_offset()));
965  __ add(ecx, esp);
966 
967  // Unwind the stack down to - but not including - the unwinding
968  // limit and copy the contents of the activation frame to the input
969  // frame description.
971  Label pop_loop;
972  __ bind(&pop_loop);
973  __ pop(Operand(edx, 0));
974  __ add(edx, Immediate(sizeof(uint32_t)));
975  __ cmp(ecx, esp);
976  __ j(not_equal, &pop_loop);
977 
978  // Compute the output frame in the deoptimizer.
979  __ push(eax);
980  __ PrepareCallCFunction(1, ebx);
981  __ mov(Operand(esp, 0 * kPointerSize), eax);
982  {
983  AllowExternalCallThatCantCauseGC scope(masm());
984  __ CallCFunction(
985  ExternalReference::compute_output_frames_function(isolate), 1);
986  }
987  __ pop(eax);
988 
989  if (type() != OSR) {
990  // If frame was dynamically aligned, pop padding.
991  Label no_padding;
993  Immediate(0));
994  __ j(equal, &no_padding);
995  __ pop(ecx);
996  if (FLAG_debug_code) {
997  __ cmp(ecx, Immediate(kAlignmentZapValue));
998  __ Assert(equal, "alignment marker expected");
999  }
1000  __ bind(&no_padding);
1001  } else {
1002  // If frame needs dynamic alignment push padding.
1003  Label no_padding;
1005  Immediate(0));
1006  __ j(equal, &no_padding);
1007  __ push(Immediate(kAlignmentZapValue));
1008  __ bind(&no_padding);
1009  }
1010 
1011  // Replace the current frame with the output frames.
1012  Label outer_push_loop, inner_push_loop;
1013  // Outer loop state: eax = current FrameDescription**, edx = one past the
1014  // last FrameDescription**.
1015  __ mov(edx, Operand(eax, Deoptimizer::output_count_offset()));
1016  __ mov(eax, Operand(eax, Deoptimizer::output_offset()));
1017  __ lea(edx, Operand(eax, edx, times_4, 0));
1018  __ bind(&outer_push_loop);
1019  // Inner loop state: ebx = current FrameDescription*, ecx = loop index.
1020  __ mov(ebx, Operand(eax, 0));
1021  __ mov(ecx, Operand(ebx, FrameDescription::frame_size_offset()));
1022  __ bind(&inner_push_loop);
1023  __ sub(ecx, Immediate(sizeof(uint32_t)));
1025  __ test(ecx, ecx);
1026  __ j(not_zero, &inner_push_loop);
1027  __ add(eax, Immediate(kPointerSize));
1028  __ cmp(eax, edx);
1029  __ j(below, &outer_push_loop);
1030 
1031  // In case of OSR, we have to restore the XMM registers.
1032  if (type() == OSR) {
1033  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
1034  XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
1035  int src_offset = i * kDoubleSize + double_regs_offset;
1036  __ movdbl(xmm_reg, Operand(ebx, src_offset));
1037  }
1038  }
1039 
1040  // Push state, pc, and continuation from the last output frame.
1041  if (type() != OSR) {
1042  __ push(Operand(ebx, FrameDescription::state_offset()));
1043  }
1044  __ push(Operand(ebx, FrameDescription::pc_offset()));
1045  __ push(Operand(ebx, FrameDescription::continuation_offset()));
1046 
1047 
1048  // Push the registers from the last output frame.
1049  for (int i = 0; i < kNumberOfRegisters; i++) {
1050  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
1051  __ push(Operand(ebx, offset));
1052  }
1053 
1054  // Restore the registers from the stack.
1055  __ popad();
1056 
1057  // Return to the continuation point.
1058  __ ret(0);
1059 }
1060 
1061 
1063  // Create a sequence of deoptimization entries.
1064  Label done;
1065  for (int i = 0; i < count(); i++) {
1066  int start = masm()->pc_offset();
1067  USE(start);
1068  __ push_imm32(i);
1069  __ jmp(&done);
1070  ASSERT(masm()->pc_offset() - start == table_entry_size_);
1071  }
1072  __ bind(&done);
1073 }
1074 
1075 #undef __
1076 
1077 
1078 } } // namespace v8::internal
1079 
1080 #endif // V8_TARGET_ARCH_IA32
byte * Address
Definition: globals.h:172
Code * builtin(Name name)
Definition: builtins.h:312
static DeoptimizationOutputData * cast(Object *obj)
#define V8PRIxPTR
Definition: globals.h:204
void PrintF(const char *format,...)
Definition: v8utils.cc:40
unsigned stack_slots()
Definition: objects-inl.h:3171
static Smi * FromInt(int value)
Definition: objects-inl.h:973
void SetFrameSlot(unsigned offset, intptr_t value)
Definition: deoptimizer.h:399
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const Register esp
Builtins * builtins()
Definition: isolate.h:909
int int32_t
Definition: unicode.cc:47
static XMMRegister FromAllocationIndex(int index)
static const int kNumAllocatableRegisters
const int kNoAlignmentPadding
Definition: frames-ia32.h:56
static const int kCallInstructionLength
#define ASSERT(condition)
Definition: checks.h:270
static void DeoptimizeFunction(JSFunction *function)
#define ASSERT_GE(v1, v2)
Definition: checks.h:273
intptr_t GetContext() const
Definition: deoptimizer.h:432
void SetFrameType(StackFrame::Type type)
Definition: deoptimizer.h:441
const int kAlignmentPaddingPushed
Definition: frames-ia32.h:57
const int kIntSize
Definition: globals.h:231
static const int kNumRegisters
Definition: assembler-arm.h:73
static int double_registers_offset()
Definition: deoptimizer.h:459
uint8_t byte
Definition: globals.h:171
const Register ebp
#define UNREACHABLE()
Definition: checks.h:50
static int output_offset()
Definition: deoptimizer.h:222
const Register eax
const int kDoubleSize
Definition: globals.h:232
const int kPointerSize
Definition: globals.h:234
const Register ecx
static void set_target_address_at(Address pc, Address target)
#define __
void SetRegister(unsigned n, intptr_t value)
Definition: deoptimizer.h:413
static unsigned decode(uint32_t value)
Definition: utils.h:272
static const int kDynamicAlignmentStateOffset
Definition: frames-ia32.h:127
const Register pc
friend class DeoptimizingCodeListNode
Definition: deoptimizer.h:345
const int kAlignmentZapValue
Definition: frames-ia32.h:58
static int GetOutputInfo(DeoptimizationOutputData *data, unsigned node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:486
static void EnsureRelocSpaceForLazyDeoptimization(Handle< Code > code)
uint32_t GetFrameSize() const
Definition: deoptimizer.h:369
void SetContinuation(intptr_t pc)
Definition: deoptimizer.h:438
static int output_count_offset()
Definition: deoptimizer.h:219
static const int kHeaderSize
Definition: objects.h:2233
static const int kNumAllocatableRegisters
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static Address target_address_at(Address pc)
intptr_t GetFrameSlot(unsigned offset)
Definition: deoptimizer.h:378
const Register ebx
static Address GetDeoptimizationEntry(int id, BailoutType type)
Definition: deoptimizer.cc:445
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
const Register esi
friend class FrameDescription
Definition: deoptimizer.h:344
void USE(T)
Definition: globals.h:303
virtual void GeneratePrologue()
static uint32_t & uint32_at(Address addr)
Definition: v8memory.h:47
#define RUNTIME_ENTRY(name, nargs, ressize)
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:403
const Register edx
void SetDoubleRegister(unsigned n, double value)
Definition: deoptimizer.h:418
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
FlagType type() const
Definition: flags.cc:1358
static int has_alignment_padding_offset()
Definition: deoptimizer.h:224
static DeoptimizationInputData * cast(Object *obj)
const XMMRegister xmm0
static JSFunction * cast(Object *obj)