V8 3.11.10 (as bundled with Node.js 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
deoptimizer-x64.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 #include "safepoint-table.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 
// Size in bytes of one entry in the deoptimization jump table on x64.
const int Deoptimizer::table_entry_size_ = 10;
42 
43 
46 }
47 
48 
49 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
50  HandleScope scope;
51  AssertNoAllocation no_allocation;
52 
53  if (!function->IsOptimized()) return;
54 
55  // Get the optimized code.
56  Code* code = function->code();
57 
58  // Invalidate the relocation information, as it will become invalid by the
59  // code patching below, and is not needed any more.
60  code->InvalidateRelocation();
61 
62  // For each LLazyBailout instruction insert a absolute call to the
63  // corresponding deoptimization entry, or a short call to an absolute
64  // jump if space is short. The absolute jumps are put in a table just
65  // before the safepoint table (space was allocated there when the Code
66  // object was created, if necessary).
67 
68  Address instruction_start = function->code()->instruction_start();
69 #ifdef DEBUG
70  Address prev_call_address = NULL;
71 #endif
72  DeoptimizationInputData* deopt_data =
73  DeoptimizationInputData::cast(code->deoptimization_data());
74  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
75  if (deopt_data->Pc(i)->value() == -1) continue;
76  // Position where Call will be patched in.
77  Address call_address = instruction_start + deopt_data->Pc(i)->value();
78  // There is room enough to write a long call instruction because we pad
79  // LLazyBailout instructions with nops if necessary.
80  CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
81  patcher.masm()->Call(GetDeoptimizationEntry(i, LAZY), RelocInfo::NONE);
82  ASSERT(prev_call_address == NULL ||
83  call_address >= prev_call_address + patch_size());
84  ASSERT(call_address + patch_size() <= code->instruction_end());
85 #ifdef DEBUG
86  prev_call_address = call_address;
87 #endif
88  }
89 
90  Isolate* isolate = code->GetIsolate();
91 
92  // Add the deoptimizing code to the list.
94  DeoptimizerData* data = isolate->deoptimizer_data();
95  node->set_next(data->deoptimizing_code_list_);
96  data->deoptimizing_code_list_ = node;
97 
98  // We might be in the middle of incremental marking with compaction.
99  // Tell collector to treat this code object in a special way and
100  // ignore all slots that might have been recorded on it.
101  isolate->heap()->mark_compact_collector()->InvalidateCode(code);
102 
103  // Set the code for the function to non-optimized version.
104  function->ReplaceCode(function->shared()->code());
105 
106  if (FLAG_trace_deopt) {
107  PrintF("[forced deoptimization: ");
108  function->PrintName();
109  PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
110  }
111 }
112 
113 
// Instruction bytes asserted/written by PatchStackCheckCodeAt and
// RevertStackCheckCodeAt below. The two bytes preceding the call's rel32
// operand hold either a conditional branch (jns/jae + its operand byte) or,
// once patched, a two-byte nop.
static const byte kJnsInstruction = 0x79;  // jns rel8 opcode
static const byte kJnsOffset = 0x1f;       // operand byte following jns
static const byte kJaeInstruction = 0x73;  // jae rel8 opcode
static const byte kJaeOffset = 0x07;       // operand byte following jae
static const byte kCallInstruction = 0xe8; // call rel32 opcode
static const byte kNopByteOne = 0x66;      // first byte of two-byte nop
static const byte kNopByteTwo = 0x90;      // second byte of two-byte nop
121 
122 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
123  Address pc_after,
124  Code* check_code,
125  Code* replacement_code) {
126  Address call_target_address = pc_after - kIntSize;
127  ASSERT_EQ(check_code->entry(),
128  Assembler::target_address_at(call_target_address));
129  // The stack check code matches the pattern:
130  //
131  // cmp rsp, <limit>
132  // jae ok
133  // call <stack guard>
134  // test rax, <loop nesting depth>
135  // ok: ...
136  //
137  // We will patch away the branch so the code is:
138  //
139  // cmp rsp, <limit> ;; Not changed
140  // nop
141  // nop
142  // call <on-stack replacment>
143  // test rax, <loop nesting depth>
144  // ok:
145  //
146  if (FLAG_count_based_interrupts) {
147  ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
148  ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
149  } else {
150  ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
151  ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
152  }
153  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
154  *(call_target_address - 3) = kNopByteOne;
155  *(call_target_address - 2) = kNopByteTwo;
156  Assembler::set_target_address_at(call_target_address,
157  replacement_code->entry());
158 
159  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
160  unoptimized_code, call_target_address, replacement_code);
161 }
162 
163 
164 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
165  Address pc_after,
166  Code* check_code,
167  Code* replacement_code) {
168  Address call_target_address = pc_after - kIntSize;
169  ASSERT(replacement_code->entry() ==
170  Assembler::target_address_at(call_target_address));
171  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
172  // restore the conditional branch.
173  ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
174  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
175  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
176  if (FLAG_count_based_interrupts) {
177  *(call_target_address - 3) = kJnsInstruction;
178  *(call_target_address - 2) = kJnsOffset;
179  } else {
180  *(call_target_address - 3) = kJaeInstruction;
181  *(call_target_address - 2) = kJaeOffset;
182  }
183  Assembler::set_target_address_at(call_target_address,
184  check_code->entry());
185 
186  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
187  unoptimized_code, call_target_address, check_code);
188 }
189 
190 
191 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
192  ByteArray* translations = data->TranslationByteArray();
193  int length = data->DeoptCount();
194  for (int i = 0; i < length; i++) {
195  if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
196  TranslationIterator it(translations, data->TranslationIndex(i)->value());
197  int value = it.Next();
198  ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
199  // Read the number of frames.
200  value = it.Next();
201  if (value == 1) return i;
202  }
203  }
204  UNREACHABLE();
205  return -1;
206 }
207 
208 
209 void Deoptimizer::DoComputeOsrOutputFrame() {
210  DeoptimizationInputData* data = DeoptimizationInputData::cast(
211  optimized_code_->deoptimization_data());
212  unsigned ast_id = data->OsrAstId()->value();
213  // TODO(kasperl): This should not be the bailout_id_. It should be
214  // the ast id. Confusing.
215  ASSERT(bailout_id_ == ast_id);
216 
217  int bailout_id = LookupBailoutId(data, ast_id);
218  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
219  ByteArray* translations = data->TranslationByteArray();
220 
221  TranslationIterator iterator(translations, translation_index);
222  Translation::Opcode opcode =
223  static_cast<Translation::Opcode>(iterator.Next());
224  ASSERT(Translation::BEGIN == opcode);
225  USE(opcode);
226  int count = iterator.Next();
227  iterator.Skip(1); // Drop JS frame count.
228  ASSERT(count == 1);
229  USE(count);
230 
231  opcode = static_cast<Translation::Opcode>(iterator.Next());
232  USE(opcode);
233  ASSERT(Translation::JS_FRAME == opcode);
234  unsigned node_id = iterator.Next();
235  USE(node_id);
236  ASSERT(node_id == ast_id);
237  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
238  USE(function);
239  ASSERT(function == function_);
240  unsigned height = iterator.Next();
241  unsigned height_in_bytes = height * kPointerSize;
242  USE(height_in_bytes);
243 
244  unsigned fixed_size = ComputeFixedSize(function_);
245  unsigned input_frame_size = input_->GetFrameSize();
246  ASSERT(fixed_size + height_in_bytes == input_frame_size);
247 
248  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
249  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
250  unsigned outgoing_size = outgoing_height * kPointerSize;
251  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
252  ASSERT(outgoing_size == 0); // OSR does not happen in the middle of a call.
253 
254  if (FLAG_trace_osr) {
255  PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
256  reinterpret_cast<intptr_t>(function_));
257  function_->PrintName();
258  PrintF(" => node=%u, frame=%d->%d]\n",
259  ast_id,
260  input_frame_size,
261  output_frame_size);
262  }
263 
264  // There's only one output frame in the OSR case.
265  output_count_ = 1;
266  output_ = new FrameDescription*[1];
267  output_[0] = new(output_frame_size) FrameDescription(
268  output_frame_size, function_);
269  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
270 
271  // Clear the incoming parameters in the optimized frame to avoid
272  // confusing the garbage collector.
273  unsigned output_offset = output_frame_size - kPointerSize;
274  int parameter_count = function_->shared()->formal_parameter_count() + 1;
275  for (int i = 0; i < parameter_count; ++i) {
276  output_[0]->SetFrameSlot(output_offset, 0);
277  output_offset -= kPointerSize;
278  }
279 
280  // Translate the incoming parameters. This may overwrite some of the
281  // incoming argument slots we've just cleared.
282  int input_offset = input_frame_size - kPointerSize;
283  bool ok = true;
284  int limit = input_offset - (parameter_count * kPointerSize);
285  while (ok && input_offset > limit) {
286  ok = DoOsrTranslateCommand(&iterator, &input_offset);
287  }
288 
289  // There are no translation commands for the caller's pc and fp, the
290  // context, and the function. Set them up explicitly.
293  i -= kPointerSize) {
294  intptr_t input_value = input_->GetFrameSlot(input_offset);
295  if (FLAG_trace_osr) {
296  const char* name = "UNKNOWN";
297  switch (i) {
299  name = "caller's pc";
300  break;
302  name = "fp";
303  break;
305  name = "context";
306  break;
308  name = "function";
309  break;
310  }
311  PrintF(" [rsp + %d] <- 0x%08" V8PRIxPTR " ; [rsp + %d] "
312  "(fixed part - %s)\n",
313  output_offset,
314  input_value,
315  input_offset,
316  name);
317  }
318  output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
319  input_offset -= kPointerSize;
320  output_offset -= kPointerSize;
321  }
322 
323  // Translate the rest of the frame.
324  while (ok && input_offset >= 0) {
325  ok = DoOsrTranslateCommand(&iterator, &input_offset);
326  }
327 
328  // If translation of any command failed, continue using the input frame.
329  if (!ok) {
330  delete output_[0];
331  output_[0] = input_;
332  output_[0]->SetPc(reinterpret_cast<intptr_t>(from_));
333  } else {
334  // Set up the frame pointer and the context pointer.
335  output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code()));
336  output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code()));
337 
338  unsigned pc_offset = data->OsrPcOffset()->value();
339  intptr_t pc = reinterpret_cast<intptr_t>(
340  optimized_code_->entry() + pc_offset);
341  output_[0]->SetPc(pc);
342  }
343  Code* continuation =
344  function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
345  output_[0]->SetContinuation(
346  reinterpret_cast<intptr_t>(continuation->entry()));
347 
348  if (FLAG_trace_osr) {
349  PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
350  ok ? "finished" : "aborted",
351  reinterpret_cast<intptr_t>(function));
352  function->PrintName();
353  PrintF(" => pc=0x%0" V8PRIxPTR "]\n", output_[0]->GetPc());
354  }
355 }
356 
357 
358 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
359  int frame_index) {
360  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
361  unsigned height = iterator->Next();
362  unsigned height_in_bytes = height * kPointerSize;
363  if (FLAG_trace_deopt) {
364  PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
365  }
366 
367  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
368  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
369 
370  // Allocate and store the output frame description.
371  FrameDescription* output_frame =
372  new(output_frame_size) FrameDescription(output_frame_size, function);
373  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
374 
375  // Arguments adaptor can not be topmost or bottommost.
376  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
377  ASSERT(output_[frame_index] == NULL);
378  output_[frame_index] = output_frame;
379 
380  // The top address of the frame is computed from the previous
381  // frame's top and this frame's size.
382  intptr_t top_address;
383  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
384  output_frame->SetTop(top_address);
385 
386  // Compute the incoming parameter translation.
387  int parameter_count = height;
388  unsigned output_offset = output_frame_size;
389  for (int i = 0; i < parameter_count; ++i) {
390  output_offset -= kPointerSize;
391  DoTranslateCommand(iterator, frame_index, output_offset);
392  }
393 
394  // Read caller's PC from the previous frame.
395  output_offset -= kPointerSize;
396  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
397  output_frame->SetFrameSlot(output_offset, callers_pc);
398  if (FLAG_trace_deopt) {
399  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
400  V8PRIxPTR " ; caller's pc\n",
401  top_address + output_offset, output_offset, callers_pc);
402  }
403 
404  // Read caller's FP from the previous frame, and set this frame's FP.
405  output_offset -= kPointerSize;
406  intptr_t value = output_[frame_index - 1]->GetFp();
407  output_frame->SetFrameSlot(output_offset, value);
408  intptr_t fp_value = top_address + output_offset;
409  output_frame->SetFp(fp_value);
410  if (FLAG_trace_deopt) {
411  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
412  V8PRIxPTR " ; caller's fp\n",
413  fp_value, output_offset, value);
414  }
415 
416  // A marker value is used in place of the context.
417  output_offset -= kPointerSize;
418  intptr_t context = reinterpret_cast<intptr_t>(
420  output_frame->SetFrameSlot(output_offset, context);
421  if (FLAG_trace_deopt) {
422  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
423  V8PRIxPTR " ; context (adaptor sentinel)\n",
424  top_address + output_offset, output_offset, context);
425  }
426 
427  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
428  output_offset -= kPointerSize;
429  value = reinterpret_cast<intptr_t>(function);
430  output_frame->SetFrameSlot(output_offset, value);
431  if (FLAG_trace_deopt) {
432  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
433  V8PRIxPTR " ; function\n",
434  top_address + output_offset, output_offset, value);
435  }
436 
437  // Number of incoming arguments.
438  output_offset -= kPointerSize;
439  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
440  output_frame->SetFrameSlot(output_offset, value);
441  if (FLAG_trace_deopt) {
442  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
443  V8PRIxPTR " ; argc (%d)\n",
444  top_address + output_offset, output_offset, value, height - 1);
445  }
446 
447  ASSERT(0 == output_offset);
448 
449  Builtins* builtins = isolate_->builtins();
450  Code* adaptor_trampoline =
451  builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
452  intptr_t pc_value = reinterpret_cast<intptr_t>(
453  adaptor_trampoline->instruction_start() +
454  isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
455  output_frame->SetPc(pc_value);
456 }
457 
458 
// Builds the output FrameDescription for a construct-stub frame created by
// an inlined constructor call. Execution resumes inside
// JSConstructStubGeneric at its recorded deoptimization continuation
// offset. Slots are written top-down; the order of the writes below
// defines the frame layout and must not be changed.
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF(" translating construct stub => height=%d\n", height_in_bytes);
  }

  // 7 fixed slots: caller pc, caller fp, context, function sentinel,
  // code object, argc, and the allocated receiver (filled in below).
  unsigned fixed_frame_size = 7 * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  ASSERT(0 == output_offset);

  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
}
577 
578 
579 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
580  int frame_index) {
581  int node_id = iterator->Next();
582  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
583  unsigned height = iterator->Next();
584  unsigned height_in_bytes = height * kPointerSize;
585  if (FLAG_trace_deopt) {
586  PrintF(" translating ");
587  function->PrintName();
588  PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
589  }
590 
591  // The 'fixed' part of the frame consists of the incoming parameters and
592  // the part described by JavaScriptFrameConstants.
593  unsigned fixed_frame_size = ComputeFixedSize(function);
594  unsigned input_frame_size = input_->GetFrameSize();
595  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
596 
597  // Allocate and store the output frame description.
598  FrameDescription* output_frame =
599  new(output_frame_size) FrameDescription(output_frame_size, function);
600  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
601 
602  bool is_bottommost = (0 == frame_index);
603  bool is_topmost = (output_count_ - 1 == frame_index);
604  ASSERT(frame_index >= 0 && frame_index < output_count_);
605  ASSERT(output_[frame_index] == NULL);
606  output_[frame_index] = output_frame;
607 
608  // The top address for the bottommost output frame can be computed from
609  // the input frame pointer and the output frame's height. For all
610  // subsequent output frames, it can be computed from the previous one's
611  // top address and the current frame's size.
612  intptr_t top_address;
613  if (is_bottommost) {
614  // 2 = context and function in the frame.
615  top_address =
616  input_->GetRegister(rbp.code()) - (2 * kPointerSize) - height_in_bytes;
617  } else {
618  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
619  }
620  output_frame->SetTop(top_address);
621 
622  // Compute the incoming parameter translation.
623  int parameter_count = function->shared()->formal_parameter_count() + 1;
624  unsigned output_offset = output_frame_size;
625  unsigned input_offset = input_frame_size;
626  for (int i = 0; i < parameter_count; ++i) {
627  output_offset -= kPointerSize;
628  DoTranslateCommand(iterator, frame_index, output_offset);
629  }
630  input_offset -= (parameter_count * kPointerSize);
631 
632  // There are no translation commands for the caller's pc and fp, the
633  // context, and the function. Synthesize their values and set them up
634  // explicitly.
635  //
636  // The caller's pc for the bottommost output frame is the same as in the
637  // input frame. For all subsequent output frames, it can be read from the
638  // previous one. This frame's pc can be computed from the non-optimized
639  // function code and AST id of the bailout.
640  output_offset -= kPointerSize;
641  input_offset -= kPointerSize;
642  intptr_t value;
643  if (is_bottommost) {
644  value = input_->GetFrameSlot(input_offset);
645  } else {
646  value = output_[frame_index - 1]->GetPc();
647  }
648  output_frame->SetFrameSlot(output_offset, value);
649  if (FLAG_trace_deopt) {
650  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
651  V8PRIxPTR " ; caller's pc\n",
652  top_address + output_offset, output_offset, value);
653  }
654 
655  // The caller's frame pointer for the bottommost output frame is the same
656  // as in the input frame. For all subsequent output frames, it can be
657  // read from the previous one. Also compute and set this frame's frame
658  // pointer.
659  output_offset -= kPointerSize;
660  input_offset -= kPointerSize;
661  if (is_bottommost) {
662  value = input_->GetFrameSlot(input_offset);
663  } else {
664  value = output_[frame_index - 1]->GetFp();
665  }
666  output_frame->SetFrameSlot(output_offset, value);
667  intptr_t fp_value = top_address + output_offset;
668  ASSERT(!is_bottommost || input_->GetRegister(rbp.code()) == fp_value);
669  output_frame->SetFp(fp_value);
670  if (is_topmost) output_frame->SetRegister(rbp.code(), fp_value);
671  if (FLAG_trace_deopt) {
672  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
673  V8PRIxPTR " ; caller's fp\n",
674  fp_value, output_offset, value);
675  }
676 
677  // For the bottommost output frame the context can be gotten from the input
678  // frame. For all subsequent output frames it can be gotten from the function
679  // so long as we don't inline functions that need local contexts.
680  output_offset -= kPointerSize;
681  input_offset -= kPointerSize;
682  if (is_bottommost) {
683  value = input_->GetFrameSlot(input_offset);
684  } else {
685  value = reinterpret_cast<intptr_t>(function->context());
686  }
687  output_frame->SetFrameSlot(output_offset, value);
688  output_frame->SetContext(value);
689  if (is_topmost) output_frame->SetRegister(rsi.code(), value);
690  if (FLAG_trace_deopt) {
691  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
692  V8PRIxPTR "; context\n",
693  top_address + output_offset, output_offset, value);
694  }
695 
696  // The function was mentioned explicitly in the BEGIN_FRAME.
697  output_offset -= kPointerSize;
698  input_offset -= kPointerSize;
699  value = reinterpret_cast<intptr_t>(function);
700  // The function for the bottommost output frame should also agree with the
701  // input frame.
702  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
703  output_frame->SetFrameSlot(output_offset, value);
704  if (FLAG_trace_deopt) {
705  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
706  V8PRIxPTR "; function\n",
707  top_address + output_offset, output_offset, value);
708  }
709 
710  // Translate the rest of the frame.
711  for (unsigned i = 0; i < height; ++i) {
712  output_offset -= kPointerSize;
713  DoTranslateCommand(iterator, frame_index, output_offset);
714  }
715  ASSERT(0 == output_offset);
716 
717  // Compute this frame's PC, state, and continuation.
718  Code* non_optimized_code = function->shared()->code();
719  FixedArray* raw_data = non_optimized_code->deoptimization_data();
720  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
721  Address start = non_optimized_code->instruction_start();
722  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
723  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
724  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
725  output_frame->SetPc(pc_value);
726 
729  output_frame->SetState(Smi::FromInt(state));
730 
731  // Set the continuation for the topmost frame.
732  if (is_topmost && bailout_type_ != DEBUGGER) {
733  Code* continuation = (bailout_type_ == EAGER)
734  ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
735  : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
736  output_frame->SetContinuation(
737  reinterpret_cast<intptr_t>(continuation->entry()));
738  }
739 }
740 
741 
742 void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
743  // Set the register values. The values are not important as there are no
744  // callee saved registers in JavaScript frames, so all registers are
745  // spilled. Registers rbp and rsp are set to the correct values though.
746  for (int i = 0; i < Register::kNumRegisters; i++) {
747  input_->SetRegister(i, i * 4);
748  }
749  input_->SetRegister(rsp.code(), reinterpret_cast<intptr_t>(frame->sp()));
750  input_->SetRegister(rbp.code(), reinterpret_cast<intptr_t>(frame->fp()));
751  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
752  input_->SetDoubleRegister(i, 0.0);
753  }
754 
755  // Fill the frame content from the actual data on the frame.
756  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
757  input_->SetFrameSlot(i, Memory::uint64_at(tos + i));
758  }
759 }
760 
761 
762 #define __ masm()->
763 
764 void Deoptimizer::EntryGenerator::Generate() {
765  GeneratePrologue();
766 
767  // Save all general purpose registers before messing with them.
768  const int kNumberOfRegisters = Register::kNumRegisters;
769 
770  const int kDoubleRegsSize = kDoubleSize *
772  __ subq(rsp, Immediate(kDoubleRegsSize));
773 
774  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
775  XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
776  int offset = i * kDoubleSize;
777  __ movsd(Operand(rsp, offset), xmm_reg);
778  }
779 
780  // We push all registers onto the stack, even though we do not need
781  // to restore all later.
782  for (int i = 0; i < kNumberOfRegisters; i++) {
783  Register r = Register::from_code(i);
784  __ push(r);
785  }
786 
787  const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
788  kDoubleRegsSize;
789 
790  // When calling new_deoptimizer_function we need to pass the last argument
791  // on the stack on windows and in r8 on linux. The remaining arguments are
792  // all passed in registers (different ones on linux and windows though).
793 
794 #ifdef _WIN64
795  Register arg4 = r9;
796  Register arg3 = r8;
797  Register arg2 = rdx;
798  Register arg1 = rcx;
799 #else
800  Register arg4 = rcx;
801  Register arg3 = rdx;
802  Register arg2 = rsi;
803  Register arg1 = rdi;
804 #endif
805 
806  // We use this to keep the value of the fifth argument temporarily.
807  // Unfortunately we can't store it directly in r8 (used for passing
808  // this on linux), since it is another parameter passing register on windows.
809  Register arg5 = r11;
810 
811  // Get the bailout id from the stack.
812  __ movq(arg3, Operand(rsp, kSavedRegistersAreaSize));
813 
814  // Get the address of the location in the code object if possible
815  // and compute the fp-to-sp delta in register arg5.
816  if (type() == EAGER) {
817  __ Set(arg4, 0);
818  __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
819  } else {
820  __ movq(arg4, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
821  __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 2 * kPointerSize));
822  }
823 
824  __ subq(arg5, rbp);
825  __ neg(arg5);
826 
827  // Allocate a new deoptimizer object.
828  __ PrepareCallCFunction(6);
830  __ movq(arg1, rax);
831  __ Set(arg2, type());
832  // Args 3 and 4 are already in the right registers.
833 
834  // On windows put the arguments on the stack (PrepareCallCFunction
835  // has created space for this). On linux pass the arguments in r8 and r9.
836 #ifdef _WIN64
837  __ movq(Operand(rsp, 4 * kPointerSize), arg5);
838  __ LoadAddress(arg5, ExternalReference::isolate_address());
839  __ movq(Operand(rsp, 5 * kPointerSize), arg5);
840 #else
841  __ movq(r8, arg5);
842  __ LoadAddress(r9, ExternalReference::isolate_address());
843 #endif
844 
845  Isolate* isolate = masm()->isolate();
846 
847  {
848  AllowExternalCallThatCantCauseGC scope(masm());
849  __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
850  }
851  // Preserve deoptimizer object in register rax and get the input
852  // frame descriptor pointer.
853  __ movq(rbx, Operand(rax, Deoptimizer::input_offset()));
854 
855  // Fill in the input registers.
856  for (int i = kNumberOfRegisters -1; i >= 0; i--) {
857  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
858  __ pop(Operand(rbx, offset));
859  }
860 
861  // Fill in the double input registers.
862  int double_regs_offset = FrameDescription::double_registers_offset();
863  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
864  int dst_offset = i * kDoubleSize + double_regs_offset;
865  __ pop(Operand(rbx, dst_offset));
866  }
867 
868  // Remove the bailout id from the stack.
869  if (type() == EAGER) {
870  __ addq(rsp, Immediate(kPointerSize));
871  } else {
872  __ addq(rsp, Immediate(2 * kPointerSize));
873  }
874 
875  // Compute a pointer to the unwinding limit in register rcx; that is
876  // the first stack slot not part of the input frame.
877  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
878  __ addq(rcx, rsp);
879 
880  // Unwind the stack down to - but not including - the unwinding
881  // limit and copy the contents of the activation frame to the input
882  // frame description.
884  Label pop_loop;
885  __ bind(&pop_loop);
886  __ pop(Operand(rdx, 0));
887  __ addq(rdx, Immediate(sizeof(intptr_t)));
888  __ cmpq(rcx, rsp);
889  __ j(not_equal, &pop_loop);
890 
891  // Compute the output frame in the deoptimizer.
892  __ push(rax);
893  __ PrepareCallCFunction(2);
894  __ movq(arg1, rax);
895  __ LoadAddress(arg2, ExternalReference::isolate_address());
896  {
897  AllowExternalCallThatCantCauseGC scope(masm());
898  __ CallCFunction(
899  ExternalReference::compute_output_frames_function(isolate), 2);
900  }
901  __ pop(rax);
902 
903  // Replace the current frame with the output frames.
904  Label outer_push_loop, inner_push_loop;
905  // Outer loop state: rax = current FrameDescription**, rdx = one past the
906  // last FrameDescription**.
907  __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset()));
908  __ movq(rax, Operand(rax, Deoptimizer::output_offset()));
909  __ lea(rdx, Operand(rax, rdx, times_8, 0));
910  __ bind(&outer_push_loop);
911  // Inner loop state: rbx = current FrameDescription*, rcx = loop index.
912  __ movq(rbx, Operand(rax, 0));
913  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
914  __ bind(&inner_push_loop);
915  __ subq(rcx, Immediate(sizeof(intptr_t)));
917  __ testq(rcx, rcx);
918  __ j(not_zero, &inner_push_loop);
919  __ addq(rax, Immediate(kPointerSize));
920  __ cmpq(rax, rdx);
921  __ j(below, &outer_push_loop);
922 
923  // In case of OSR, we have to restore the XMM registers.
924  if (type() == OSR) {
925  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
926  XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
927  int src_offset = i * kDoubleSize + double_regs_offset;
928  __ movsd(xmm_reg, Operand(rbx, src_offset));
929  }
930  }
931 
932  // Push state, pc, and continuation from the last output frame.
933  if (type() != OSR) {
934  __ push(Operand(rbx, FrameDescription::state_offset()));
935  }
936  __ push(Operand(rbx, FrameDescription::pc_offset()));
937  __ push(Operand(rbx, FrameDescription::continuation_offset()));
938 
939  // Push the registers from the last output frame.
940  for (int i = 0; i < kNumberOfRegisters; i++) {
941  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
942  __ push(Operand(rbx, offset));
943  }
944 
945  // Restore the registers from the stack.
946  for (int i = kNumberOfRegisters - 1; i >= 0 ; i--) {
947  Register r = Register::from_code(i);
948  // Do not restore rsp, simply pop the value into the next register
949  // and overwrite this afterwards.
950  if (r.is(rsp)) {
951  ASSERT(i > 0);
952  r = Register::from_code(i - 1);
953  }
954  __ pop(r);
955  }
956 
957  // Set up the roots register.
958  __ InitializeRootRegister();
959  __ InitializeSmiConstantRegister();
960 
961  // Return to the continuation point.
962  __ ret(0);
963 }
964 
965 
967  // Create a sequence of deoptimization entries.
968  Label done;
969  for (int i = 0; i < count(); i++) {
970  int start = masm()->pc_offset();
971  USE(start);
972  __ push_imm32(i);
973  __ jmp(&done);
974  ASSERT(masm()->pc_offset() - start == table_entry_size_);
975  }
976  __ bind(&done);
977 }
978 
979 #undef __
980 
981 
982 } } // namespace v8::internal
983 
984 #endif // V8_TARGET_ARCH_X64
byte * Address
Definition: globals.h:172
const Register rdx
Code * builtin(Name name)
Definition: builtins.h:312
static DeoptimizationOutputData * cast(Object *obj)
#define V8PRIxPTR
Definition: globals.h:204
void PrintF(const char *format,...)
Definition: v8utils.cc:40
const Register r11
unsigned stack_slots()
Definition: objects-inl.h:3171
static Smi * FromInt(int value)
Definition: objects-inl.h:973
const Register rbp
void SetFrameSlot(unsigned offset, intptr_t value)
Definition: deoptimizer.h:399
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const Register rsi
Builtins * builtins()
Definition: isolate.h:909
static XMMRegister FromAllocationIndex(int index)
static const int kNumAllocatableRegisters
static const int kCallInstructionLength
#define ASSERT(condition)
Definition: checks.h:270
static void DeoptimizeFunction(JSFunction *function)
intptr_t GetContext() const
Definition: deoptimizer.h:432
void SetFrameType(StackFrame::Type type)
Definition: deoptimizer.h:441
const int kIntSize
Definition: globals.h:231
static const int kNumRegisters
Definition: assembler-arm.h:73
static int double_registers_offset()
Definition: deoptimizer.h:459
uint8_t byte
Definition: globals.h:171
#define UNREACHABLE()
Definition: checks.h:50
static int output_offset()
Definition: deoptimizer.h:222
const int kDoubleSize
Definition: globals.h:232
const Register r9
const int kPointerSize
Definition: globals.h:234
static void set_target_address_at(Address pc, Address target)
const Register rbx
const Register rsp
#define __
void SetRegister(unsigned n, intptr_t value)
Definition: deoptimizer.h:413
static unsigned decode(uint32_t value)
Definition: utils.h:272
const Register pc
friend class DeoptimizingCodeListNode
Definition: deoptimizer.h:345
static Register from_code(int code)
static int GetOutputInfo(DeoptimizationOutputData *data, unsigned node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:486
const Register rax
const Register rdi
uint32_t GetFrameSize() const
Definition: deoptimizer.h:369
void SetContinuation(intptr_t pc)
Definition: deoptimizer.h:438
static int output_count_offset()
Definition: deoptimizer.h:219
static const int kNumAllocatableRegisters
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static Address target_address_at(Address pc)
intptr_t GetFrameSlot(unsigned offset)
Definition: deoptimizer.h:378
static Address GetDeoptimizationEntry(int id, BailoutType type)
Definition: deoptimizer.cc:445
const Register r8
const Register rcx
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
friend class FrameDescription
Definition: deoptimizer.h:344
static uint64_t & uint64_at(Address addr)
Definition: v8memory.h:55
void USE(T)
Definition: globals.h:303
virtual void GeneratePrologue()
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:403
void SetDoubleRegister(unsigned n, double value)
Definition: deoptimizer.h:418
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
FlagType type() const
Definition: flags.cc:1358
static DeoptimizationInputData * cast(Object *obj)
static JSFunction * cast(Object *obj)