v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
deoptimizer-mips.cc
Go to the documentation of this file.
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen.h"
31 #include "deoptimizer.h"
32 #include "full-codegen.h"
33 #include "safepoint-table.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 
40  const int kCallInstructionSizeInWords = 4;
41  return kCallInstructionSizeInWords * Assembler::kInstrSize;
42 }
43 
44 
45 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
46  HandleScope scope;
47  AssertNoAllocation no_allocation;
48 
49  if (!function->IsOptimized()) return;
50 
51  // Get the optimized code.
52  Code* code = function->code();
53  Address code_start_address = code->instruction_start();
54 
55  // Invalidate the relocation information, as it will become invalid by the
56  // code patching below, and is not needed any more.
57  code->InvalidateRelocation();
58 
59  // For each LLazyBailout instruction insert a call to the corresponding
60  // deoptimization entry.
61  DeoptimizationInputData* deopt_data =
62  DeoptimizationInputData::cast(code->deoptimization_data());
63 #ifdef DEBUG
64  Address prev_call_address = NULL;
65 #endif
66  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
67  if (deopt_data->Pc(i)->value() == -1) continue;
68  Address call_address = code_start_address + deopt_data->Pc(i)->value();
69  Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
70  int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
72  int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
73  ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
74  ASSERT(call_size_in_bytes <= patch_size());
75  CodePatcher patcher(call_address, call_size_in_words);
76  patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
77  ASSERT(prev_call_address == NULL ||
78  call_address >= prev_call_address + patch_size());
79  ASSERT(call_address + patch_size() <= code->instruction_end());
80 
81 #ifdef DEBUG
82  prev_call_address = call_address;
83 #endif
84  }
85 
86  Isolate* isolate = code->GetIsolate();
87 
88  // Add the deoptimizing code to the list.
90  DeoptimizerData* data = isolate->deoptimizer_data();
91  node->set_next(data->deoptimizing_code_list_);
92  data->deoptimizing_code_list_ = node;
93 
94  // We might be in the middle of incremental marking with compaction.
95  // Tell collector to treat this code object in a special way and
96  // ignore all slots that might have been recorded on it.
97  isolate->heap()->mark_compact_collector()->InvalidateCode(code);
98 
99  // Set the code for the function to non-optimized version.
100  function->ReplaceCode(function->shared()->code());
101 
102  if (FLAG_trace_deopt) {
103  PrintF("[forced deoptimization: ");
104  function->PrintName();
105  PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
106 #ifdef DEBUG
107  if (FLAG_print_code) {
108  code->PrintLn();
109  }
110 #endif
111  }
112 }
113 
114 
// Rewrites the stack-guard check that precedes |pc_after| so that, instead of
// conditionally calling |check_code|, it unconditionally skips the branch and
// calls |replacement_code| (the on-stack-replacement entry).
// The patch is two-fold: the comparison feeding the beq is replaced by
// "load 1 into at" (so the branch is never taken), and the lui/ori call
// target is redirected from check_code to replacement_code.
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                        Address pc_after,
                                        Code* check_code,
                                        Code* replacement_code) {
  const int kInstrSize = Assembler::kInstrSize;
  // This structure comes from FullCodeGenerator::EmitStackCheck.
  // The call of the stack guard check has the following form:
  // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts)
  // beq at, zero_reg, ok
  // lui t9, <stack guard address> upper
  // ori t9, <stack guard address> lower
  // jalr t9
  // nop
  // ----- pc_after points here

  // Sanity-check that we are looking at the expected instruction sequence.
  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize)));

  // Replace the sltu instruction with load-imm 1 to at, so beq is not taken.
  // (The patcher covers exactly one instruction, 6 words before pc_after.)
  CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
  patcher.masm()->addiu(at, zero_reg, 1);

  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  ASSERT(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
      reinterpret_cast<uint32_t>(check_code->entry()));
  Assembler::set_target_address_at(pc_after - 4 * kInstrSize,
      replacement_code->entry());

  // We patched the code to the following form:
  // addiu at, zero_reg, 1
  // beq at, zero_reg, ok ;; Not changed
  // lui t9, <on-stack replacement address> upper
  // ori t9, <on-stack replacement address> lower
  // jalr t9 ;; Not changed
  // nop ;; Not changed
  // ----- pc_after points here

  // Tell incremental marking about the new code target so write barriers
  // stay consistent.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_after - 4 * kInstrSize, replacement_code);
}
156 
157 
158 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
159  Address pc_after,
160  Code* check_code,
161  Code* replacement_code) {
162  // Exact opposite of the function above.
163  const int kInstrSize = Assembler::kInstrSize;
165  Assembler::instr_at(pc_after - 6 * kInstrSize)));
166  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize)));
167 
168  // Restore the sltu instruction so beq can be taken again.
169  CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
170  if (FLAG_count_based_interrupts) {
171  patcher.masm()->slt(at, a3, zero_reg);
172  } else {
173  patcher.masm()->sltu(at, sp, t0);
174  }
175 
176  // Replace the on-stack replacement address in the load-immediate (lui/ori
177  // pair) with the entry address of the normal stack-check code.
178  ASSERT(reinterpret_cast<uint32_t>(
179  Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
180  reinterpret_cast<uint32_t>(replacement_code->entry()));
181  Assembler::set_target_address_at(pc_after - 4 * kInstrSize,
182  check_code->entry());
183 
184  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
185  unoptimized_code, pc_after - 4 * kInstrSize, check_code);
186 }
187 
188 
189 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
190  ByteArray* translations = data->TranslationByteArray();
191  int length = data->DeoptCount();
192  for (int i = 0; i < length; i++) {
193  if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
194  TranslationIterator it(translations, data->TranslationIndex(i)->value());
195  int value = it.Next();
196  ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
197  // Read the number of frames.
198  value = it.Next();
199  if (value == 1) return i;
200  }
201  }
202  UNREACHABLE();
203  return -1;
204 }
205 
206 
207 void Deoptimizer::DoComputeOsrOutputFrame() {
208  DeoptimizationInputData* data = DeoptimizationInputData::cast(
209  optimized_code_->deoptimization_data());
210  unsigned ast_id = data->OsrAstId()->value();
211 
212  int bailout_id = LookupBailoutId(data, ast_id);
213  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
214  ByteArray* translations = data->TranslationByteArray();
215 
216  TranslationIterator iterator(translations, translation_index);
217  Translation::Opcode opcode =
218  static_cast<Translation::Opcode>(iterator.Next());
219  ASSERT(Translation::BEGIN == opcode);
220  USE(opcode);
221  int count = iterator.Next();
222  iterator.Skip(1); // Drop JS frame count.
223  ASSERT(count == 1);
224  USE(count);
225 
226  opcode = static_cast<Translation::Opcode>(iterator.Next());
227  USE(opcode);
228  ASSERT(Translation::JS_FRAME == opcode);
229  unsigned node_id = iterator.Next();
230  USE(node_id);
231  ASSERT(node_id == ast_id);
232  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
233  USE(function);
234  ASSERT(function == function_);
235  unsigned height = iterator.Next();
236  unsigned height_in_bytes = height * kPointerSize;
237  USE(height_in_bytes);
238 
239  unsigned fixed_size = ComputeFixedSize(function_);
240  unsigned input_frame_size = input_->GetFrameSize();
241  ASSERT(fixed_size + height_in_bytes == input_frame_size);
242 
243  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
244  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
245  unsigned outgoing_size = outgoing_height * kPointerSize;
246  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
247  ASSERT(outgoing_size == 0); // OSR does not happen in the middle of a call.
248 
249  if (FLAG_trace_osr) {
250  PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
251  reinterpret_cast<intptr_t>(function_));
252  function_->PrintName();
253  PrintF(" => node=%u, frame=%d->%d]\n",
254  ast_id,
255  input_frame_size,
256  output_frame_size);
257  }
258 
259  // There's only one output frame in the OSR case.
260  output_count_ = 1;
261  output_ = new FrameDescription*[1];
262  output_[0] = new(output_frame_size) FrameDescription(
263  output_frame_size, function_);
264  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
265 
266  // Clear the incoming parameters in the optimized frame to avoid
267  // confusing the garbage collector.
268  unsigned output_offset = output_frame_size - kPointerSize;
269  int parameter_count = function_->shared()->formal_parameter_count() + 1;
270  for (int i = 0; i < parameter_count; ++i) {
271  output_[0]->SetFrameSlot(output_offset, 0);
272  output_offset -= kPointerSize;
273  }
274 
275  // Translate the incoming parameters. This may overwrite some of the
276  // incoming argument slots we've just cleared.
277  int input_offset = input_frame_size - kPointerSize;
278  bool ok = true;
279  int limit = input_offset - (parameter_count * kPointerSize);
280  while (ok && input_offset > limit) {
281  ok = DoOsrTranslateCommand(&iterator, &input_offset);
282  }
283 
284  // There are no translation commands for the caller's pc and fp, the
285  // context, and the function. Set them up explicitly.
288  i -= kPointerSize) {
289  uint32_t input_value = input_->GetFrameSlot(input_offset);
290  if (FLAG_trace_osr) {
291  const char* name = "UNKNOWN";
292  switch (i) {
294  name = "caller's pc";
295  break;
297  name = "fp";
298  break;
300  name = "context";
301  break;
303  name = "function";
304  break;
305  }
306  PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
307  output_offset,
308  input_value,
309  input_offset,
310  name);
311  }
312 
313  output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
314  input_offset -= kPointerSize;
315  output_offset -= kPointerSize;
316  }
317 
318  // Translate the rest of the frame.
319  while (ok && input_offset >= 0) {
320  ok = DoOsrTranslateCommand(&iterator, &input_offset);
321  }
322 
323  // If translation of any command failed, continue using the input frame.
324  if (!ok) {
325  delete output_[0];
326  output_[0] = input_;
327  output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
328  } else {
329  // Set up the frame pointer and the context pointer.
330  output_[0]->SetRegister(fp.code(), input_->GetRegister(fp.code()));
331  output_[0]->SetRegister(cp.code(), input_->GetRegister(cp.code()));
332 
333  unsigned pc_offset = data->OsrPcOffset()->value();
334  uint32_t pc = reinterpret_cast<uint32_t>(
335  optimized_code_->entry() + pc_offset);
336  output_[0]->SetPc(pc);
337  }
338  Code* continuation = isolate_->builtins()->builtin(Builtins::kNotifyOSR);
339  output_[0]->SetContinuation(
340  reinterpret_cast<uint32_t>(continuation->entry()));
341 
342  if (FLAG_trace_osr) {
343  PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
344  ok ? "finished" : "aborted",
345  reinterpret_cast<intptr_t>(function));
346  function->PrintName();
347  PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
348  }
349 }
350 
351 
352 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
353  int frame_index) {
354  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
355  unsigned height = iterator->Next();
356  unsigned height_in_bytes = height * kPointerSize;
357  if (FLAG_trace_deopt) {
358  PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
359  }
360 
361  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
362  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
363 
364  // Allocate and store the output frame description.
365  FrameDescription* output_frame =
366  new(output_frame_size) FrameDescription(output_frame_size, function);
367  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
368 
369  // Arguments adaptor can not be topmost or bottommost.
370  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
371  ASSERT(output_[frame_index] == NULL);
372  output_[frame_index] = output_frame;
373 
374  // The top address of the frame is computed from the previous
375  // frame's top and this frame's size.
376  uint32_t top_address;
377  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
378  output_frame->SetTop(top_address);
379 
380  // Compute the incoming parameter translation.
381  int parameter_count = height;
382  unsigned output_offset = output_frame_size;
383  for (int i = 0; i < parameter_count; ++i) {
384  output_offset -= kPointerSize;
385  DoTranslateCommand(iterator, frame_index, output_offset);
386  }
387 
388  // Read caller's PC from the previous frame.
389  output_offset -= kPointerSize;
390  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
391  output_frame->SetFrameSlot(output_offset, callers_pc);
392  if (FLAG_trace_deopt) {
393  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
394  top_address + output_offset, output_offset, callers_pc);
395  }
396 
397  // Read caller's FP from the previous frame, and set this frame's FP.
398  output_offset -= kPointerSize;
399  intptr_t value = output_[frame_index - 1]->GetFp();
400  output_frame->SetFrameSlot(output_offset, value);
401  intptr_t fp_value = top_address + output_offset;
402  output_frame->SetFp(fp_value);
403  if (FLAG_trace_deopt) {
404  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
405  fp_value, output_offset, value);
406  }
407 
408  // A marker value is used in place of the context.
409  output_offset -= kPointerSize;
410  intptr_t context = reinterpret_cast<intptr_t>(
412  output_frame->SetFrameSlot(output_offset, context);
413  if (FLAG_trace_deopt) {
414  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
415  top_address + output_offset, output_offset, context);
416  }
417 
418  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
419  output_offset -= kPointerSize;
420  value = reinterpret_cast<intptr_t>(function);
421  output_frame->SetFrameSlot(output_offset, value);
422  if (FLAG_trace_deopt) {
423  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
424  top_address + output_offset, output_offset, value);
425  }
426 
427  // Number of incoming arguments.
428  output_offset -= kPointerSize;
429  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
430  output_frame->SetFrameSlot(output_offset, value);
431  if (FLAG_trace_deopt) {
432  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
433  top_address + output_offset, output_offset, value, height - 1);
434  }
435 
436  ASSERT(0 == output_offset);
437 
438  Builtins* builtins = isolate_->builtins();
439  Code* adaptor_trampoline =
440  builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
441  uint32_t pc = reinterpret_cast<uint32_t>(
442  adaptor_trampoline->instruction_start() +
443  isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
444  output_frame->SetPc(pc);
445 }
446 
447 
// Materializes a construct-stub frame (mirroring JSConstructStubGeneric's
// layout) for a constructor call that was inlined in the optimized code.
// The 8 fixed slots are: caller pc, caller fp, context, CONSTRUCT sentinel,
// code object, argc, constructor function, and the allocated receiver.
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF(" translating construct stub => height=%d\n", height_in_bytes);
  }

  // 8 fixed slots as described above.
  unsigned fixed_frame_size = 8 * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  uint32_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // Constructor function being invoked by the stub.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
           top_address + output_offset, output_offset, value);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  ASSERT(0 == output_offset);

  // Resume execution inside the construct stub at its recorded deopt pc.
  uint32_t pc = reinterpret_cast<uint32_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
}
568 
569 
570 // This code is very similar to ia32/arm code, but relies on register names
571 // (fp, sp) and how the frame is laid out.
572 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
573  int frame_index) {
574  // Read the ast node id, function, and frame height for this output frame.
575  int node_id = iterator->Next();
576  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
577  unsigned height = iterator->Next();
578  unsigned height_in_bytes = height * kPointerSize;
579  if (FLAG_trace_deopt) {
580  PrintF(" translating ");
581  function->PrintName();
582  PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
583  }
584 
585  // The 'fixed' part of the frame consists of the incoming parameters and
586  // the part described by JavaScriptFrameConstants.
587  unsigned fixed_frame_size = ComputeFixedSize(function);
588  unsigned input_frame_size = input_->GetFrameSize();
589  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
590 
591  // Allocate and store the output frame description.
592  FrameDescription* output_frame =
593  new(output_frame_size) FrameDescription(output_frame_size, function);
594  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
595 
596  bool is_bottommost = (0 == frame_index);
597  bool is_topmost = (output_count_ - 1 == frame_index);
598  ASSERT(frame_index >= 0 && frame_index < output_count_);
599  ASSERT(output_[frame_index] == NULL);
600  output_[frame_index] = output_frame;
601 
602  // The top address for the bottommost output frame can be computed from
603  // the input frame pointer and the output frame's height. For all
604  // subsequent output frames, it can be computed from the previous one's
605  // top address and the current frame's size.
606  uint32_t top_address;
607  if (is_bottommost) {
608  // 2 = context and function in the frame.
609  top_address =
610  input_->GetRegister(fp.code()) - (2 * kPointerSize) - height_in_bytes;
611  } else {
612  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
613  }
614  output_frame->SetTop(top_address);
615 
616  // Compute the incoming parameter translation.
617  int parameter_count = function->shared()->formal_parameter_count() + 1;
618  unsigned output_offset = output_frame_size;
619  unsigned input_offset = input_frame_size;
620  for (int i = 0; i < parameter_count; ++i) {
621  output_offset -= kPointerSize;
622  DoTranslateCommand(iterator, frame_index, output_offset);
623  }
624  input_offset -= (parameter_count * kPointerSize);
625 
626  // There are no translation commands for the caller's pc and fp, the
627  // context, and the function. Synthesize their values and set them up
628  // explicitly.
629  //
630  // The caller's pc for the bottommost output frame is the same as in the
631  // input frame. For all subsequent output frames, it can be read from the
632  // previous one. This frame's pc can be computed from the non-optimized
633  // function code and AST id of the bailout.
634  output_offset -= kPointerSize;
635  input_offset -= kPointerSize;
636  intptr_t value;
637  if (is_bottommost) {
638  value = input_->GetFrameSlot(input_offset);
639  } else {
640  value = output_[frame_index - 1]->GetPc();
641  }
642  output_frame->SetFrameSlot(output_offset, value);
643  if (FLAG_trace_deopt) {
644  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
645  top_address + output_offset, output_offset, value);
646  }
647 
648  // The caller's frame pointer for the bottommost output frame is the same
649  // as in the input frame. For all subsequent output frames, it can be
650  // read from the previous one. Also compute and set this frame's frame
651  // pointer.
652  output_offset -= kPointerSize;
653  input_offset -= kPointerSize;
654  if (is_bottommost) {
655  value = input_->GetFrameSlot(input_offset);
656  } else {
657  value = output_[frame_index - 1]->GetFp();
658  }
659  output_frame->SetFrameSlot(output_offset, value);
660  intptr_t fp_value = top_address + output_offset;
661  ASSERT(!is_bottommost || input_->GetRegister(fp.code()) == fp_value);
662  output_frame->SetFp(fp_value);
663  if (is_topmost) {
664  output_frame->SetRegister(fp.code(), fp_value);
665  }
666  if (FLAG_trace_deopt) {
667  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
668  fp_value, output_offset, value);
669  }
670 
671  // For the bottommost output frame the context can be gotten from the input
672  // frame. For all subsequent output frames it can be gotten from the function
673  // so long as we don't inline functions that need local contexts.
674  output_offset -= kPointerSize;
675  input_offset -= kPointerSize;
676  if (is_bottommost) {
677  value = input_->GetFrameSlot(input_offset);
678  } else {
679  value = reinterpret_cast<intptr_t>(function->context());
680  }
681  output_frame->SetFrameSlot(output_offset, value);
682  output_frame->SetContext(value);
683  if (is_topmost) output_frame->SetRegister(cp.code(), value);
684  if (FLAG_trace_deopt) {
685  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
686  top_address + output_offset, output_offset, value);
687  }
688 
689  // The function was mentioned explicitly in the BEGIN_FRAME.
690  output_offset -= kPointerSize;
691  input_offset -= kPointerSize;
692  value = reinterpret_cast<uint32_t>(function);
693  // The function for the bottommost output frame should also agree with the
694  // input frame.
695  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
696  output_frame->SetFrameSlot(output_offset, value);
697  if (FLAG_trace_deopt) {
698  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
699  top_address + output_offset, output_offset, value);
700  }
701 
702  // Translate the rest of the frame.
703  for (unsigned i = 0; i < height; ++i) {
704  output_offset -= kPointerSize;
705  DoTranslateCommand(iterator, frame_index, output_offset);
706  }
707  ASSERT(0 == output_offset);
708 
709  // Compute this frame's PC, state, and continuation.
710  Code* non_optimized_code = function->shared()->code();
711  FixedArray* raw_data = non_optimized_code->deoptimization_data();
712  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
713  Address start = non_optimized_code->instruction_start();
714  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
715  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
716  uint32_t pc_value = reinterpret_cast<uint32_t>(start + pc_offset);
717  output_frame->SetPc(pc_value);
718 
721  output_frame->SetState(Smi::FromInt(state));
722 
723 
724  // Set the continuation for the topmost frame.
725  if (is_topmost && bailout_type_ != DEBUGGER) {
726  Builtins* builtins = isolate_->builtins();
727  Code* continuation = (bailout_type_ == EAGER)
728  ? builtins->builtin(Builtins::kNotifyDeoptimized)
729  : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
730  output_frame->SetContinuation(
731  reinterpret_cast<uint32_t>(continuation->entry()));
732  }
733 }
734 
735 void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
736  // Set the register values. The values are not important as there are no
737  // callee saved registers in JavaScript frames, so all registers are
738  // spilled. Registers fp and sp are set to the correct values though.
739 
740  for (int i = 0; i < Register::kNumRegisters; i++) {
741  input_->SetRegister(i, i * 4);
742  }
743  input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp()));
744  input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
745  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
746  input_->SetDoubleRegister(i, 0.0);
747  }
748 
749  // Fill the frame content from the actual data on the frame.
750  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
751  input_->SetFrameSlot(i, Memory::uint32_at(tos + i));
752  }
753 }
754 
755 
756 #define __ masm()->
757 
758 
759 // This code tries to be close to ia32 code so that any changes can be
760 // easily ported.
761 void Deoptimizer::EntryGenerator::Generate() {
762  GeneratePrologue();
763 
764  Isolate* isolate = masm()->isolate();
765 
766  CpuFeatures::Scope scope(FPU);
767  // Unlike on ARM we don't save all the registers, just the useful ones.
768  // For the rest, there are gaps on the stack, so the offsets remain the same.
769  const int kNumberOfRegisters = Register::kNumRegisters;
770 
771  RegList restored_regs = kJSCallerSaved | kCalleeSaved;
772  RegList saved_regs = restored_regs | sp.bit() | ra.bit();
773 
774  const int kDoubleRegsSize =
776 
777  // Save all FPU registers before messing with them.
778  __ Subu(sp, sp, Operand(kDoubleRegsSize));
779  for (int i = 0; i < FPURegister::kNumAllocatableRegisters; ++i) {
780  FPURegister fpu_reg = FPURegister::FromAllocationIndex(i);
781  int offset = i * kDoubleSize;
782  __ sdc1(fpu_reg, MemOperand(sp, offset));
783  }
784 
785  // Push saved_regs (needed to populate FrameDescription::registers_).
786  // Leave gaps for other registers.
787  __ Subu(sp, sp, kNumberOfRegisters * kPointerSize);
788  for (int16_t i = kNumberOfRegisters - 1; i >= 0; i--) {
789  if ((saved_regs & (1 << i)) != 0) {
790  __ sw(ToRegister(i), MemOperand(sp, kPointerSize * i));
791  }
792  }
793 
794  const int kSavedRegistersAreaSize =
795  (kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
796 
797  // Get the bailout id from the stack.
798  __ lw(a2, MemOperand(sp, kSavedRegistersAreaSize));
799 
800  // Get the address of the location in the code object if possible (a3) (return
801  // address for lazy deoptimization) and compute the fp-to-sp delta in
802  // register t0.
803  if (type() == EAGER) {
804  __ mov(a3, zero_reg);
805  // Correct one word for bailout id.
806  __ Addu(t0, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
807  } else if (type() == OSR) {
808  __ mov(a3, ra);
809  // Correct one word for bailout id.
810  __ Addu(t0, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
811  } else {
812  __ mov(a3, ra);
813  // Correct two words for bailout id and return address.
814  __ Addu(t0, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
815  }
816 
817  __ Subu(t0, fp, t0);
818 
819  // Allocate a new deoptimizer object.
820  // Pass four arguments in a0 to a3 and fifth & sixth arguments on stack.
821  __ PrepareCallCFunction(6, t1);
823  __ li(a1, Operand(type())); // bailout type,
824  // a2: bailout id already loaded.
825  // a3: code address or 0 already loaded.
826  __ sw(t0, CFunctionArgumentOperand(5)); // Fp-to-sp delta.
827  __ li(t1, Operand(ExternalReference::isolate_address()));
828  __ sw(t1, CFunctionArgumentOperand(6)); // Isolate.
829  // Call Deoptimizer::New().
830  {
831  AllowExternalCallThatCantCauseGC scope(masm());
832  __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
833  }
834 
835  // Preserve "deoptimizer" object in register v0 and get the input
836  // frame descriptor pointer to a1 (deoptimizer->input_);
837  // Move deopt-obj to a0 for call to Deoptimizer::ComputeOutputFrames() below.
838  __ mov(a0, v0);
839  __ lw(a1, MemOperand(v0, Deoptimizer::input_offset()));
840 
841  // Copy core registers into FrameDescription::registers_[kNumRegisters].
842  ASSERT(Register::kNumRegisters == kNumberOfRegisters);
843  for (int i = 0; i < kNumberOfRegisters; i++) {
844  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
845  if ((saved_regs & (1 << i)) != 0) {
846  __ lw(a2, MemOperand(sp, i * kPointerSize));
847  __ sw(a2, MemOperand(a1, offset));
848  } else if (FLAG_debug_code) {
849  __ li(a2, kDebugZapValue);
850  __ sw(a2, MemOperand(a1, offset));
851  }
852  }
853 
854  // Copy FPU registers to
855  // double_registers_[DoubleRegister::kNumAllocatableRegisters]
856  int double_regs_offset = FrameDescription::double_registers_offset();
857  for (int i = 0; i < FPURegister::kNumAllocatableRegisters; ++i) {
858  int dst_offset = i * kDoubleSize + double_regs_offset;
859  int src_offset = i * kDoubleSize + kNumberOfRegisters * kPointerSize;
860  __ ldc1(f0, MemOperand(sp, src_offset));
861  __ sdc1(f0, MemOperand(a1, dst_offset));
862  }
863 
864  // Remove the bailout id, eventually return address, and the saved registers
865  // from the stack.
866  if (type() == EAGER || type() == OSR) {
867  __ Addu(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
868  } else {
869  __ Addu(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
870  }
871 
872  // Compute a pointer to the unwinding limit in register a2; that is
873  // the first stack slot not part of the input frame.
875  __ Addu(a2, a2, sp);
876 
877  // Unwind the stack down to - but not including - the unwinding
878  // limit and copy the contents of the activation frame to the input
879  // frame description.
880  __ Addu(a3, a1, Operand(FrameDescription::frame_content_offset()));
881  Label pop_loop;
882  __ bind(&pop_loop);
883  __ pop(t0);
884  __ sw(t0, MemOperand(a3, 0));
885  __ Branch(USE_DELAY_SLOT, &pop_loop, ne, a2, Operand(sp));
886  __ addiu(a3, a3, sizeof(uint32_t)); // In delay slot.
887 
888  // Compute the output frame in the deoptimizer.
889  __ push(a0); // Preserve deoptimizer object across call.
890  // a0: deoptimizer object; a1: scratch.
891  __ PrepareCallCFunction(1, a1);
892  // Call Deoptimizer::ComputeOutputFrames().
893  {
894  AllowExternalCallThatCantCauseGC scope(masm());
895  __ CallCFunction(
896  ExternalReference::compute_output_frames_function(isolate), 1);
897  }
898  __ pop(a0); // Restore deoptimizer object (class Deoptimizer).
899 
900  // Replace the current (input) frame with the output frames.
901  Label outer_push_loop, inner_push_loop;
902  // Outer loop state: a0 = current "FrameDescription** output_",
903  // a1 = one past the last FrameDescription**.
905  __ lw(a0, MemOperand(a0, Deoptimizer::output_offset())); // a0 is output_.
906  __ sll(a1, a1, kPointerSizeLog2); // Count to offset.
907  __ addu(a1, a0, a1); // a1 = one past the last FrameDescription**.
908  __ bind(&outer_push_loop);
909  // Inner loop state: a2 = current FrameDescription*, a3 = loop index.
910  __ lw(a2, MemOperand(a0, 0)); // output_[ix]
912  __ bind(&inner_push_loop);
913  __ Subu(a3, a3, Operand(sizeof(uint32_t)));
914  __ Addu(t2, a2, Operand(a3));
916  __ push(t3);
917  __ Branch(&inner_push_loop, ne, a3, Operand(zero_reg));
918 
919  __ Addu(a0, a0, Operand(kPointerSize));
920  __ Branch(&outer_push_loop, lt, a0, Operand(a1));
921 
922 
923  // Push state, pc, and continuation from the last output frame.
924  if (type() != OSR) {
926  __ push(t2);
927  }
928 
930  __ push(t2);
932  __ push(t2);
933 
934 
935  // Technically restoring 'at' should work unless zero_reg is also restored
936  // but it's safer to check for this.
937  ASSERT(!(at.bit() & restored_regs));
938  // Restore the registers from the last output frame.
939  __ mov(at, a2);
940  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
941  int offset = (i * kPointerSize) + FrameDescription::registers_offset();
942  if ((restored_regs & (1 << i)) != 0) {
943  __ lw(ToRegister(i), MemOperand(at, offset));
944  }
945  }
946 
947  __ InitializeRootRegister();
948 
949  __ pop(at); // Get continuation, leave pc on stack.
950  __ pop(ra);
951  __ Jump(at);
952  __ stop("Unreachable.");
953 }
954 
955 
956 // Maximum size of a table entry generated below.
957 const int Deoptimizer::table_entry_size_ = 9 * Assembler::kInstrSize;
958 
960  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
961 
962  // Create a sequence of deoptimization entries. Note that any
963  // registers may be still live.
964  Label table_start;
965  __ bind(&table_start);
966  for (int i = 0; i < count(); i++) {
967  Label start;
968  __ bind(&start);
969  if (type() != EAGER) {
970  // Emulate ia32 like call by pushing return address to stack.
971  __ addiu(sp, sp, -2 * kPointerSize);
972  __ sw(ra, MemOperand(sp, 1 * kPointerSize));
973  } else {
974  __ addiu(sp, sp, -1 * kPointerSize);
975  }
976  // Jump over the remaining deopt entries (including this one).
977  // This code is always reached by calling Jump, which puts the target (label
978  // start) into t9.
979  const int remaining_entries = (count() - i) * table_entry_size_;
980  __ Addu(t9, t9, remaining_entries);
981  // 'at' was clobbered so we can only load the current entry value here.
982  __ li(at, i);
983  __ jr(t9); // Expose delay slot.
984  __ sw(at, MemOperand(sp, 0 * kPointerSize)); // In the delay slot.
985 
986  // Pad the rest of the code.
987  while (table_entry_size_ > (masm()->SizeOfCodeGeneratedSince(&start))) {
988  __ nop();
989  }
990 
991  ASSERT_EQ(table_entry_size_, masm()->SizeOfCodeGeneratedSince(&start));
992  }
993 
994  ASSERT_EQ(masm()->SizeOfCodeGeneratedSince(&table_start),
995  count() * table_entry_size_);
996 }
997 
998 #undef __
999 
1000 
1001 } } // namespace v8::internal
byte * Address
Definition: globals.h:172
const Register cp
Code * builtin(Name name)
Definition: builtins.h:312
static DeoptimizationOutputData * cast(Object *obj)
#define V8PRIxPTR
Definition: globals.h:204
static bool IsAddImmediate(Instr instr)
void PrintF(const char *format,...)
Definition: v8utils.cc:40
unsigned stack_slots()
Definition: objects-inl.h:3171
const FPURegister f0
static Smi * FromInt(int value)
Definition: objects-inl.h:973
void SetFrameSlot(unsigned offset, intptr_t value)
Definition: deoptimizer.h:399
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
Builtins * builtins()
Definition: isolate.h:909
uint32_t RegList
Definition: frames.h:38
static const int kNumAllocatableRegisters
#define ASSERT(condition)
Definition: checks.h:270
const RegList kJSCallerSaved
Definition: frames-arm.h:47
static void DeoptimizeFunction(JSFunction *function)
const int kPointerSizeLog2
Definition: globals.h:246
intptr_t GetContext() const
Definition: deoptimizer.h:432
void SetFrameType(StackFrame::Type type)
Definition: deoptimizer.h:441
static const int kNumRegisters
Definition: assembler-arm.h:73
static int double_registers_offset()
Definition: deoptimizer.h:459
const Register sp
#define UNREACHABLE()
Definition: checks.h:50
static int output_offset()
Definition: deoptimizer.h:222
const int kDoubleSize
Definition: globals.h:232
const int kPointerSize
Definition: globals.h:234
static void set_target_address_at(Address pc, Address target)
const RegList kCalleeSaved
Definition: frames-arm.h:63
void SetRegister(unsigned n, intptr_t value)
Definition: deoptimizer.h:413
static unsigned decode(uint32_t value)
Definition: utils.h:272
const Register pc
friend class DeoptimizingCodeListNode
Definition: deoptimizer.h:345
static int GetOutputInfo(DeoptimizationOutputData *data, unsigned node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:486
static int CallSize(Register target, Condition cond=al)
friend class BlockTrampolinePoolScope
uint32_t GetFrameSize() const
Definition: deoptimizer.h:369
void SetContinuation(intptr_t pc)
Definition: deoptimizer.h:438
static int output_count_offset()
Definition: deoptimizer.h:219
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static Address target_address_at(Address pc)
intptr_t GetFrameSlot(unsigned offset)
Definition: deoptimizer.h:378
MemOperand CFunctionArgumentOperand(int index)
static Address GetDeoptimizationEntry(int id, BailoutType type)
Definition: deoptimizer.cc:445
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
friend class FrameDescription
Definition: deoptimizer.h:344
void USE(T)
Definition: globals.h:303
virtual void GeneratePrologue()
#define __
const uint32_t kDebugZapValue
Definition: v8globals.h:93
static bool IsBeq(Instr instr)
static const int kInstrSize
static uint32_t & uint32_at(Address addr)
Definition: v8memory.h:47
static const int kNumAllocatableRegisters
const Register fp
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:403
static FPURegister FromAllocationIndex(int index)
signed short int16_t
Definition: unicode.cc:45
void SetDoubleRegister(unsigned n, double value)
Definition: deoptimizer.h:418
Register ToRegister(int num)
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
FlagType type() const
Definition: flags.cc:1358
static DeoptimizationInputData * cast(Object *obj)
static JSFunction * cast(Object *obj)