v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
deoptimizer-arm.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen.h"
31 #include "deoptimizer.h"
32 #include "full-codegen.h"
33 #include "safepoint-table.h"
34 
35 namespace v8 {
36 namespace internal {
37 
// Size in bytes of each entry in the deoptimizer's jump table.
// NOTE(review): 16 bytes presumably corresponds to 4 ARM instructions of
// Assembler::kInstrSize each — confirm against the table entry generator,
// which is outside this extract.
38 const int Deoptimizer::table_entry_size_ = 16;
39 
40 
// Deoptimizer::patch_size (body fragment).
// NOTE(review): the signature line (doxygen line 41) is missing from this
// extract — presumably `int Deoptimizer::patch_size() {`; confirm against
// the original file.
// Returns the size, in bytes, of the code sequence patched over each lazy
// bailout site: a three-instruction call.
42  const int kCallInstructionSizeInWords = 3;
43  return kCallInstructionSizeInWords * Assembler::kInstrSize;
44 }
45 
46 
// Deoptimizer::DeoptimizeFunction (body fragment).
// NOTE(review): this extract is missing doxygen line 47 (the signature —
// presumably `void Deoptimizer::DeoptimizeFunction(JSFunction* function) {`),
// line 73 (the second argument of MacroAssembler::CallSize, presumably
// RelocInfo::NONE) and line 90 (the allocation of the list node referenced
// as `node` below). Confirm against the original file before reusing.
//
// Forcibly deoptimizes `function`: overwrites every LLazyBailout site in its
// optimized code with a call to the matching lazy deoptimization entry,
// queues the code on the isolate's deoptimizing-code list, tells the GC to
// ignore recorded slots on the invalidated code, and switches the function
// back to its unoptimized (shared) code.
48  HandleScope scope;
49  AssertNoAllocation no_allocation;
50 
51  if (!function->IsOptimized()) return;
52 
53  // Get the optimized code.
54  Code* code = function->code();
55  Address code_start_address = code->instruction_start();
56 
57  // Invalidate the relocation information, as it will become invalid by the
58  // code patching below, and is not needed any more.
59  code->InvalidateRelocation();
60 
61  // For each LLazyBailout instruction insert a call to the corresponding
62  // deoptimization entry.
63  DeoptimizationInputData* deopt_data =
64  DeoptimizationInputData::cast(code->deoptimization_data());
65 #ifdef DEBUG
66  Address prev_call_address = NULL;
67 #endif
68  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
// Pc value of -1 marks an entry without a patchable call site; skip it.
69  if (deopt_data->Pc(i)->value() == -1) continue;
70  Address call_address = code_start_address + deopt_data->Pc(i)->value();
71  Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
72  int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
74  int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
75  ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
76  ASSERT(call_size_in_bytes <= patch_size());
77  CodePatcher patcher(call_address, call_size_in_words);
78  patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
// In debug builds, check that consecutive patch sites do not overlap and
// stay inside the code object.
79  ASSERT(prev_call_address == NULL ||
80  call_address >= prev_call_address + patch_size());
81  ASSERT(call_address + patch_size() <= code->instruction_end());
82 #ifdef DEBUG
83  prev_call_address = call_address;
84 #endif
85  }
86 
87  Isolate* isolate = code->GetIsolate();
88 
89  // Add the deoptimizing code to the list.
91  DeoptimizerData* data = isolate->deoptimizer_data();
92  node->set_next(data->deoptimizing_code_list_);
93  data->deoptimizing_code_list_ = node;
94 
95  // We might be in the middle of incremental marking with compaction.
96  // Tell collector to treat this code object in a special way and
97  // ignore all slots that might have been recorded on it.
98  isolate->heap()->mark_compact_collector()->InvalidateCode(code);
99 
100  // Set the code for the function to non-optimized version.
101  function->ReplaceCode(function->shared()->code());
102 
103  if (FLAG_trace_deopt) {
104  PrintF("[forced deoptimization: ");
105  function->PrintName();
106  PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
107  }
108 }
109 
110 
// Raw encodings of the ARM conditional branch that guards the stack-check /
// interrupt call in unoptimized code.  They match the instructions emitted
// by RevertStackCheckCodeAt below: 0x2a000001 is `b(+4, cs)` (branch over
// the stack-guard call) and 0x5a000004 is `b(+16, pl)` (branch over the
// interrupt call when count-based interrupts are enabled).
111 static const int32_t kBranchBeforeStackCheck = 0x2a000001;
112 static const int32_t kBranchBeforeInterrupt = 0x5a000004;
113 
114 
// Deoptimizer::PatchStackCheckCodeAt (body fragment).
// NOTE(review): this extract is missing doxygen line 115 (the first line of
// the signature — presumably `void Deoptimizer::PatchStackCheckCodeAt(
// Code* unoptimized_code,`) and line 126 (the first half of an ASSERT whose
// continuation at line 127 suggests a check that the instruction before the
// blx is a pc-relative ldr). Confirm against the original file.
//
// Patches an unoptimized stack-check site for on-stack replacement: the
// conditional branch that skips the stack-guard call is replaced with a NOP
// (so the call is taken unconditionally) and the constant-pool slot holding
// the stack-check stub address is overwritten with the entry of
// `replacement_code`.
116  Address pc_after,
117  Code* check_code,
118  Code* replacement_code) {
119  const int kInstrSize = Assembler::kInstrSize;
120  // The call of the stack guard check has the following form:
121  // e1 5d 00 0c cmp sp, <limit>
122  // 2a 00 00 01 bcs ok
123  // e5 9f c? ?? ldr ip, [pc, <stack guard address>]
124  // e1 2f ff 3c blx ip
125  ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp);
127  Assembler::instr_at(pc_after - 2 * kInstrSize)));
128  if (FLAG_count_based_interrupts) {
129  ASSERT_EQ(kBranchBeforeInterrupt,
130  Memory::int32_at(pc_after - 3 * kInstrSize));
131  } else {
132  ASSERT_EQ(kBranchBeforeStackCheck,
133  Memory::int32_at(pc_after - 3 * kInstrSize));
134  }
135 
136  // We patch the code to the following form:
137  // e1 5d 00 0c cmp sp, <limit>
138  // e1 a0 00 00 mov r0, r0 (NOP)
139  // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>]
140  // e1 2f ff 3c blx ip
141  // and overwrite the constant containing the
142  // address of the stack check stub.
143 
144  // Replace conditional jump with NOP.
145  CodePatcher patcher(pc_after - 3 * kInstrSize, 1);
146  patcher.masm()->nop();
147 
148  // Replace the stack check address in the constant pool
149  // with the entry address of the replacement code.
// The low 12 bits of the ldr instruction are its pc-relative offset into
// the constant pool.
150  uint32_t stack_check_address_offset = Memory::uint16_at(pc_after -
151  2 * kInstrSize) & 0xfff;
152  Address stack_check_address_pointer = pc_after + stack_check_address_offset;
153  ASSERT(Memory::uint32_at(stack_check_address_pointer) ==
154  reinterpret_cast<uint32_t>(check_code->entry()));
155  Memory::uint32_at(stack_check_address_pointer) =
156  reinterpret_cast<uint32_t>(replacement_code->entry());
157 
// Keep the incremental marker informed about the rewritten code target.
158  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
159  unoptimized_code, pc_after - 2 * kInstrSize, replacement_code);
160 }
161 
162 
// Deoptimizer::RevertStackCheckCodeAt (body fragment) — undoes
// PatchStackCheckCodeAt.
// NOTE(review): this extract is missing doxygen line 163 (the first line of
// the signature — presumably `void Deoptimizer::RevertStackCheckCodeAt(
// Code* unoptimized_code,`), line 169 (the first half of an ASSERT, see the
// matching assert in PatchStackCheckCodeAt) and line 194 (presumably the
// `...->RecordCodeTargetPatch(` call whose arguments appear at line 195).
// Confirm against the original file.
//
// Restores the conditional branch over the stack-guard call and puts the
// original stack-check stub entry back into the constant pool.
164  Address pc_after,
165  Code* check_code,
166  Code* replacement_code) {
167  const int kInstrSize = Assembler::kInstrSize;
168  ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp);
170  Assembler::instr_at(pc_after - 2 * kInstrSize)));
171 
172  // Replace NOP with conditional jump.
173  CodePatcher patcher(pc_after - 3 * kInstrSize, 1);
174  if (FLAG_count_based_interrupts) {
175  patcher.masm()->b(+16, pl);
176  ASSERT_EQ(kBranchBeforeInterrupt,
177  Memory::int32_at(pc_after - 3 * kInstrSize));
178  } else {
179  patcher.masm()->b(+4, cs);
180  ASSERT_EQ(kBranchBeforeStackCheck,
181  Memory::int32_at(pc_after - 3 * kInstrSize));
182  }
183 
184  // Replace the stack check address in the constant pool
185  // with the entry address of the replacement code.
186  uint32_t stack_check_address_offset = Memory::uint16_at(pc_after -
187  2 * kInstrSize) & 0xfff;
188  Address stack_check_address_pointer = pc_after + stack_check_address_offset;
189  ASSERT(Memory::uint32_at(stack_check_address_pointer) ==
190  reinterpret_cast<uint32_t>(replacement_code->entry()));
191  Memory::uint32_at(stack_check_address_pointer) =
192  reinterpret_cast<uint32_t>(check_code->entry());
193 
195  unoptimized_code, pc_after - 2 * kInstrSize, check_code);
196 }
197 
198 
199 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
200  ByteArray* translations = data->TranslationByteArray();
201  int length = data->DeoptCount();
202  for (int i = 0; i < length; i++) {
203  if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
204  TranslationIterator it(translations, data->TranslationIndex(i)->value());
205  int value = it.Next();
206  ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
207  // Read the number of frames.
208  value = it.Next();
209  if (value == 1) return i;
210  }
211  }
212  UNREACHABLE();
213  return -1;
214 }
215 
216 
// Builds the single output frame used for on-stack replacement (OSR):
// translates the optimized input frame back into an unoptimized JavaScript
// frame laid out for the OSR AST id.  If any translation command fails, the
// raw input frame is reused and execution resumes at `from_`; otherwise the
// pc is set to the OSR entry point of the optimized code.  Either way the
// continuation is the NotifyOSR builtin.
// NOTE(review): this extract is missing doxygen lines 296-297 (the header of
// the fixed-part copy loop below — presumably iterating `i` over
// StandardFrameConstants offsets) and the `case` labels at lines
// 303/306/309/312 inside the tracing switch.  Confirm against the original
// file before reusing.
217 void Deoptimizer::DoComputeOsrOutputFrame() {
218  DeoptimizationInputData* data = DeoptimizationInputData::cast(
219  optimized_code_->deoptimization_data());
220  unsigned ast_id = data->OsrAstId()->value();
221 
222  int bailout_id = LookupBailoutId(data, ast_id);
223  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
224  ByteArray* translations = data->TranslationByteArray();
225 
226  TranslationIterator iterator(translations, translation_index);
227  Translation::Opcode opcode =
228  static_cast<Translation::Opcode>(iterator.Next());
229  ASSERT(Translation::BEGIN == opcode);
230  USE(opcode);
231  int count = iterator.Next();
232  iterator.Skip(1); // Drop JS frame count.
// OSR translations always describe a single (non-inlined) frame.
233  ASSERT(count == 1);
234  USE(count);
235 
236  opcode = static_cast<Translation::Opcode>(iterator.Next());
237  USE(opcode);
238  ASSERT(Translation::JS_FRAME == opcode);
239  unsigned node_id = iterator.Next();
240  USE(node_id);
241  ASSERT(node_id == ast_id);
242  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
243  USE(function);
244  ASSERT(function == function_);
245  unsigned height = iterator.Next();
246  unsigned height_in_bytes = height * kPointerSize;
247  USE(height_in_bytes);
248 
249  unsigned fixed_size = ComputeFixedSize(function_);
250  unsigned input_frame_size = input_->GetFrameSize();
251  ASSERT(fixed_size + height_in_bytes == input_frame_size);
252 
253  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
254  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
255  unsigned outgoing_size = outgoing_height * kPointerSize;
256  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
257  ASSERT(outgoing_size == 0); // OSR does not happen in the middle of a call.
258 
259  if (FLAG_trace_osr) {
260  PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
261  reinterpret_cast<intptr_t>(function_));
262  function_->PrintName();
263  PrintF(" => node=%u, frame=%d->%d]\n",
264  ast_id,
265  input_frame_size,
266  output_frame_size);
267  }
268 
269  // There's only one output frame in the OSR case.
270  output_count_ = 1;
271  output_ = new FrameDescription*[1];
272  output_[0] = new(output_frame_size) FrameDescription(
273  output_frame_size, function_);
274  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
275 
276  // Clear the incoming parameters in the optimized frame to avoid
277  // confusing the garbage collector.
278  unsigned output_offset = output_frame_size - kPointerSize;
// +1 accounts for the receiver in addition to the formal parameters.
279  int parameter_count = function_->shared()->formal_parameter_count() + 1;
280  for (int i = 0; i < parameter_count; ++i) {
281  output_[0]->SetFrameSlot(output_offset, 0);
282  output_offset -= kPointerSize;
283  }
284 
285  // Translate the incoming parameters. This may overwrite some of the
286  // incoming argument slots we've just cleared.
287  int input_offset = input_frame_size - kPointerSize;
288  bool ok = true;
289  int limit = input_offset - (parameter_count * kPointerSize);
290  while (ok && input_offset > limit) {
291  ok = DoOsrTranslateCommand(&iterator, &input_offset);
292  }
293 
294  // There are no translation commands for the caller's pc and fp, the
295  // context, and the function. Set them up explicitly.
298  i -= kPointerSize) {
299  uint32_t input_value = input_->GetFrameSlot(input_offset);
300  if (FLAG_trace_osr) {
301  const char* name = "UNKNOWN";
302  switch (i) {
304  name = "caller's pc";
305  break;
307  name = "fp";
308  break;
310  name = "context";
311  break;
313  name = "function";
314  break;
315  }
316  PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
317  output_offset,
318  input_value,
319  input_offset,
320  name);
321  }
322 
// Copy the fixed-part slot verbatim from the input frame.
323  output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
324  input_offset -= kPointerSize;
325  output_offset -= kPointerSize;
326  }
327 
328  // Translate the rest of the frame.
329  while (ok && input_offset >= 0) {
330  ok = DoOsrTranslateCommand(&iterator, &input_offset);
331  }
332 
333  // If translation of any command failed, continue using the input frame.
334  if (!ok) {
335  delete output_[0];
336  output_[0] = input_;
337  output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
338  } else {
339  // Set up the frame pointer and the context pointer.
340  output_[0]->SetRegister(fp.code(), input_->GetRegister(fp.code()));
341  output_[0]->SetRegister(cp.code(), input_->GetRegister(cp.code()));
342 
343  unsigned pc_offset = data->OsrPcOffset()->value();
344  uint32_t pc = reinterpret_cast<uint32_t>(
345  optimized_code_->entry() + pc_offset);
346  output_[0]->SetPc(pc);
347  }
348  Code* continuation = isolate_->builtins()->builtin(Builtins::kNotifyOSR);
349  output_[0]->SetContinuation(
350  reinterpret_cast<uint32_t>(continuation->entry()));
351 
352  if (FLAG_trace_osr) {
353  PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
354  ok ? "finished" : "aborted",
355  reinterpret_cast<intptr_t>(function));
356  function->PrintName();
357  PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
358  }
359 }
360 
361 
// Builds the output FrameDescription for an arguments-adaptor frame that
// sits between two translated JavaScript frames.  The frame resumes in the
// ArgumentsAdaptorTrampoline builtin just past its deopt point, so it can
// never be the topmost or bottommost output frame.
// NOTE(review): this extract is missing doxygen line 421 — the value cast on
// the line above it, presumably the adaptor context sentinel
// `Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));`.  Confirm against the
// original file before reusing.
362 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
363  int frame_index) {
364  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
365  unsigned height = iterator->Next();
366  unsigned height_in_bytes = height * kPointerSize;
367  if (FLAG_trace_deopt) {
368  PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
369  }
370 
371  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
372  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
373 
374  // Allocate and store the output frame description.
375  FrameDescription* output_frame =
376  new(output_frame_size) FrameDescription(output_frame_size, function);
377  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
378 
379  // Arguments adaptor can not be topmost or bottommost.
380  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
381  ASSERT(output_[frame_index] == NULL);
382  output_[frame_index] = output_frame;
383 
384  // The top address of the frame is computed from the previous
385  // frame's top and this frame's size.
386  uint32_t top_address;
387  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
388  output_frame->SetTop(top_address);
389 
390  // Compute the incoming parameter translation.
391  int parameter_count = height;
392  unsigned output_offset = output_frame_size;
393  for (int i = 0; i < parameter_count; ++i) {
394  output_offset -= kPointerSize;
395  DoTranslateCommand(iterator, frame_index, output_offset);
396  }
397 
398  // Read caller's PC from the previous frame.
399  output_offset -= kPointerSize;
400  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
401  output_frame->SetFrameSlot(output_offset, callers_pc);
402  if (FLAG_trace_deopt) {
403  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
404  top_address + output_offset, output_offset, callers_pc);
405  }
406 
407  // Read caller's FP from the previous frame, and set this frame's FP.
408  output_offset -= kPointerSize;
409  intptr_t value = output_[frame_index - 1]->GetFp();
410  output_frame->SetFrameSlot(output_offset, value);
411  intptr_t fp_value = top_address + output_offset;
412  output_frame->SetFp(fp_value);
413  if (FLAG_trace_deopt) {
414  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
415  fp_value, output_offset, value);
416  }
417 
418  // A marker value is used in place of the context.
419  output_offset -= kPointerSize;
420  intptr_t context = reinterpret_cast<intptr_t>(
422  output_frame->SetFrameSlot(output_offset, context);
423  if (FLAG_trace_deopt) {
424  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
425  top_address + output_offset, output_offset, context);
426  }
427 
428  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
429  output_offset -= kPointerSize;
430  value = reinterpret_cast<intptr_t>(function);
431  output_frame->SetFrameSlot(output_offset, value);
432  if (FLAG_trace_deopt) {
433  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
434  top_address + output_offset, output_offset, value);
435  }
436 
437  // Number of incoming arguments.
438  output_offset -= kPointerSize;
// height - 1: the receiver is not counted in argc.
439  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
440  output_frame->SetFrameSlot(output_offset, value);
441  if (FLAG_trace_deopt) {
442  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
443  top_address + output_offset, output_offset, value, height - 1);
444  }
445 
446  ASSERT(0 == output_offset);
447 
// Resume in the adaptor trampoline just past its deopt point.
448  Builtins* builtins = isolate_->builtins();
449  Code* adaptor_trampoline =
450  builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
451  uint32_t pc = reinterpret_cast<uint32_t>(
452  adaptor_trampoline->instruction_start() +
453  isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
454  output_frame->SetPc(pc);
455 }
456 
457 
458 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
459  int frame_index) {
460  Builtins* builtins = isolate_->builtins();
461  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
462  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
463  unsigned height = iterator->Next();
464  unsigned height_in_bytes = height * kPointerSize;
465  if (FLAG_trace_deopt) {
466  PrintF(" translating construct stub => height=%d\n", height_in_bytes);
467  }
468 
469  unsigned fixed_frame_size = 8 * kPointerSize;
470  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
471 
472  // Allocate and store the output frame description.
473  FrameDescription* output_frame =
474  new(output_frame_size) FrameDescription(output_frame_size, function);
475  output_frame->SetFrameType(StackFrame::CONSTRUCT);
476 
477  // Construct stub can not be topmost or bottommost.
478  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
479  ASSERT(output_[frame_index] == NULL);
480  output_[frame_index] = output_frame;
481 
482  // The top address of the frame is computed from the previous
483  // frame's top and this frame's size.
484  uint32_t top_address;
485  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
486  output_frame->SetTop(top_address);
487 
488  // Compute the incoming parameter translation.
489  int parameter_count = height;
490  unsigned output_offset = output_frame_size;
491  for (int i = 0; i < parameter_count; ++i) {
492  output_offset -= kPointerSize;
493  DoTranslateCommand(iterator, frame_index, output_offset);
494  }
495 
496  // Read caller's PC from the previous frame.
497  output_offset -= kPointerSize;
498  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
499  output_frame->SetFrameSlot(output_offset, callers_pc);
500  if (FLAG_trace_deopt) {
501  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
502  top_address + output_offset, output_offset, callers_pc);
503  }
504 
505  // Read caller's FP from the previous frame, and set this frame's FP.
506  output_offset -= kPointerSize;
507  intptr_t value = output_[frame_index - 1]->GetFp();
508  output_frame->SetFrameSlot(output_offset, value);
509  intptr_t fp_value = top_address + output_offset;
510  output_frame->SetFp(fp_value);
511  if (FLAG_trace_deopt) {
512  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
513  fp_value, output_offset, value);
514  }
515 
516  // The context can be gotten from the previous frame.
517  output_offset -= kPointerSize;
518  value = output_[frame_index - 1]->GetContext();
519  output_frame->SetFrameSlot(output_offset, value);
520  if (FLAG_trace_deopt) {
521  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
522  top_address + output_offset, output_offset, value);
523  }
524 
525  // A marker value is used in place of the function.
526  output_offset -= kPointerSize;
527  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
528  output_frame->SetFrameSlot(output_offset, value);
529  if (FLAG_trace_deopt) {
530  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
531  top_address + output_offset, output_offset, value);
532  }
533 
534  // The output frame reflects a JSConstructStubGeneric frame.
535  output_offset -= kPointerSize;
536  value = reinterpret_cast<intptr_t>(construct_stub);
537  output_frame->SetFrameSlot(output_offset, value);
538  if (FLAG_trace_deopt) {
539  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; code object\n",
540  top_address + output_offset, output_offset, value);
541  }
542 
543  // Number of incoming arguments.
544  output_offset -= kPointerSize;
545  value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
546  output_frame->SetFrameSlot(output_offset, value);
547  if (FLAG_trace_deopt) {
548  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
549  top_address + output_offset, output_offset, value, height - 1);
550  }
551 
552  // Constructor function being invoked by the stub.
553  output_offset -= kPointerSize;
554  value = reinterpret_cast<intptr_t>(function);
555  output_frame->SetFrameSlot(output_offset, value);
556  if (FLAG_trace_deopt) {
557  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
558  top_address + output_offset, output_offset, value);
559  }
560 
561  // The newly allocated object was passed as receiver in the artificial
562  // constructor stub environment created by HEnvironment::CopyForInlining().
563  output_offset -= kPointerSize;
564  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
565  output_frame->SetFrameSlot(output_offset, value);
566  if (FLAG_trace_deopt) {
567  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
568  top_address + output_offset, output_offset, value);
569  }
570 
571  ASSERT(0 == output_offset);
572 
573  uint32_t pc = reinterpret_cast<uint32_t>(
574  construct_stub->instruction_start() +
575  isolate_->heap()->construct_stub_deopt_pc_offset()->value());
576  output_frame->SetPc(pc);
577 }
578 
579 
580 // This code is very similar to ia32 code, but relies on register names (fp, sp)
581 // and how the frame is laid out.
// Builds one unoptimized JavaScript output frame from the translation:
// translated parameters, caller pc/fp, context, function, translated locals
// and expression stack, then the resume pc (and, for the topmost frame, the
// continuation builtin and the fp/cp/pc register values).
// NOTE(review): this extract is missing doxygen lines 732-733 just before
// `SetState` below — presumably the decode of `state` from `pc_and_state`
// via FullCodeGenerator::StateField.  Confirm against the original file
// before reusing.
582 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
583  int frame_index) {
584  // Read the ast node id, function, and frame height for this output frame.
585  int node_id = iterator->Next();
586  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
587  unsigned height = iterator->Next();
588  unsigned height_in_bytes = height * kPointerSize;
589  if (FLAG_trace_deopt) {
590  PrintF(" translating ");
591  function->PrintName();
592  PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
593  }
594 
595  // The 'fixed' part of the frame consists of the incoming parameters and
596  // the part described by JavaScriptFrameConstants.
597  unsigned fixed_frame_size = ComputeFixedSize(function);
598  unsigned input_frame_size = input_->GetFrameSize();
599  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
600 
601  // Allocate and store the output frame description.
602  FrameDescription* output_frame =
603  new(output_frame_size) FrameDescription(output_frame_size, function);
604  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
605 
606  bool is_bottommost = (0 == frame_index);
607  bool is_topmost = (output_count_ - 1 == frame_index);
608  ASSERT(frame_index >= 0 && frame_index < output_count_);
609  ASSERT(output_[frame_index] == NULL);
610  output_[frame_index] = output_frame;
611 
612  // The top address for the bottommost output frame can be computed from
613  // the input frame pointer and the output frame's height. For all
614  // subsequent output frames, it can be computed from the previous one's
615  // top address and the current frame's size.
616  uint32_t top_address;
617  if (is_bottommost) {
618  // 2 = context and function in the frame.
619  top_address =
620  input_->GetRegister(fp.code()) - (2 * kPointerSize) - height_in_bytes;
621  } else {
622  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
623  }
624  output_frame->SetTop(top_address);
625 
626  // Compute the incoming parameter translation.
// +1 accounts for the receiver in addition to the formal parameters.
627  int parameter_count = function->shared()->formal_parameter_count() + 1;
628  unsigned output_offset = output_frame_size;
629  unsigned input_offset = input_frame_size;
630  for (int i = 0; i < parameter_count; ++i) {
631  output_offset -= kPointerSize;
632  DoTranslateCommand(iterator, frame_index, output_offset);
633  }
634  input_offset -= (parameter_count * kPointerSize);
635 
636  // There are no translation commands for the caller's pc and fp, the
637  // context, and the function. Synthesize their values and set them up
638  // explicitly.
639  //
640  // The caller's pc for the bottommost output frame is the same as in the
641  // input frame. For all subsequent output frames, it can be read from the
642  // previous one. This frame's pc can be computed from the non-optimized
643  // function code and AST id of the bailout.
644  output_offset -= kPointerSize;
645  input_offset -= kPointerSize;
646  intptr_t value;
647  if (is_bottommost) {
648  value = input_->GetFrameSlot(input_offset);
649  } else {
650  value = output_[frame_index - 1]->GetPc();
651  }
652  output_frame->SetFrameSlot(output_offset, value);
653  if (FLAG_trace_deopt) {
654  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
655  top_address + output_offset, output_offset, value);
656  }
657 
658  // The caller's frame pointer for the bottommost output frame is the same
659  // as in the input frame. For all subsequent output frames, it can be
660  // read from the previous one. Also compute and set this frame's frame
661  // pointer.
662  output_offset -= kPointerSize;
663  input_offset -= kPointerSize;
664  if (is_bottommost) {
665  value = input_->GetFrameSlot(input_offset);
666  } else {
667  value = output_[frame_index - 1]->GetFp();
668  }
669  output_frame->SetFrameSlot(output_offset, value);
670  intptr_t fp_value = top_address + output_offset;
671  ASSERT(!is_bottommost || input_->GetRegister(fp.code()) == fp_value);
672  output_frame->SetFp(fp_value);
673  if (is_topmost) {
674  output_frame->SetRegister(fp.code(), fp_value);
675  }
676  if (FLAG_trace_deopt) {
677  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
678  fp_value, output_offset, value);
679  }
680 
681  // For the bottommost output frame the context can be gotten from the input
682  // frame. For all subsequent output frames it can be gotten from the function
683  // so long as we don't inline functions that need local contexts.
684  output_offset -= kPointerSize;
685  input_offset -= kPointerSize;
686  if (is_bottommost) {
687  value = input_->GetFrameSlot(input_offset);
688  } else {
689  value = reinterpret_cast<intptr_t>(function->context());
690  }
691  output_frame->SetFrameSlot(output_offset, value);
692  output_frame->SetContext(value);
693  if (is_topmost) output_frame->SetRegister(cp.code(), value);
694  if (FLAG_trace_deopt) {
695  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
696  top_address + output_offset, output_offset, value);
697  }
698 
699  // The function was mentioned explicitly in the BEGIN_FRAME.
700  output_offset -= kPointerSize;
701  input_offset -= kPointerSize;
702  value = reinterpret_cast<uint32_t>(function);
703  // The function for the bottommost output frame should also agree with the
704  // input frame.
705  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
706  output_frame->SetFrameSlot(output_offset, value);
707  if (FLAG_trace_deopt) {
708  PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
709  top_address + output_offset, output_offset, value);
710  }
711 
712  // Translate the rest of the frame.
713  for (unsigned i = 0; i < height; ++i) {
714  output_offset -= kPointerSize;
715  DoTranslateCommand(iterator, frame_index, output_offset);
716  }
717  ASSERT(0 == output_offset);
718 
719  // Compute this frame's PC, state, and continuation.
720  Code* non_optimized_code = function->shared()->code();
721  FixedArray* raw_data = non_optimized_code->deoptimization_data();
722  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
723  Address start = non_optimized_code->instruction_start();
724  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
725  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
726  uint32_t pc_value = reinterpret_cast<uint32_t>(start + pc_offset);
727  output_frame->SetPc(pc_value);
728  if (is_topmost) {
729  output_frame->SetRegister(pc.code(), pc_value);
730  }
731 
734  output_frame->SetState(Smi::FromInt(state));
735 
736 
737  // Set the continuation for the topmost frame.
738  if (is_topmost && bailout_type_ != DEBUGGER) {
739  Builtins* builtins = isolate_->builtins();
740  Code* continuation = (bailout_type_ == EAGER)
741  ? builtins->builtin(Builtins::kNotifyDeoptimized)
742  : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
743  output_frame->SetContinuation(
744  reinterpret_cast<uint32_t>(continuation->entry()));
745  }
746 }
747 
748 
749 void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
750  // Set the register values. The values are not important as there are no
751  // callee saved registers in JavaScript frames, so all registers are
752  // spilled. Registers fp and sp are set to the correct values though.
753 
754  for (int i = 0; i < Register::kNumRegisters; i++) {
755  input_->SetRegister(i, i * 4);
756  }
757  input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp()));
758  input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
759  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
760  input_->SetDoubleRegister(i, 0.0);
761  }
762 
763  // Fill the frame content from the actual data on the frame.
764  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
765  input_->SetFrameSlot(i, Memory::uint32_at(tos + i));
766  }
767 }
768 
769 
770 #define __ masm()->
771 
772 // This code tries to be close to ia32 code so that any changes can be
773 // easily ported.
// Emits the deoptimization entry stub: saves the complete register state
// into the Deoptimizer's input FrameDescription, calls back into C++ to
// compute the output (unoptimized) frames, then materializes those frames
// on the stack and jumps to the continuation.
// NOTE(review): several single lines of this function were lost when this
// listing was extracted (original listing lines 787, 792, 837, 852, 882,
// 888, 913, 919, 923, 933, 937 and 939 are absent). Each gap is flagged
// inline below -- restore them from the upstream sources before compiling.
void Deoptimizer::EntryGenerator::Generate() {
  GeneratePrologue();

  Isolate* isolate = masm()->isolate();

  // VFP3 is required: the stub saves and copies the d-registers below.
  CpuFeatures::Scope scope(VFP3);
  // Save all general purpose registers before messing with them.
  const int kNumberOfRegisters = Register::kNumRegisters;

  // Everything but pc, lr and ip which will be saved but not restored.
  RegList restored_regs = kJSCallerSaved | kCalleeSaved | ip.bit();

  // NOTE(review): the initializer's continuation (original line 787) was
  // lost in extraction -- presumably
  // kDoubleSize * DwVfpRegister::kNumAllocatableRegisters; confirm upstream.
  const int kDoubleRegsSize =

  // Save all VFP registers before messing with them.
  DwVfpRegister first = DwVfpRegister::FromAllocationIndex(0);
  // NOTE(review): a continuation line (original line 792, presumably
  // DwVfpRegister::FromAllocationIndex( ) was lost in extraction.
  DwVfpRegister last =
      DwVfpRegister::kNumAllocatableRegisters - 1);
  ASSERT(last.code() > first.code());
  ASSERT((last.code() - first.code()) ==
         (DwVfpRegister::kNumAllocatableRegisters - 1));
#ifdef DEBUG
  // The allocation indices must map onto a contiguous register range so the
  // single vstm below covers exactly the allocatable VFP registers.
  for (int i = 0; i <= (DwVfpRegister::kNumAllocatableRegisters - 1); i++) {
    ASSERT((DwVfpRegister::FromAllocationIndex(i).code() <= last.code()) &&
           (DwVfpRegister::FromAllocationIndex(i).code() >= first.code()));
  }
#endif
  __ vstm(db_w, sp, first, last);

  // Push all 16 registers (needed to populate FrameDescription::registers_).
  // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
  // handle this a bit differently.
  __ stm(db_w, sp, restored_regs | sp.bit() | lr.bit() | pc.bit());

  // Total bytes pushed above: the 16 core registers plus the VFP block.
  const int kSavedRegistersAreaSize =
      (kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;

  // Get the bailout id from the stack.
  __ ldr(r2, MemOperand(sp, kSavedRegistersAreaSize));

  // Get the address of the location in the code object if possible (r3) (return
  // address for lazy deoptimization) and compute the fp-to-sp delta in
  // register r4.
  if (type() == EAGER) {
    __ mov(r3, Operand(0));
    // Correct one word for bailout id.
    __ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
  } else if (type() == OSR) {
    __ mov(r3, lr);
    // Correct one word for bailout id.
    __ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
  } else {
    __ mov(r3, lr);
    // Correct two words for bailout id and return address.
    __ add(r4, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
  }
  __ sub(r4, fp, r4);

  // Allocate a new deoptimizer object.
  // Pass four arguments in r0 to r3 and fifth argument on stack.
  __ PrepareCallCFunction(6, r5);
  // NOTE(review): the instruction loading the first C argument into r0
  // (original line 837, presumably the function being deoptimized) was lost
  // in extraction.
  __ mov(r1, Operand(type()));  // bailout type,
  // r2: bailout id already loaded.
  // r3: code address or 0 already loaded.
  __ str(r4, MemOperand(sp, 0 * kPointerSize));  // Fp-to-sp delta.
  __ mov(r5, Operand(ExternalReference::isolate_address()));
  __ str(r5, MemOperand(sp, 1 * kPointerSize));  // Isolate.
  // Call Deoptimizer::New().
  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
  }

  // Preserve "deoptimizer" object in register r0 and get the input
  // frame descriptor pointer to r1 (deoptimizer->input_);
  // NOTE(review): the load implementing the comment above (original line
  // 852) was lost in extraction.

  // Copy core registers into FrameDescription::registers_[kNumRegisters].
  ASSERT(Register::kNumRegisters == kNumberOfRegisters);
  for (int i = 0; i < kNumberOfRegisters; i++) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ ldr(r2, MemOperand(sp, i * kPointerSize));
    __ str(r2, MemOperand(r1, offset));
  }

  // Copy VFP registers to
  // double_registers_[DoubleRegister::kNumAllocatableRegisters]
  int double_regs_offset = FrameDescription::double_registers_offset();
  for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; ++i) {
    int dst_offset = i * kDoubleSize + double_regs_offset;
    // The doubles sit above the 16 core registers pushed by the stm above.
    int src_offset = i * kDoubleSize + kNumberOfRegisters * kPointerSize;
    __ vldr(d0, sp, src_offset);
    __ vstr(d0, r1, dst_offset);
  }

  // Remove the bailout id, eventually return address, and the saved registers
  // from the stack.
  if (type() == EAGER || type() == OSR) {
    __ add(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
  } else {
    __ add(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
  }

  // Compute a pointer to the unwinding limit in register r2; that is
  // the first stack slot not part of the input frame.
  // NOTE(review): the load of the input frame size into r2 (original line
  // 882) was lost in extraction.
  __ add(r2, r2, sp);

  // Unwind the stack down to - but not including - the unwinding
  // limit and copy the contents of the activation frame to the input
  // frame description.
  // NOTE(review): the instruction initializing r3 (original line 888,
  // presumably r1 + FrameDescription::frame_content_offset()) was lost in
  // extraction.
  Label pop_loop;
  __ bind(&pop_loop);
  __ pop(r4);
  __ str(r4, MemOperand(r3, 0));
  __ add(r3, r3, Operand(sizeof(uint32_t)));
  __ cmp(r2, sp);
  __ b(ne, &pop_loop);

  // Compute the output frame in the deoptimizer.
  __ push(r0);  // Preserve deoptimizer object across call.
  // r0: deoptimizer object; r1: scratch.
  __ PrepareCallCFunction(1, r1);
  // Call Deoptimizer::ComputeOutputFrames().
  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(
        ExternalReference::compute_output_frames_function(isolate), 1);
  }
  __ pop(r0);  // Restore deoptimizer object (class Deoptimizer).

  // Replace the current (input) frame with the output frames.
  Label outer_push_loop, inner_push_loop;
  // Outer loop state: r0 = current "FrameDescription** output_",
  // r1 = one past the last FrameDescription**.
  // NOTE(review): the load initializing r1 (original line 913, presumably
  // the output frame count) was lost in extraction.
  __ ldr(r0, MemOperand(r0, Deoptimizer::output_offset()));  // r0 is output_.
  __ add(r1, r0, Operand(r1, LSL, 2));
  __ bind(&outer_push_loop);
  // Inner loop state: r2 = current FrameDescription*, r3 = loop index.
  __ ldr(r2, MemOperand(r0, 0));  // output_[ix]
  // NOTE(review): the load initializing r3 (original line 919, presumably
  // the frame size of output_[ix]) was lost in extraction.
  __ bind(&inner_push_loop);
  __ sub(r3, r3, Operand(sizeof(uint32_t)));
  __ add(r6, r2, Operand(r3));
  // NOTE(review): the load into r7 (original line 923, presumably the frame
  // content word addressed via r6) was lost in extraction.
  __ push(r7);
  __ cmp(r3, Operand(0));
  __ b(ne, &inner_push_loop);  // test for gt?
  __ add(r0, r0, Operand(kPointerSize));
  __ cmp(r0, r1);
  __ b(lt, &outer_push_loop);

  // Push state, pc, and continuation from the last output frame.
  if (type() != OSR) {
    // NOTE(review): the load into r6 (original line 933, presumably the
    // frame's state word) was lost in extraction.
    __ push(r6);
  }

  // NOTE(review): the loads into r6 before each of the two pushes below
  // (original lines 937 and 939, presumably the frame's pc and continuation)
  // were lost in extraction.
  __ push(r6);
  __ push(r6);

  // Push the registers from the last output frame.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ ldr(r6, MemOperand(r2, offset));
    __ push(r6);
  }

  // Restore the registers from the stack.
  __ ldm(ia_w, sp, restored_regs);  // all but pc registers.
  __ pop(ip);  // remove sp
  __ pop(ip);  // remove lr

  __ InitializeRootRegister();

  __ pop(ip);  // remove pc
  __ pop(r7);  // get continuation, leave pc on stack
  __ pop(lr);
  __ Jump(r7);
  __ stop("Unreachable.");
}
962 
963 
  // NOTE(review): the enclosing function signature (original listing line
  // 964, presumably Deoptimizer::TableEntryGenerator::GeneratePrologue) was
  // lost when this listing was extracted; the body below is complete.
  // Create a sequence of deoptimization entries. Note that any
  // registers may be still live.
  Label done;
  // Emit one fixed-size table entry per deoptimization id; each entry pushes
  // its id and branches to the shared tail at `done`.
  Label done;
  for (int i = 0; i < count(); i++) {
  int start = masm()->pc_offset();
  USE(start);  // Only read by the ASSERT below in debug builds.
  if (type() == EAGER) {
  // Eager entries push no return address; pad with a nop so every entry
  // still occupies exactly table_entry_size_ bytes.
  __ nop();
  } else {
  // Emulate ia32 like call by pushing return address to stack.
  __ push(lr);
  }
  __ mov(ip, Operand(i));
  __ push(ip);
  __ b(&done);
  // Every entry must have the same fixed size so callers can index the table.
  ASSERT(masm()->pc_offset() - start == table_entry_size_);
  }
  __ bind(&done);
}
984 
985 #undef __
986 
987 } } // namespace v8::internal
byte * Address
Definition: globals.h:172
const Register cp
Code * builtin(Name name)
Definition: builtins.h:312
static DeoptimizationOutputData * cast(Object *obj)
void InvalidateRelocation()
Definition: objects.cc:8107
#define V8PRIxPTR
Definition: globals.h:204
const Register r3
void PrintF(const char *format,...)
Definition: v8utils.cc:40
unsigned stack_slots()
Definition: objects-inl.h:3171
static Smi * FromInt(int value)
Definition: objects-inl.h:973
void SetFrameSlot(unsigned offset, intptr_t value)
Definition: deoptimizer.h:399
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const DwVfpRegister d0
const Register r6
void b(int branch_offset, Condition cond=al)
Builtins * builtins()
Definition: isolate.h:909
int int32_t
Definition: unicode.cc:47
uint32_t RegList
Definition: frames.h:38
static const int kNumAllocatableRegisters
void nop(int type=0)
byte * instruction_end()
Definition: objects-inl.h:4381
#define ASSERT(condition)
Definition: checks.h:270
const RegList kJSCallerSaved
Definition: frames-arm.h:47
static void DeoptimizeFunction(JSFunction *function)
intptr_t GetContext() const
Definition: deoptimizer.h:432
static uint16_t & uint16_at(Address addr)
Definition: v8memory.h:43
void RecordCodeTargetPatch(Code *host, Address pc, HeapObject *value)
void SetFrameType(StackFrame::Type type)
Definition: deoptimizer.h:441
const Register r2
static const int kNumRegisters
Definition: assembler-arm.h:73
static int double_registers_offset()
Definition: deoptimizer.h:459
static DwVfpRegister FromAllocationIndex(int index)
const Register sp
#define UNREACHABLE()
Definition: checks.h:50
static int output_offset()
Definition: deoptimizer.h:222
const int kDoubleSize
Definition: globals.h:232
const Register ip
byte * instruction_start()
Definition: objects-inl.h:4376
const int kPointerSize
Definition: globals.h:234
const RegList kCalleeSaved
Definition: frames-arm.h:63
static int32_t & int32_at(Address addr)
Definition: v8memory.h:51
const Instr kBlxIp
#define __
void SetRegister(unsigned n, intptr_t value)
Definition: deoptimizer.h:413
static unsigned decode(uint32_t value)
Definition: utils.h:272
const Register pc
friend class DeoptimizingCodeListNode
Definition: deoptimizer.h:345
void set_next(DeoptimizingCodeListNode *next)
Definition: deoptimizer.h:626
static int GetOutputInfo(DeoptimizationOutputData *data, unsigned node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:486
const Register r0
static int CallSize(Register target, Condition cond=al)
DeoptimizerData * deoptimizer_data()
Definition: isolate.h:823
uint32_t GetFrameSize() const
Definition: deoptimizer.h:369
void SetContinuation(intptr_t pc)
Definition: deoptimizer.h:438
static int output_count_offset()
Definition: deoptimizer.h:219
const Register lr
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
intptr_t GetFrameSlot(unsigned offset)
Definition: deoptimizer.h:378
const Register r1
static Address GetDeoptimizationEntry(int id, BailoutType type)
Definition: deoptimizer.cc:445
IncrementalMarking * incremental_marking()
Definition: heap.h:1524
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
friend class FrameDescription
Definition: deoptimizer.h:344
void Call(Register target, Condition cond=al)
void USE(T)
Definition: globals.h:303
virtual void GeneratePrologue()
static const int kInstrSize
static uint32_t & uint32_at(Address addr)
Definition: v8memory.h:47
const Register fp
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:403
void SetDoubleRegister(unsigned n, double value)
Definition: deoptimizer.h:418
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
FlagType type() const
Definition: flags.cc:1358
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1512
const Register r5
static bool IsLdrPcImmediateOffset(Instr instr)
static DeoptimizationInputData * cast(Object *obj)
const Register r4
const Register r7
static JSFunction * cast(Object *obj)