v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
frames.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "ast.h"
31 #include "deoptimizer.h"
32 #include "frames-inl.h"
33 #include "full-codegen.h"
34 #include "lazy-instance.h"
35 #include "mark-compact.h"
36 #include "safepoint-table.h"
37 #include "scopeinfo.h"
38 #include "string-stream.h"
39 #include "vm-state-inl.h"
40 
41 namespace v8 {
42 namespace internal {
43 
44 
45 ReturnAddressLocationResolver
46  StackFrame::return_address_location_resolver_ = NULL;
47 
48 
49 // Iterator that supports traversing the stack handlers of a
50 // particular frame. Needs to know the top of the handler chain.
51 class StackHandlerIterator BASE_EMBEDDED {
52  public:
53  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
54  : limit_(frame->fp()), handler_(handler) {
55  // Make sure the handler has already been unwound to this frame.
56  ASSERT(frame->sp() <= handler->address());
57  }
58 
59  StackHandler* handler() const { return handler_; }
60 
61  bool done() {
62  return handler_ == NULL || handler_->address() > limit_;
63  }
64  void Advance() {
65  ASSERT(!done());
66  handler_ = handler_->next();
67  }
68 
69  private:
70  const Address limit_;
71  StackHandler* handler_;
72 };
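// Illustrative sketch (hypothetical helper, not part of the original source):
// StackHandlerIterator is typically used to walk the handlers owned by a
// single frame, as StackFrame::HasHandler does further below.
static int CountHandlersOfFrame(const StackFrame* frame,
                                StackHandler* top_handler) {
  int count = 0;
  for (StackHandlerIterator it(frame, top_handler); !it.done(); it.Advance()) {
    count++;
  }
  return count;
}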
73 
74 
75 // -------------------------------------------------------------------------
76 
77 
78 #define INITIALIZE_SINGLETON(type, field) field##_(this),
79 StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
80  bool can_access_heap_objects)
81  : isolate_(isolate),
82  STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
83  frame_(NULL), handler_(NULL),
84  can_access_heap_objects_(can_access_heap_objects) {
85 }
86 #undef INITIALIZE_SINGLETON
87 
88 
89 StackFrameIterator::StackFrameIterator(Isolate* isolate)
90  : StackFrameIteratorBase(isolate, true) {
91  Reset(isolate->thread_local_top());
92 }
93 
94 
95 StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
96  : StackFrameIteratorBase(isolate, true) {
97  Reset(t);
98 }
99 
100 
101 void StackFrameIterator::Advance() {
102  ASSERT(!done());
103  // Compute the state of the calling frame before restoring
104  // callee-saved registers and unwinding handlers. This allows the
105  // frame code that computes the caller state to access the top
106  // handler and the value of any callee-saved register if needed.
107  StackFrame::State state;
108  StackFrame::Type type = frame_->GetCallerState(&state);
109 
110  // Unwind handlers corresponding to the current frame.
111  StackHandlerIterator it(frame_, handler_);
112  while (!it.done()) it.Advance();
113  handler_ = it.handler();
114 
115  // Advance to the calling frame.
116  frame_ = SingletonFor(type, &state);
117 
118  // When we're done iterating over the stack frames, the handler
119  // chain must have been completely unwound.
120  ASSERT(!done() || handler_ == NULL);
121 }
122 
123 
124 void StackFrameIterator::Reset(ThreadLocalTop* top) {
125  StackFrame::State state;
126  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
127  Isolate::c_entry_fp(top), &state);
128  handler_ = StackHandler::FromAddress(Isolate::handler(top));
129  if (SingletonFor(type) == NULL) return;
130  frame_ = SingletonFor(type, &state);
131 }
132 
133 
134 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
135  StackFrame::State* state) {
136  if (type == StackFrame::NONE) return NULL;
137  StackFrame* result = SingletonFor(type);
138  ASSERT(result != NULL);
139  result->state_ = *state;
140  return result;
141 }
142 
143 
144 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
145 #define FRAME_TYPE_CASE(type, field) \
146  case StackFrame::type: result = &field##_; break;
147 
148  StackFrame* result = NULL;
149  switch (type) {
150  case StackFrame::NONE: return NULL;
151  STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
152  default: break;
153  }
154  return result;
155 
156 #undef FRAME_TYPE_CASE
157 }
158 
159 
160 // -------------------------------------------------------------------------
161 
162 
163 JavaScriptFrameIterator::JavaScriptFrameIterator(
164  Isolate* isolate, StackFrame::Id id)
165  : iterator_(isolate) {
166  while (!done()) {
167  Advance();
168  if (frame()->id() == id) return;
169  }
170 }
171 
172 
173 void JavaScriptFrameIterator::Advance() {
174  do {
175  iterator_.Advance();
176  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
177 }
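// Illustrative sketch (hypothetical helper, not part of the original source):
// walking only the JavaScript frames of the current thread with
// JavaScriptFrameIterator, the same pattern PrintTop uses further below.
static int CountJavaScriptFrames(Isolate* isolate) {
  int count = 0;
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    count++;
  }
  return count;
}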
178 
179 
180 void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
181  if (!frame()->has_adapted_arguments()) return;
182  iterator_.Advance();
183  ASSERT(iterator_.frame()->is_arguments_adaptor());
184 }
185 
186 
187 // -------------------------------------------------------------------------
188 
189 
190 StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
191  : JavaScriptFrameIterator(isolate) {
192  if (!done() && !IsValidFrame()) Advance();
193 }
194 
195 
196 void StackTraceFrameIterator::Advance() {
197  while (true) {
198  JavaScriptFrameIterator::Advance();
199  if (done()) return;
200  if (IsValidFrame()) return;
201  }
202 }
203 
204 
205 bool StackTraceFrameIterator::IsValidFrame() {
206  if (!frame()->function()->IsJSFunction()) return false;
207  Object* script = frame()->function()->shared()->script();
208  // Don't show functions from native scripts to the user.
209  return (script->IsScript() &&
210  Script::TYPE_NATIVE != Script::cast(script)->type()->value());
211 }
212 
213 
214 // -------------------------------------------------------------------------
215 
216 
217 SafeStackFrameIterator::SafeStackFrameIterator(
218  Isolate* isolate,
219  Address fp, Address sp, Address js_entry_sp)
220  : StackFrameIteratorBase(isolate, false),
221  low_bound_(sp),
222  high_bound_(js_entry_sp),
223  top_frame_type_(StackFrame::NONE),
224  external_callback_scope_(isolate->external_callback_scope()) {
225  StackFrame::State state;
226  StackFrame::Type type;
227  ThreadLocalTop* top = isolate->thread_local_top();
228  if (IsValidTop(top)) {
229  type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
230  top_frame_type_ = type;
231  } else if (IsValidStackAddress(fp)) {
232  ASSERT(fp != NULL);
233  state.fp = fp;
234  state.sp = sp;
235  state.pc_address = StackFrame::ResolveReturnAddressLocation(
236  reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
237  // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
238  // we check only that kMarkerOffset is within the stack bounds and do
239  // compile time check that kContextOffset slot is pushed on the stack before
240  // kMarkerOffset.
241  STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
242  StandardFrameConstants::kContextOffset);
243  Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
244  if (IsValidStackAddress(frame_marker)) {
245  type = StackFrame::ComputeType(this, &state);
246  top_frame_type_ = type;
247  } else {
248  // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
249  // The frame will be skipped anyway.
250  type = StackFrame::JAVA_SCRIPT;
251  // Top frame is incomplete so we cannot reliably determine its type.
252  top_frame_type_ = StackFrame::NONE;
253  }
254  } else {
255  return;
256  }
257  if (SingletonFor(type) == NULL) return;
258  frame_ = SingletonFor(type, &state);
259  if (frame_ == NULL) return;
260 
261  Advance();
262 
263  if (frame_ != NULL && !frame_->is_exit() &&
264  external_callback_scope_ != NULL &&
265  external_callback_scope_->scope_address() < frame_->fp()) {
266  // Skip top ExternalCallbackScope if we already advanced to a JS frame
267  // under it. The sampler will pick up this top external callback anyway.
268  external_callback_scope_ = external_callback_scope_->previous();
269  }
270 }
271 
272 
273 bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
274  Address c_entry_fp = Isolate::c_entry_fp(top);
275  if (!IsValidExitFrame(c_entry_fp)) return false;
276  // There should be at least one JS_ENTRY stack handler.
277  Address handler = Isolate::handler(top);
278  if (handler == NULL) return false;
279  // Check that there are no js frames on top of the native frames.
280  return c_entry_fp < handler;
281 }
282 
283 
284 void SafeStackFrameIterator::AdvanceOneFrame() {
285  ASSERT(!done());
286  StackFrame* last_frame = frame_;
287  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
288  // Before advancing to the next stack frame, perform pointer validity tests.
289  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
290  frame_ = NULL;
291  return;
292  }
293 
294  // Advance to the previous frame.
295  StackFrame::State state;
296  StackFrame::Type type = frame_->GetCallerState(&state);
297  frame_ = SingletonFor(type, &state);
298  if (frame_ == NULL) return;
299 
300  // Check that we have actually moved to the previous frame in the stack.
301  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
302  frame_ = NULL;
303  }
304 }
305 
306 
307 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
308  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
309 }
310 
311 
312 bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
313  StackFrame::State state;
314  if (frame->is_entry() || frame->is_entry_construct()) {
315  // See EntryFrame::GetCallerState. It computes the caller FP address
316  // and calls ExitFrame::GetStateForFramePointer on it. We need to be
317  // sure that caller FP address is valid.
318  Address caller_fp = Memory::Address_at(
319  frame->fp() + EntryFrameConstants::kCallerFPOffset);
320  if (!IsValidExitFrame(caller_fp)) return false;
321  } else if (frame->is_arguments_adaptor()) {
322  // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
323  // the number of arguments is stored on the stack as a Smi. We need to
324  // check that it really is a Smi.
325  Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
326  GetExpression(0);
327  if (!number_of_args->IsSmi()) {
328  return false;
329  }
330  }
331  frame->ComputeCallerState(&state);
332  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
333  SingletonFor(frame->GetCallerState(&state)) != NULL;
334 }
335 
336 
337 bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
338  if (!IsValidStackAddress(fp)) return false;
339  Address sp = ExitFrame::ComputeStackPointer(fp);
340  if (!IsValidStackAddress(sp)) return false;
341  StackFrame::State state;
342  ExitFrame::FillState(fp, sp, &state);
343  if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
344  return false;
345  }
346  return *state.pc_address != NULL;
347 }
348 
349 
350 void SafeStackFrameIterator::Advance() {
351  while (true) {
352  AdvanceOneFrame();
353  if (done()) return;
354  if (frame_->is_java_script()) return;
355  if (frame_->is_exit() && external_callback_scope_) {
356  // Some of the EXIT frames may have ExternalCallbackScope allocated on
357  // top of them. In that case the scope corresponds to the first EXIT
358  // frame beneath it. There may be other EXIT frames on top of the
359  // ExternalCallbackScope, just skip them as we cannot collect any useful
360  // information about them.
361  if (external_callback_scope_->scope_address() < frame_->fp()) {
362  Address* callback_address =
363  external_callback_scope_->callback_address();
364  if (*callback_address != NULL) {
365  frame_->state_.pc_address = callback_address;
366  }
367  external_callback_scope_ = external_callback_scope_->previous();
368  ASSERT(external_callback_scope_ == NULL ||
369  external_callback_scope_->scope_address() > frame_->fp());
370  return;
371  }
372  }
373  }
374 }
375 
376 
377 // -------------------------------------------------------------------------
378 
379 
380 Code* StackFrame::GetSafepointData(Isolate* isolate,
381  Address inner_pointer,
382  SafepointEntry* safepoint_entry,
383  unsigned* stack_slots) {
384  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
385  isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
386  if (!entry->safepoint_entry.is_valid()) {
387  entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
388  ASSERT(entry->safepoint_entry.is_valid());
389  } else {
390  ASSERT(entry->safepoint_entry.Equals(
391  entry->code->GetSafepointEntry(inner_pointer)));
392  }
393 
394  // Fill in the results and return the code.
395  Code* code = entry->code;
396  *safepoint_entry = entry->safepoint_entry;
397  *stack_slots = code->stack_slots();
398  return code;
399 }
400 
401 
402 bool StackFrame::HasHandler() const {
403  StackHandlerIterator it(this, top_handler());
404  return !it.done();
405 }
406 
407 
408 #ifdef DEBUG
409 static bool GcSafeCodeContains(HeapObject* object, Address addr);
410 #endif
411 
412 
413 void StackFrame::IteratePc(ObjectVisitor* v,
414  Address* pc_address,
415  Code* holder) {
416  Address pc = *pc_address;
417  ASSERT(GcSafeCodeContains(holder, pc));
418  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
419  Object* code = holder;
420  v->VisitPointer(&code);
421  if (code != holder) {
422  holder = reinterpret_cast<Code*>(code);
423  pc = holder->instruction_start() + pc_offset;
424  *pc_address = pc;
425  }
426 }
427 
428 
429 void StackFrame::SetReturnAddressLocationResolver(
430  ReturnAddressLocationResolver resolver) {
431  ASSERT(return_address_location_resolver_ == NULL);
432  return_address_location_resolver_ = resolver;
433 }
434 
435 
436 StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
437  State* state) {
438  ASSERT(state->fp != NULL);
439  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
440  return ARGUMENTS_ADAPTOR;
441  }
442  // The marker and function offsets overlap. If the marker isn't a
443  // smi then the frame is a JavaScript frame -- and the marker is
444  // really the function.
445  const int offset = StandardFrameConstants::kMarkerOffset;
446  Object* marker = Memory::Object_at(state->fp + offset);
447  if (!marker->IsSmi()) {
448  // If we're using a "safe" stack iterator, we treat optimized
449  // frames as normal JavaScript frames to avoid having to look
450  // into the heap to determine the state. This is safe as long
451  // as nobody tries to GC...
452  if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
453  Code::Kind kind = GetContainingCode(iterator->isolate(),
454  *(state->pc_address))->kind();
455  ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
456  return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
457  }
458  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
459 }
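// Illustrative sketch (hypothetical, not part of the original source): for
// non-JavaScript standard frames the marker slot holds the frame type as a
// Smi, so decoding it is just the inverse of Smi::FromInt on the type value,
// mirroring the return statement above.
static StackFrame::Type DecodeFrameMarker(Object* marker) {
  ASSERT(marker->IsSmi());
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}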
460 
461 
462 #ifdef DEBUG
463 bool StackFrame::can_access_heap_objects() const {
464  return iterator_->can_access_heap_objects_;
465 }
466 #endif
467 
468 
469 StackFrame::Type StackFrame::GetCallerState(State* state) const {
470  ComputeCallerState(state);
471  return ComputeType(iterator_, state);
472 }
473 
474 
475 Address StackFrame::UnpaddedFP() const {
476 #if V8_TARGET_ARCH_IA32
477  if (!is_optimized()) return fp();
478  int32_t alignment_state = Memory::int32_at(
479  fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
480 
481  return (alignment_state == kAlignmentPaddingPushed) ?
482  (fp() + kPointerSize) : fp();
483 #else
484  return fp();
485 #endif
486 }
487 
488 
489 Code* EntryFrame::unchecked_code() const {
490  return isolate()->heap()->js_entry_code();
491 }
492 
493 
494 void EntryFrame::ComputeCallerState(State* state) const {
495  GetCallerState(state);
496 }
497 
498 
499 void EntryFrame::SetCallerFp(Address caller_fp) {
500  const int offset = EntryFrameConstants::kCallerFPOffset;
501  Memory::Address_at(this->fp() + offset) = caller_fp;
502 }
503 
504 
505 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
506  const int offset = EntryFrameConstants::kCallerFPOffset;
507  Address fp = Memory::Address_at(this->fp() + offset);
508  return ExitFrame::GetStateForFramePointer(fp, state);
509 }
510 
511 
512 Code* EntryConstructFrame::unchecked_code() const {
513  return isolate()->heap()->js_construct_entry_code();
514 }
515 
516 
517 Object*& ExitFrame::code_slot() const {
518  const int offset = ExitFrameConstants::kCodeOffset;
519  return Memory::Object_at(fp() + offset);
520 }
521 
522 
523 Code* ExitFrame::unchecked_code() const {
524  return reinterpret_cast<Code*>(code_slot());
525 }
526 
527 
528 void ExitFrame::ComputeCallerState(State* state) const {
529  // Set up the caller state.
530  state->sp = caller_sp();
531  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
532  state->pc_address = ResolveReturnAddressLocation(
533  reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
534  if (FLAG_enable_ool_constant_pool) {
535  state->constant_pool_address = reinterpret_cast<Address*>(
536  fp() + ExitFrameConstants::kConstantPoolOffset);
537  }
538 }
539 
540 
541 void ExitFrame::SetCallerFp(Address caller_fp) {
542  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
543 }
544 
545 
546 void ExitFrame::Iterate(ObjectVisitor* v) const {
547  // The arguments are traversed as part of the expression stack of
548  // the calling frame.
549  IteratePc(v, pc_address(), LookupCode());
550  v->VisitPointer(&code_slot());
551  if (FLAG_enable_ool_constant_pool) {
552  v->VisitPointer(&constant_pool_slot());
553  }
554 }
555 
556 
557 Address ExitFrame::GetCallerStackPointer() const {
558  return fp() + ExitFrameConstants::kCallerSPDisplacement;
559 }
560 
561 
562 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
563  if (fp == 0) return NONE;
564  Address sp = ComputeStackPointer(fp);
565  FillState(fp, sp, state);
566  ASSERT(*state->pc_address != NULL);
567  return EXIT;
568 }
569 
570 
571 Address ExitFrame::ComputeStackPointer(Address fp) {
572  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
573 }
574 
575 
576 void ExitFrame::FillState(Address fp, Address sp, State* state) {
577  state->sp = sp;
578  state->fp = fp;
579  state->pc_address = ResolveReturnAddressLocation(
580  reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
581  state->constant_pool_address =
582  reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
583 }
584 
585 
586 Address StandardFrame::GetExpressionAddress(int n) const {
587  const int offset = StandardFrameConstants::kExpressionsOffset;
588  return fp() + offset - n * kPointerSize;
589 }
590 
591 
592 Object* StandardFrame::GetExpression(Address fp, int index) {
593  return Memory::Object_at(GetExpressionAddress(fp, index));
594 }
595 
596 
597 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
598  const int offset = StandardFrameConstants::kExpressionsOffset;
599  return fp + offset - n * kPointerSize;
600 }
601 
602 
603 int StandardFrame::ComputeExpressionsCount() const {
604  const int offset =
605  StandardFrameConstants::kExpressionsOffset + kPointerSize;
606  Address base = fp() + offset;
607  Address limit = sp();
608  ASSERT(base >= limit); // stack grows downwards
609  // Include register-allocated locals in number of expressions.
610  return static_cast<int>((base - limit) / kPointerSize);
611 }
612 
613 
614 void StandardFrame::ComputeCallerState(State* state) const {
615  state->sp = caller_sp();
616  state->fp = caller_fp();
617  state->pc_address = ResolveReturnAddressLocation(
618  reinterpret_cast<Address*>(ComputePCAddress(fp())));
619  state->constant_pool_address =
620  reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
621 }
622 
623 
624 void StandardFrame::SetCallerFp(Address caller_fp) {
625  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
626  caller_fp;
627 }
628 
629 
630 bool StandardFrame::IsExpressionInsideHandler(int n) const {
631  Address address = GetExpressionAddress(n);
632  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
633  if (it.handler()->includes(address)) return true;
634  }
635  return false;
636 }
637 
638 
639 void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
640  // Make sure that we're not doing "safe" stack frame iteration. We cannot
641  // possibly find pointers in optimized frames in that state.
642  ASSERT(can_access_heap_objects());
643 
644  // Compute the safepoint information.
645  unsigned stack_slots = 0;
646  SafepointEntry safepoint_entry;
647  Code* code = StackFrame::GetSafepointData(
648  isolate(), pc(), &safepoint_entry, &stack_slots);
649  unsigned slot_space = stack_slots * kPointerSize;
650 
651  // Visit the outgoing parameters.
652  Object** parameters_base = &Memory::Object_at(sp());
653  Object** parameters_limit = &Memory::Object_at(
654  fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);
655 
656  // Visit the parameters that may be on top of the saved registers.
657  if (safepoint_entry.argument_count() > 0) {
658  v->VisitPointers(parameters_base,
659  parameters_base + safepoint_entry.argument_count());
660  parameters_base += safepoint_entry.argument_count();
661  }
662 
663  // Skip saved double registers.
664  if (safepoint_entry.has_doubles()) {
665  // Number of doubles not known at snapshot time.
666  ASSERT(!Serializer::enabled());
667  parameters_base += DoubleRegister::NumAllocatableRegisters() *
668  kDoubleSize / kPointerSize;
669  }
670 
671  // Visit the registers that contain pointers if any.
672  if (safepoint_entry.HasRegisters()) {
673  for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
674  if (safepoint_entry.HasRegisterAt(i)) {
675  int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
676  v->VisitPointer(parameters_base + reg_stack_index);
677  }
678  }
679  // Skip the words containing the register values.
680  parameters_base += kNumSafepointRegisters;
681  }
682 
683  // We're done dealing with the register bits.
684  uint8_t* safepoint_bits = safepoint_entry.bits();
685  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
686 
687  // Visit the rest of the parameters.
688  v->VisitPointers(parameters_base, parameters_limit);
689 
690  // Visit pointer spill slots and locals.
691  for (unsigned index = 0; index < stack_slots; index++) {
692  int byte_index = index >> kBitsPerByteLog2;
693  int bit_index = index & (kBitsPerByte - 1);
694  if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
695  v->VisitPointer(parameters_limit + index);
696  }
697  }
698 
699  // Visit the return address in the callee and incoming arguments.
700  IteratePc(v, pc_address(), code);
701 
702  // Visit the context in stub frame and JavaScript frame.
703  // Visit the function in JavaScript frame.
704  Object** fixed_base = &Memory::Object_at(
705  fp() + StandardFrameConstants::kMarkerOffset);
706  Object** fixed_limit = &Memory::Object_at(fp());
707  v->VisitPointers(fixed_base, fixed_limit);
708 }
709 
710 
711 void StubFrame::Iterate(ObjectVisitor* v) const {
712  IterateCompiledFrame(v);
713 }
714 
715 
716 Code* StubFrame::unchecked_code() const {
717  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
718 }
719 
720 
721 Address StubFrame::GetCallerStackPointer() const {
722  return fp() + ExitFrameConstants::kCallerSPDisplacement;
723 }
724 
725 
726 int StubFrame::GetNumberOfIncomingArguments() const {
727  return 0;
728 }
729 
730 
731 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
732 #ifdef DEBUG
733  // Make sure that optimized frames do not contain any stack handlers.
734  StackHandlerIterator it(this, top_handler());
735  ASSERT(it.done());
736 #endif
737 
738  IterateCompiledFrame(v);
739 }
740 
741 
742 void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
743  Memory::Object_at(GetParameterSlot(index)) = value;
744 }
745 
746 
747 bool JavaScriptFrame::IsConstructor() const {
748  Address fp = caller_fp();
749  if (has_adapted_arguments()) {
750  // Skip the arguments adaptor frame and look at the real caller.
751  fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
752  }
753  return IsConstructFrame(fp);
754 }
755 
756 
757 int JavaScriptFrame::GetArgumentsLength() const {
758  // If there is an arguments adaptor frame get the arguments length from it.
759  if (has_adapted_arguments()) {
760  return Smi::cast(GetExpression(caller_fp(), 0))->value();
761  } else {
762  return GetNumberOfIncomingArguments();
763  }
764 }
765 
766 
767 Code* JavaScriptFrame::unchecked_code() const {
768  return function()->code();
769 }
770 
771 
773  ASSERT(can_access_heap_objects() &&
774  isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
775 
776  return function()->shared()->formal_parameter_count();
777 }
778 
779 
780 Address JavaScriptFrame::GetCallerStackPointer() const {
781  return fp() + StandardFrameConstants::kCallerSPOffset;
782 }
783 
784 
785 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
786  ASSERT(functions->length() == 0);
787  functions->Add(function());
788 }
789 
790 
791 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
792  ASSERT(functions->length() == 0);
793  Code* code_pointer = LookupCode();
794  int offset = static_cast<int>(pc() - code_pointer->address());
795  FrameSummary summary(receiver(),
796  function(),
797  code_pointer,
798  offset,
799  IsConstructor());
800  functions->Add(summary);
801 }
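// Illustrative sketch (hypothetical helper, not part of the original source):
// a caller collects FrameSummary entries (one per physical frame here, one per
// inlined frame in OptimizedFrame::Summarize below) and prints them
// top-to-bottom.
static void PrintSummaries(JavaScriptFrame* frame) {
  List<FrameSummary> frames(1);
  frame->Summarize(&frames);
  for (int i = frames.length() - 1; i >= 0; i--) {
    frames[i].Print();
  }
}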
802 
803 
804 void JavaScriptFrame::PrintTop(Isolate* isolate,
805  FILE* file,
806  bool print_args,
807  bool print_line_number) {
808  // constructor calls
809  HandleScope scope(isolate);
810  DisallowHeapAllocation no_allocation;
811  JavaScriptFrameIterator it(isolate);
812  while (!it.done()) {
813  if (it.frame()->is_java_script()) {
814  JavaScriptFrame* frame = it.frame();
815  if (frame->IsConstructor()) PrintF(file, "new ");
816  // function name
817  JSFunction* fun = frame->function();
818  fun->PrintName();
819  Code* js_code = frame->unchecked_code();
820  Address pc = frame->pc();
821  int code_offset =
822  static_cast<int>(pc - js_code->instruction_start());
823  PrintF("+%d", code_offset);
824  SharedFunctionInfo* shared = fun->shared();
825  if (print_line_number) {
826  Code* code = Code::cast(isolate->FindCodeObject(pc));
827  int source_pos = code->SourcePosition(pc);
828  Object* maybe_script = shared->script();
829  if (maybe_script->IsScript()) {
830  Handle<Script> script(Script::cast(maybe_script));
831  int line = GetScriptLineNumberSafe(script, source_pos) + 1;
832  Object* script_name_raw = script->name();
833  if (script_name_raw->IsString()) {
834  String* script_name = String::cast(script->name());
835  SmartArrayPointer<char> c_script_name =
836  script_name->ToCString(DISALLOW_NULLS,
837  ROBUST_STRING_TRAVERSAL);
838  PrintF(file, " at %s:%d", c_script_name.get(), line);
839  } else {
840  PrintF(file, " at <unknown>:%d", line);
841  }
842  } else {
843  PrintF(file, " at <unknown>:<unknown>");
844  }
845  }
846 
847  if (print_args) {
848  // function arguments
849  // (we are intentionally only printing the actually
850  // supplied parameters, not all parameters required)
851  PrintF(file, "(this=");
852  frame->receiver()->ShortPrint(file);
853  const int length = frame->ComputeParametersCount();
854  for (int i = 0; i < length; i++) {
855  PrintF(file, ", ");
856  frame->GetParameter(i)->ShortPrint(file);
857  }
858  PrintF(file, ")");
859  }
860  break;
861  }
862  it.Advance();
863  }
864 }
865 
866 
867 void JavaScriptFrame::SaveOperandStack(FixedArray* store,
868  int* stack_handler_index) const {
869  int operands_count = store->length();
870  ASSERT_LE(operands_count, ComputeOperandsCount());
871 
872  // Visit the stack in LIFO order, saving operands and stack handlers into the
873  // array. The saved stack handlers store a link to the next stack handler,
874  // which will allow RestoreOperandStack to rewind the handlers.
875  StackHandlerIterator it(this, top_handler());
876  int i = operands_count - 1;
877  *stack_handler_index = -1;
878  for (; !it.done(); it.Advance()) {
879  StackHandler* handler = it.handler();
880  // Save operands pushed after the handler was pushed.
881  for (; GetOperandSlot(i) < handler->address(); i--) {
882  store->set(i, GetOperand(i));
883  }
884  ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount);
885  ASSERT_EQ(handler->address(), GetOperandSlot(i));
886  int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
887  handler->Unwind(isolate(), store, next_stack_handler_index,
888  *stack_handler_index);
889  *stack_handler_index = next_stack_handler_index;
890  i -= StackHandlerConstants::kSlotCount;
891  }
892 
893  // Save any remaining operands.
894  for (; i >= 0; i--) {
895  store->set(i, GetOperand(i));
896  }
897 }
898 
899 
900 void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
901  int stack_handler_index) {
902  int operands_count = store->length();
903  ASSERT_LE(operands_count, ComputeOperandsCount());
904  int i = 0;
905  while (i <= stack_handler_index) {
906  if (i < stack_handler_index) {
907  // An operand.
908  ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
909  Memory::Object_at(GetOperandSlot(i)) = store->get(i);
910  i++;
911  } else {
912  // A stack handler.
913  ASSERT_EQ(i, stack_handler_index);
914  // The FixedArray store grows up. The stack grows down. So the operand
915  // slot for i actually points to the bottom of the top word in the
916  // handler. The base of the StackHandler* is the address of the bottom
917  // word, which will be the last slot that is in the handler.
918  int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
919  StackHandler *handler =
920  StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
921  stack_handler_index = handler->Rewind(isolate(), store, i, fp());
922  i += StackHandlerConstants::kSlotCount;
923  }
924  }
925 
926  for (; i < operands_count; i++) {
927  ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
928  Memory::Object_at(GetOperandSlot(i)) = store->get(i);
929  }
930 }
931 
932 
933 void FrameSummary::Print() {
934  PrintF("receiver: ");
935  receiver_->ShortPrint();
936  PrintF("\nfunction: ");
937  function_->shared()->DebugName()->ShortPrint();
938  PrintF("\ncode: ");
939  code_->ShortPrint();
940  if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
941  if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
942  PrintF("\npc: %d\n", offset_);
943 }
944 
945 
946 JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
947  int literal_id) {
948  if (literal_id == Translation::kSelfLiteralId) {
949  return function();
950  }
951 
952  return JSFunction::cast(literal_array->get(literal_id));
953 }
954 
955 
956 void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
957  ASSERT(frames->length() == 0);
958  ASSERT(is_optimized());
959 
960  int deopt_index = Safepoint::kNoDeoptimizationIndex;
961  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
962  FixedArray* literal_array = data->LiteralArray();
963 
964  // BUG(3243555): Since we don't have a lazy-deopt registered at
965  // throw-statements, we can't use the translation at the call-site of
966  // throw. An entry with no deoptimization index indicates a call-site
967  // without a lazy-deopt. As a consequence we are not allowed to inline
968  // functions containing throw.
969  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
970  JavaScriptFrame::Summarize(frames);
971  return;
972  }
973 
974  TranslationIterator it(data->TranslationByteArray(),
975  data->TranslationIndex(deopt_index)->value());
976  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
977  ASSERT(opcode == Translation::BEGIN);
978  it.Next(); // Drop frame count.
979  int jsframe_count = it.Next();
980 
981  // We create the summary in reverse order because the frames
982  // in the deoptimization translation are ordered bottom-to-top.
983  bool is_constructor = IsConstructor();
984  int i = jsframe_count;
985  while (i > 0) {
986  opcode = static_cast<Translation::Opcode>(it.Next());
987  if (opcode == Translation::JS_FRAME) {
988  i--;
989  BailoutId ast_id = BailoutId(it.Next());
990  JSFunction* function = LiteralAt(literal_array, it.Next());
991  it.Next(); // Skip height.
992 
993  // The translation commands are ordered and the receiver is always
994  // at the first position. Since we are always at a call when we need
995  // to construct a stack trace, the receiver is always in a stack slot.
996  opcode = static_cast<Translation::Opcode>(it.Next());
997  ASSERT(opcode == Translation::STACK_SLOT ||
998  opcode == Translation::LITERAL ||
999  opcode == Translation::CAPTURED_OBJECT ||
1000  opcode == Translation::DUPLICATED_OBJECT);
1001  int index = it.Next();
1002 
1003  // Get the correct receiver in the optimized frame.
1004  Object* receiver = NULL;
1005  if (opcode == Translation::LITERAL) {
1006  receiver = data->LiteralArray()->get(index);
1007  } else if (opcode == Translation::STACK_SLOT) {
1008  // Positive index means the value is spilled to the locals
1009  // area. Negative means it is stored in the incoming parameter
1010  // area.
1011  if (index >= 0) {
1012  receiver = GetExpression(index);
1013  } else {
1014  // Index -1 overlaps with last parameter, -n with the first parameter,
1015  // (-n - 1) with the receiver with n being the number of parameters
1016  // of the outermost, optimized frame.
1017  int parameter_count = ComputeParametersCount();
1018  int parameter_index = index + parameter_count;
1019  receiver = (parameter_index == -1)
1020  ? this->receiver()
1021  : this->GetParameter(parameter_index);
1022  }
1023  } else {
1024  // TODO(3029): Materializing a captured object (or duplicated
1025  // object) is hard, we return undefined for now. This breaks the
1026  // produced stack trace, as constructor frames aren't marked as
1027  // such anymore.
1028  receiver = isolate()->heap()->undefined_value();
1029  }
1030 
1031  Code* code = function->shared()->code();
1032  DeoptimizationOutputData* output_data =
1033  DeoptimizationOutputData::cast(code->deoptimization_data());
1034  unsigned entry = Deoptimizer::GetOutputInfo(output_data,
1035  ast_id,
1036  function->shared());
1037  unsigned pc_offset =
1038  FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
1039  ASSERT(pc_offset > 0);
1040 
1041  FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
1042  frames->Add(summary);
1043  is_constructor = false;
1044  } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
1045  // The next encountered JS_FRAME will be marked as a constructor call.
1046  it.Skip(Translation::NumberOfOperandsFor(opcode));
1047  ASSERT(!is_constructor);
1048  is_constructor = true;
1049  } else {
1050  // Skip over operands to advance to the next opcode.
1051  it.Skip(Translation::NumberOfOperandsFor(opcode));
1052  }
1053  }
1054  ASSERT(!is_constructor);
1055 }
1056 
1057 
1058 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
1059  int* deopt_index) {
1060  ASSERT(is_optimized());
1061 
1062  JSFunction* opt_function = function();
1063  Code* code = opt_function->code();
1064 
1065  // The code object may have been replaced by lazy deoptimization. Fall
1066  // back to a slow search in this case to find the original optimized
1067  // code object.
1068  if (!code->contains(pc())) {
1069  code = isolate()->inner_pointer_to_code_cache()->
1070  GcSafeFindCodeForInnerPointer(pc());
1071  }
1072  ASSERT(code != NULL);
1073  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
1074 
1075  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1076  *deopt_index = safepoint_entry.deoptimization_index();
1077  ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);
1078 
1079  return DeoptimizationInputData::cast(code->deoptimization_data());
1080 }
1081 
1082 
1083 int OptimizedFrame::GetInlineCount() {
1084  ASSERT(is_optimized());
1085 
1086  int deopt_index = Safepoint::kNoDeoptimizationIndex;
1087  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1088 
1089  TranslationIterator it(data->TranslationByteArray(),
1090  data->TranslationIndex(deopt_index)->value());
1091  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1092  ASSERT(opcode == Translation::BEGIN);
1093  USE(opcode);
1094  it.Next(); // Drop frame count.
1095  int jsframe_count = it.Next();
1096  return jsframe_count;
1097 }
1098 
1099 
1100 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
1101  ASSERT(functions->length() == 0);
1102  ASSERT(is_optimized());
1103 
1104  int deopt_index = Safepoint::kNoDeoptimizationIndex;
1105  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1106  FixedArray* literal_array = data->LiteralArray();
1107 
1108  TranslationIterator it(data->TranslationByteArray(),
1109  data->TranslationIndex(deopt_index)->value());
1110  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1111  ASSERT(opcode == Translation::BEGIN);
1112  it.Next(); // Drop frame count.
1113  int jsframe_count = it.Next();
1114 
1115  // We insert the frames in reverse order because the frames
1116  // in the deoptimization translation are ordered bottom-to-top.
1117  while (jsframe_count > 0) {
1118  opcode = static_cast<Translation::Opcode>(it.Next());
1119  if (opcode == Translation::JS_FRAME) {
1120  jsframe_count--;
1121  it.Next(); // Skip ast id.
1122  JSFunction* function = LiteralAt(literal_array, it.Next());
1123  it.Next(); // Skip height.
1124  functions->Add(function);
1125  } else {
1126  // Skip over operands to advance to the next opcode.
1127  it.Skip(Translation::NumberOfOperandsFor(opcode));
1128  }
1129  }
1130 }
1131 
1132 
1133 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1134  return Smi::cast(GetExpression(0))->value();
1135 }
1136 
1137 
1138 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
1139  return fp() + StandardFrameConstants::kCallerSPOffset;
1140 }
1141 
1142 
1143 Address InternalFrame::GetCallerStackPointer() const {
1144  // Internal frames have no arguments. The stack pointer of the
1145  // caller is at a fixed offset from the frame pointer.
1146  return fp() + StandardFrameConstants::kCallerSPOffset;
1147 }
1148 
1149 
1150 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1151  return isolate()->builtins()->builtin(
1152  Builtins::kArgumentsAdaptorTrampoline);
1153 }
1154 
1155 
1156 Code* InternalFrame::unchecked_code() const {
1157  const int offset = InternalFrameConstants::kCodeOffset;
1158  Object* code = Memory::Object_at(fp() + offset);
1159  ASSERT(code != NULL);
1160  return reinterpret_cast<Code*>(code);
1161 }
1162 
1163 
1164 void StackFrame::PrintIndex(StringStream* accumulator,
1165  PrintMode mode,
1166  int index) {
1167  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
1168 }
1169 
1170 
1171 void JavaScriptFrame::Print(StringStream* accumulator,
1172  PrintMode mode,
1173  int index) const {
1174  HandleScope scope(isolate());
1175  Object* receiver = this->receiver();
1176  JSFunction* function = this->function();
1177 
1178  accumulator->PrintSecurityTokenIfChanged(function);
1179  PrintIndex(accumulator, mode, index);
1180  Code* code = NULL;
1181  if (IsConstructor()) accumulator->Add("new ");
1182  accumulator->PrintFunction(function, receiver, &code);
1183 
1184  // Get scope information for nicer output, if possible. If code is NULL, or
1185  // doesn't contain scope info, scope_info will return 0 for the number of
1186  // parameters, stack local variables, context local variables, stack slots,
1187  // or context slots.
1188  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate()));
1189 
1190  Handle<SharedFunctionInfo> shared(function->shared());
1191  scope_info = Handle<ScopeInfo>(shared->scope_info());
1192  Object* script_obj = shared->script();
1193  if (script_obj->IsScript()) {
1194  Handle<Script> script(Script::cast(script_obj));
1195  accumulator->Add(" [");
1196  accumulator->PrintName(script->name());
1197 
1198  Address pc = this->pc();
1199  if (code != NULL && code->kind() == Code::FUNCTION &&
1200  pc >= code->instruction_start() && pc < code->instruction_end()) {
1201  int source_pos = code->SourcePosition(pc);
1202  int line = GetScriptLineNumberSafe(script, source_pos) + 1;
1203  accumulator->Add(":%d", line);
1204  } else {
1205  int function_start_pos = shared->start_position();
1206  int line = GetScriptLineNumberSafe(script, function_start_pos) + 1;
1207  accumulator->Add(":~%d", line);
1208  }
1209 
1210  accumulator->Add("] ");
1211  }
1212 
1213  accumulator->Add("(this=%o", receiver);
1214 
1215  // Print the parameters.
1216  int parameters_count = ComputeParametersCount();
1217  for (int i = 0; i < parameters_count; i++) {
1218  accumulator->Add(",");
1219  // If we have a name for the parameter we print it. Nameless
1220  // parameters are either because we have more actual parameters
1221  // than formal parameters or because we have no scope information.
1222  if (i < scope_info->ParameterCount()) {
1223  accumulator->PrintName(scope_info->ParameterName(i));
1224  accumulator->Add("=");
1225  }
1226  accumulator->Add("%o", GetParameter(i));
1227  }
1228 
1229  accumulator->Add(")");
1230  if (mode == OVERVIEW) {
1231  accumulator->Add("\n");
1232  return;
1233  }
1234  if (is_optimized()) {
1235  accumulator->Add(" {\n// optimized frame\n}\n");
1236  return;
1237  }
1238  accumulator->Add(" {\n");
1239 
1240  // Compute the number of locals and expression stack elements.
1241  int stack_locals_count = scope_info->StackLocalCount();
1242  int heap_locals_count = scope_info->ContextLocalCount();
1243  int expressions_count = ComputeExpressionsCount();
1244 
1245  // Print stack-allocated local variables.
1246  if (stack_locals_count > 0) {
1247  accumulator->Add(" // stack-allocated locals\n");
1248  }
1249  for (int i = 0; i < stack_locals_count; i++) {
1250  accumulator->Add(" var ");
1251  accumulator->PrintName(scope_info->StackLocalName(i));
1252  accumulator->Add(" = ");
1253  if (i < expressions_count) {
1254  accumulator->Add("%o", GetExpression(i));
1255  } else {
1256  accumulator->Add("// no expression found - inconsistent frame?");
1257  }
1258  accumulator->Add("\n");
1259  }
1260 
1261  // Try to get hold of the context of this frame.
1262  Context* context = NULL;
1263  if (this->context() != NULL && this->context()->IsContext()) {
1264  context = Context::cast(this->context());
1265  }
1266 
1267  // Print heap-allocated local variables.
1268  if (heap_locals_count > 0) {
1269  accumulator->Add(" // heap-allocated locals\n");
1270  }
1271  for (int i = 0; i < heap_locals_count; i++) {
1272  accumulator->Add(" var ");
1273  accumulator->PrintName(scope_info->ContextLocalName(i));
1274  accumulator->Add(" = ");
1275  if (context != NULL) {
1276  if (i < context->length()) {
1277  accumulator->Add("%o", context->get(Context::MIN_CONTEXT_SLOTS + i));
1278  } else {
1279  accumulator->Add(
1280  "// warning: missing context slot - inconsistent frame?");
1281  }
1282  } else {
1283  accumulator->Add("// warning: no context found - inconsistent frame?");
1284  }
1285  accumulator->Add("\n");
1286  }
1287 
1288  // Print the expression stack.
1289  int expressions_start = stack_locals_count;
1290  if (expressions_start < expressions_count) {
1291  accumulator->Add(" // expression stack (top to bottom)\n");
1292  }
1293  for (int i = expressions_count - 1; i >= expressions_start; i--) {
1294  if (IsExpressionInsideHandler(i)) continue;
1295  accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
1296  }
1297 
1298  // Print details about the function.
1299  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1300  SharedFunctionInfo* shared = function->shared();
1301  accumulator->Add("--------- s o u r c e c o d e ---------\n");
1302  shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
1303  accumulator->Add("\n-----------------------------------------\n");
1304  }
1305 
1306  accumulator->Add("}\n\n");
1307 }
1308 
1309 
1310 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1311  PrintMode mode,
1312  int index) const {
1313  int actual = ComputeParametersCount();
1314  int expected = -1;
1315  JSFunction* function = this->function();
1316  expected = function->shared()->formal_parameter_count();
1317 
1318  PrintIndex(accumulator, mode, index);
1319  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
1320  if (mode == OVERVIEW) {
1321  accumulator->Add("\n");
1322  return;
1323  }
1324  accumulator->Add(" {\n");
1325 
1326  // Print actual arguments.
1327  if (actual > 0) accumulator->Add(" // actual arguments\n");
1328  for (int i = 0; i < actual; i++) {
1329  accumulator->Add(" [%02d] : %o", i, GetParameter(i));
1330  if (expected != -1 && i >= expected) {
1331  accumulator->Add(" // not passed to callee");
1332  }
1333  accumulator->Add("\n");
1334  }
1335 
1336  accumulator->Add("}\n\n");
1337 }
1338 
1339 
1340 void EntryFrame::Iterate(ObjectVisitor* v) const {
1341  StackHandlerIterator it(this, top_handler());
1342  ASSERT(!it.done());
1343  StackHandler* handler = it.handler();
1344  ASSERT(handler->is_js_entry());
1345  handler->Iterate(v, LookupCode());
1346 #ifdef DEBUG
1347  // Make sure that the entry frame does not contain more than one
1348  // stack handler.
1349  it.Advance();
1350  ASSERT(it.done());
1351 #endif
1352  IteratePc(v, pc_address(), LookupCode());
1353 }
1354 
1355 
1356 void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
1357  const int offset = StandardFrameConstants::kLastObjectOffset;
1358  Object** base = &Memory::Object_at(sp());
1359  Object** limit = &Memory::Object_at(fp() + offset) + 1;
1360  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
1361  StackHandler* handler = it.handler();
1362  // Traverse pointers down to - but not including - the next
1363  // handler in the handler chain. Update the base to skip the
1364  // handler and allow the handler to traverse its own pointers.
1365  const Address address = handler->address();
1366  v->VisitPointers(base, reinterpret_cast<Object**>(address));
1367  base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
1368  // Traverse the pointers in the handler itself.
1369  handler->Iterate(v, LookupCode());
1370  }
1371  v->VisitPointers(base, limit);
1372 }
1373 
1374 
1375 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1376  IterateExpressions(v);
1377  IteratePc(v, pc_address(), LookupCode());
1378 }
1379 
1380 
1381 void InternalFrame::Iterate(ObjectVisitor* v) const {
1382  // Internal frames only have object pointers on the expression stack
1383  // as they never have any arguments.
1384  IterateExpressions(v);
1385  IteratePc(v, pc_address(), LookupCode());
1386 }
1387 
1388 
1389 void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
1390  Object** base = &Memory::Object_at(sp());
1391  Object** limit = &Memory::Object_at(fp() +
1392  kFirstRegisterParameterFrameOffset);
1393  v->VisitPointers(base, limit);
1394  base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
1395  const int offset = StandardFrameConstants::kLastObjectOffset;
1396  limit = &Memory::Object_at(fp() + offset) + 1;
1397  v->VisitPointers(base, limit);
1398  IteratePc(v, pc_address(), LookupCode());
1399 }
1400 
1401 
1402 Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
1403  return fp() + StandardFrameConstants::kCallerSPOffset;
1404 }
1405 
1406 
1407 Code* StubFailureTrampolineFrame::unchecked_code() const {
1408  Code* trampoline;
1409  StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
1410  FindCodeInCache(&trampoline, isolate());
1411  if (trampoline->contains(pc())) {
1412  return trampoline;
1413  }
1414 
1415  StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
1416  FindCodeInCache(&trampoline, isolate());
1417  if (trampoline->contains(pc())) {
1418  return trampoline;
1419  }
1420 
1421  UNREACHABLE();
1422  return NULL;
1423 }
1424 
1425 
1426 // -------------------------------------------------------------------------
1427 
1428 
1429 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1430  ASSERT(n >= 0);
1431  for (int i = 0; i <= n; i++) {
1432  while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1433  if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1434  iterator_.Advance();
1435  }
1436  UNREACHABLE();
1437  return NULL;
1438 }
1439 
1440 
1441 // -------------------------------------------------------------------------
1442 
1443 
1444 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1445  MapWord map_word = object->map_word();
1446  return map_word.IsForwardingAddress() ?
1447  map_word.ToForwardingAddress()->map() : map_word.ToMap();
1448 }
1449 
1450 
1451 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1452  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
1453 }
1454 
1455 
1456 #ifdef DEBUG
1457 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
1458  Map* map = GcSafeMapOfCodeSpaceObject(code);
1459  ASSERT(map == code->GetHeap()->code_map());
1460  Address start = code->address();
1461  Address end = code->address() + code->SizeFromMap(map);
1462  return start <= addr && addr < end;
1463 }
1464 #endif
1465 
1466 
1467 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1468  Address inner_pointer) {
1469  Code* code = reinterpret_cast<Code*>(object);
1470  ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
1471  return code;
1472 }
1473 
1474 
1475 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
1476  Address inner_pointer) {
1477  Heap* heap = isolate_->heap();
1478  // Check if the inner pointer points into a large object chunk.
1479  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
1480  if (large_page != NULL) {
1481  return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
1482  }
1483 
1484  // Iterate through the page until we reach the end or find an object starting
1485  // after the inner pointer.
1486  Page* page = Page::FromAddress(inner_pointer);
1487 
1488  Address addr = page->skip_list()->StartFor(inner_pointer);
1489 
1490  Address top = heap->code_space()->top();
1491  Address limit = heap->code_space()->limit();
1492 
1493  while (true) {
1494  if (addr == top && addr != limit) {
1495  addr = limit;
1496  continue;
1497  }
1498 
1499  HeapObject* obj = HeapObject::FromAddress(addr);
1500  int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
1501  Address next_addr = addr + obj_size;
1502  if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
1503  addr = next_addr;
1504  }
1505 }
1506 
1507 
1508 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
1509  InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
1510  isolate_->counters()->pc_to_code()->Increment();
1511  ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
1512  uint32_t hash = ComputeIntegerHash(
1513  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
1514  v8::internal::kZeroHashSeed);
1515  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
1516  InnerPointerToCodeCacheEntry* entry = cache(index);
1517  if (entry->inner_pointer == inner_pointer) {
1518  isolate_->counters()->pc_to_code_cached()->Increment();
1519  ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
1520  } else {
1521  // Because this code may be interrupted by a profiling signal that
1522  // also queries the cache, we cannot update inner_pointer before the code
1523  // has been set. Otherwise, we risk trying to use a cache entry before
1524  // the code has been computed.
1525  entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
1526  entry->safepoint_entry.Reset();
1527  entry->inner_pointer = inner_pointer;
1528  }
1529  return entry;
1530 }
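// Illustrative sketch (hypothetical, not part of the original source): because
// kInnerPointerToCodeCacheSize is a power of two, masking the hash with
// (size - 1) is equivalent to a modulo and selects the cache bucket used above.
static uint32_t CacheBucketFor(uint32_t hash, uint32_t cache_size) {
  ASSERT(IsPowerOf2(cache_size));
  return hash & (cache_size - 1);
}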
1531 
1532 
1533 // -------------------------------------------------------------------------
1534 
1535 
1536 void StackHandler::Unwind(Isolate* isolate,
1537  FixedArray* array,
1538  int offset,
1539  int previous_handler_offset) const {
1540  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
1541  ASSERT_LE(0, offset);
1542  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
1543  // Unwinding a stack handler into an array chains it in the opposite
1544  // direction, re-using the "next" slot as a "previous" link, so that stack
1545  // handlers can be later re-wound in the correct order. Decode the "state"
1546  // slot into "index" and "kind" and store them separately, using the fp slot.
1547  array->set(offset, Smi::FromInt(previous_handler_offset)); // next
1548  array->set(offset + 1, *code_address()); // code
1549  array->set(offset + 2, Smi::FromInt(static_cast<int>(index()))); // state
1550  array->set(offset + 3, *context_address()); // context
1551  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind()))); // fp
1552 
1553  *isolate->handler_address() = next()->address();
1554 }
1555 
1556 
1557 int StackHandler::Rewind(Isolate* isolate,
1558  FixedArray* array,
1559  int offset,
1560  Address fp) {
1561  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
1562  ASSERT_LE(0, offset);
1563  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
1564  Smi* prev_handler_offset = Smi::cast(array->get(offset));
1565  Code* code = Code::cast(array->get(offset + 1));
1566  Smi* smi_index = Smi::cast(array->get(offset + 2));
1567  Object* context = array->get(offset + 3);
1568  Smi* smi_kind = Smi::cast(array->get(offset + 4));
1569 
1570  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
1571  IndexField::encode(static_cast<unsigned>(smi_index->value()));
1572 
1573  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
1574  *isolate->handler_address();
1575  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
1576  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
1577  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
1578  context;
1579  SetFp(address() + StackHandlerConstants::kFPOffset, fp);
1580 
1581  *isolate->handler_address() = address();
1582 
1583  return prev_handler_offset->value();
1584 }
1585 
1586 
1587 // -------------------------------------------------------------------------
1588 
1589 int NumRegs(RegList reglist) {
1590  return CompilerIntrinsics::CountSetBits(reglist);
1591 }
1592 
1593 
1594 struct JSCallerSavedCodeData {
1595  int reg_code[kNumJSCallerSaved];
1596 };
1597 
1598 JSCallerSavedCodeData caller_saved_code_data;
1599 
1600 void SetUpJSCallerSavedCodeData() {
1601  int i = 0;
1602  for (int r = 0; r < kNumRegs; r++)
1603  if ((kJSCallerSaved & (1 << r)) != 0)
1604  caller_saved_code_data.reg_code[i++] = r;
1605 
1606  ASSERT(i == kNumJSCallerSaved);
1607 }
1608 
1609 
1610 int JSCallerSavedCode(int n) {
1611  ASSERT(0 <= n && n < kNumJSCallerSaved);
1612  return caller_saved_code_data.reg_code[n];
1613 }
1614 
1615 
1616 #define DEFINE_WRAPPER(type, field) \
1617 class field##_Wrapper : public ZoneObject { \
1618  public: /* NOLINT */ \
1619  field##_Wrapper(const field& original) : frame_(original) { \
1620  } \
1621  field frame_; \
1622 };
1623 STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
1624 #undef DEFINE_WRAPPER
1625 
1626 static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
1627 #define FRAME_TYPE_CASE(type, field) \
1628  case StackFrame::type: { \
1629  field##_Wrapper* wrapper = \
1630  new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
1631  return &wrapper->frame_; \
1632  }
1633 
1634  switch (frame->type()) {
1635  STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
1636  default: UNREACHABLE();
1637  }
1638 #undef FRAME_TYPE_CASE
1639  return NULL;
1640 }
1641 
1642 
1643 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
1644  ZoneList<StackFrame*> list(10, zone);
1645  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1646  StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
1647  list.Add(frame, zone);
1648  }
1649  return list.ToVector();
1650 }
1651 
1652 
1653 } } // namespace v8::internal
byte * Address
Definition: globals.h:186
virtual int GetNumberOfIncomingArguments() const
Definition: frames.cc:1133
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
Object * context() const
Definition: frames-inl.h:183
virtual Address GetCallerStackPointer() const
Definition: frames.cc:721
static const int kFPOffset
Definition: frames.h:98
Code * builtin(Name name)
Definition: builtins.h:322
Definition: frames.h:61
virtual Code * unchecked_code() const
Definition: frames.cc:716
static DeoptimizationOutputData * cast(Object *obj)
static Object *& Object_at(Address addr)
Definition: v8memory.h:83
static void FillState(Address fp, Address sp, State *state)
Definition: frames.cc:576
virtual Address GetCallerStackPointer() const
Definition: frames.cc:1143
static const int kStateOffset
Definition: frames.h:96
void set(int index, Object *value)
Definition: objects-inl.h:2147
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths true
Definition: flags.cc:208
int GetArgumentsLength() const
Definition: frames.cc:757
void PrintF(const char *format,...)
Definition: v8utils.cc:40
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
Code * GcSafeCastToCode(HeapObject *object, Address inner_pointer)
Definition: frames.cc:1467
static String * cast(Object *obj)
static const int kCodeOffset
Definition: frames.h:95
#define FRAME_TYPE_CASE(type, field)
void SourceCodePrint(StringStream *accumulator, int max_length)
Definition: objects.cc:9957
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
static Type GetStateForFramePointer(Address fp, State *state)
Definition: frames.cc:562
virtual int GetInlineCount()
Definition: frames.cc:1083
virtual Code * unchecked_code() const
Definition: frames.cc:1156
int NumRegs(RegList reglist)
Definition: frames.cc:1589
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:305
static Address ComputeConstantPoolAddress(Address fp)
Definition: frames-inl.h:204
Address GetParameterSlot(int index) const
Definition: frames-inl.h:228
static bool IsArgumentsAdaptorFrame(Address fp)
Definition: frames-inl.h:209
Address GetExpressionAddress(int n) const
Definition: frames.cc:586
void PrintSecurityTokenIfChanged(Object *function)
void RestoreOperandStack(FixedArray *store, int stack_handler_index)
Definition: frames.cc:900
void SaveOperandStack(FixedArray *store, int *stack_handler_index) const
Definition: frames.cc:867
kSerializedDataOffset Object
Definition: objects-inl.h:5016
Address caller_fp() const
Definition: frames-inl.h:189
TypeImpl< ZoneTypeConfig > Type
Vector< StackFrame * > CreateStackMap(Isolate *isolate, Zone *zone)
Definition: frames.cc:1643
Builtins * builtins()
Definition: isolate.h:948
int int32_t
Definition: unicode.cc:47
uint32_t RegList
Definition: frames.h:41
static bool enabled()
Definition: serialize.h:485
static ScopeInfo * Empty(Isolate *isolate)
Definition: scopeinfo.cc:151
virtual void Summarize(List< FrameSummary > *frames)
Definition: frames.cc:791
Object * GetExpression(int index) const
Definition: frames-inl.h:173
virtual void SetCallerFp(Address caller_fp)
Definition: frames.cc:499
const int kBitsPerByteLog2
Definition: globals.h:288
byte * instruction_end()
Definition: objects-inl.h:5862
static Address handler(ThreadLocalTop *thread)
Definition: isolate.h:651
Object *& code_slot() const
Definition: frames.cc:517
int reg_code[kNumJSCallerSaved]
Definition: frames.cc:1595
StackFrameIterator(Isolate *isolate)
Definition: frames.cc:89
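A hedged usage sketch for the StackFrameIterator entry above (v8::internal API, not the public embedder API; member names taken from the index on this page):

// Sketch only: walk every stack frame of an isolate and branch on its type.
void DumpFrameTypes(v8::internal::Isolate* isolate) {
  for (v8::internal::StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    v8::internal::StackFrame* frame = it.frame();
    if (frame->is_java_script()) {
      // JavaScriptFrame::cast(frame) exposes function(), receiver(), etc.
    }
  }
}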
#define ASSERT(condition)
Definition: checks.h:329
const RegList kJSCallerSaved
Definition: frames-arm.h:47
#define ASSERT_GE(v1, v2)
Definition: checks.h:332
static const int kContextOffset
Definition: frames.h:185
int ComputeExpressionsCount() const
Definition: frames.cc:603
static Script * cast(Object *obj)
int SourcePosition(Address pc)
Definition: objects.cc:10383
virtual Code * unchecked_code() const
Definition: frames.cc:489
void IterateExpressions(ObjectVisitor *v) const
Definition: frames.cc:1356
static Context * cast(Object *context)
Definition: contexts.h:244
static const int kNextOffset
Definition: frames.h:94
static const int kCallerFPOffset
Definition: frames.h:188
virtual void ComputeCallerState(State *state) const
Definition: frames.cc:614
int ComputeParametersCount() const
Definition: frames.h:585
static Code * cast(Object *obj)
const int kAlignmentPaddingPushed
Definition: frames-ia32.h:57
Object * FindCodeObject(Address a)
Definition: isolate.cc:2286
virtual void GetFunctions(List< JSFunction * > *functions)
Definition: frames.cc:1100
static Smi * cast(Object *object)
#define STACK_FRAME_TYPE_LIST(V)
Definition: frames.h:159
bool contains(byte *pc)
Definition: objects-inl.h:5892
void Add(Vector< const char > format, Vector< FmtElm > elms)
int CountSetBits(uint64_t value, int width)
int JSCallerSavedCode(int n)
Definition: frames.cc:1610
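The register helpers indexed above fit together as follows; a minimal sketch, assuming only the declarations listed on this page (kJSCallerSaved, NumRegs, JSCallerSavedCode, PrintF):

// kJSCallerSaved is a RegList bitmask; NumRegs() counts its set bits and
// JSCallerSavedCode(n) maps the n-th set bit back to a register code.
void ListJSCallerSavedRegs() {
  using namespace v8::internal;
  int count = NumRegs(kJSCallerSaved);
  for (int i = 0; i < count; i++) {
    PrintF("caller-saved #%d -> register code %d\n", i, JSCallerSavedCode(i));
  }
}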
SmartArrayPointer< char > ToCString(AllowNullsFlag allow_nulls, RobustnessFlag robustness_flag, int offset, int length, int *length_output=0)
Definition: objects.cc:8272
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:731
Object * receiver() const
Definition: frames-inl.h:269
virtual Code * unchecked_code() const
Definition: frames.cc:523
const Register sp
static const int kCallerPCOffset
Definition: frames-arm.h:123
#define UNREACHABLE()
Definition: checks.h:52
DEFINE_string(expose_natives_as, ...) flag definition (expanded macro help text elided)
virtual Address GetCallerStackPointer() const
Definition: frames.cc:1138
DeoptimizationInputData * GetDeoptimizationData(int *deopt_index)
Definition: frames.cc:1058
bool IsConstructor() const
Definition: frames.cc:747
LargePage * FindPage(Address a)
Definition: spaces.cc:3012
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static Address c_entry_fp(ThreadLocalTop *thread)
Definition: isolate.h:648
const int kDoubleSize
Definition: globals.h:266
StackFrame * frame() const
Definition: frames.h:863
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:1381
StackHandler * handler() const
Definition: frames.cc:59
static Address ComputePCAddress(Address fp)
Definition: frames-inl.h:199
static const int kCallerFPOffset
Definition: frames-arm.h:121
byte * instruction_start()
Definition: objects-inl.h:5857
static void PrintTop(Isolate *isolate, FILE *file, bool print_args, bool print_line_number)
Definition: frames.cc:804
const int kPointerSize
Definition: globals.h:268
static Address & Address_at(Address addr)
Definition: v8memory.h:79
int ComputeOperandsCount() const
Definition: frames-inl.h:257
uintptr_t(* ReturnAddressLocationResolver)(uintptr_t return_addr_location)
Definition: v8.h:4494
void SetParameterValue(int index, Object *value) const
Definition: frames.cc:742
static int32_t & int32_at(Address addr)
Definition: v8memory.h:51
DEFINE_bool(code_comments, ...) flag definition (expanded macro help text elided)
static const int kCallerSPOffset
Definition: frames.h:190
static const int kDynamicAlignmentStateOffset
Definition: frames-ia32.h:103
virtual void Print(StringStream *accumulator, PrintMode mode, int index) const
Definition: frames.cc:1171
const Register pc
const int kNumJSCallerSaved
Definition: frames-arm.h:53
SafepointEntry GetSafepointEntry(Address pc)
Definition: objects.cc:10435
int GetScriptLineNumberSafe(Handle< Script > script, int code_pos)
Definition: handles.cc:403
#define ASSERT_LE(v1, v2)
Definition: checks.h:334
virtual void Summarize(List< FrameSummary > *frames)
Definition: frames.cc:956
Code * code
Definition: frames.h:63
static const int kMarkerOffset
Definition: frames.h:184
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:1389
static Address ComputeStackPointer(Address fp)
Definition: frames.cc:571
virtual Code * unchecked_code() const
Definition: frames.cc:767
OldSpace * code_space()
Definition: heap.h:640
virtual Address GetCallerStackPointer() const
Definition: frames.cc:1402
static const int kLastObjectOffset
Definition: frames.h:192
static const int kExpressionsOffset
Definition: frames.h:183
static int GetOutputInfo(DeoptimizationOutputData *data, BailoutId node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:718
const int kBitsPerByte
Definition: globals.h:287
bool IsPowerOf2(T x)
Definition: utils.h:51
static JavaScriptFrame * cast(StackFrame *frame)
Definition: frames.h:635
LargeObjectSpace * lo_space()
Definition: heap.h:646
#define BASE_EMBEDDED
Definition: allocation.h:68
StackTraceFrameIterator(Isolate *isolate)
Definition: frames.cc:190
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:1340
HeapObject * GetObject()
Definition: spaces.h:872
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:1375
virtual Code * unchecked_code() const
Definition: frames.cc:1407
virtual void SetCallerFp(Address caller_fp)
Definition: frames.cc:624
static const int kCallerSPDisplacement
Definition: frames-arm.h:127
virtual Code * unchecked_code() const
Definition: frames.cc:1150
void PrintName(FILE *out=stdout)
Definition: objects.cc:9839
virtual int GetNumberOfIncomingArguments() const
Definition: frames.cc:772
InnerPointerToCodeCache * inner_pointer_to_code_cache()
Definition: isolate.h:912
void IterateCompiledFrame(ObjectVisitor *v) const
Definition: frames.cc:639
void SetUpJSCallerSavedCodeData()
Definition: frames.cc:1600
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:322
static const int kCallerFPOffset
Definition: frames-arm.h:105
InnerPointerToCodeCacheEntry * GetCacheEntry(Address inner_pointer)
Definition: frames.cc:1509
static const int kContextOffset
Definition: frames.h:97
virtual void GetFunctions(List< JSFunction * > *functions)
Definition: frames.cc:785
const int kNumSafepointRegisters
Definition: frames-arm.h:92
Object *& constant_pool_slot() const
Object * GetOperand(int index) const
Definition: frames-inl.h:252
virtual int GetNumberOfIncomingArguments() const
Definition: frames.cc:726
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:546
static const int kHeaderSize
Definition: objects.h:5604
void ShortPrint(FILE *out=stdout)
Definition: objects.cc:1123
virtual Address GetCallerStackPointer() const
Definition: frames.cc:557
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
void PrintFunction(Object *function, Object *receiver, Code **code)
bool IsExpressionInsideHandler(int n) const
Definition: frames.cc:630
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1369
void USE(T)
Definition: globals.h:341
Counters * counters()
Definition: isolate.h:859
virtual void Print(StringStream *accumulator, PrintMode mode, int index) const
Definition: frames.cc:1310
static bool IsConstructFrame(Address fp)
Definition: frames-inl.h:216
void Print(const v8::FunctionCallbackInfo< v8::Value > &args)
JSCallerSavedCodeData caller_saved_code_data
Definition: frames.cc:1598
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:39
Object * get(int index)
Definition: objects-inl.h:2127
static const int kFirstRegisterParameterFrameOffset
Definition: frames.h:773
SafepointEntry safepoint_entry
Definition: frames.h:64
Address GetOperandSlot(int index) const
Definition: frames-inl.h:241
HeapObject * obj
SkipList * skip_list()
Definition: spaces.h:663
StackHandlerIterator(const StackFrame *frame, StackHandler *handler)
Definition: frames.cc:53
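For orientation, a hedged sketch of how the file-local StackHandlerIterator (defined near the top of frames.cc, so usable only within that translation unit) walks a single frame's handler chain:

// Sketch: count the stack handlers belonging to one frame. `top_handler`
// would normally come from the enclosing frame iterator.
int CountHandlers(const v8::internal::StackFrame* frame,
                  v8::internal::StackHandler* top_handler) {
  int n = 0;
  for (v8::internal::StackHandlerIterator it(frame, top_handler);
       !it.done(); it.Advance()) {
    n++;  // it.handler() is the current StackHandler
  }
  return n;
}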
static uintptr_t & uintptr_at(Address addr)
Definition: v8memory.h:71
const Register fp
#define DEFINE_WRAPPER(type, field)
Definition: frames.cc:1616
const int kPCOnStackSize
Definition: globals.h:270
#define INITIALIZE_SINGLETON(type, field)
Definition: frames.cc:78
Code * GcSafeFindCodeForInnerPointer(Address inner_pointer)
Definition: frames.cc:1475
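The inner-pointer lookups indexed above (GetCacheEntry and GcSafeFindCodeForInnerPointer on InnerPointerToCodeCache) support mapping a pc back to its Code object during unwinding; a minimal sketch, assuming the accessors listed here:

// Sketch: map an arbitrary instruction address back to the Code object
// containing it, via the isolate's inner-pointer-to-code cache.
v8::internal::Code* CodeForInnerPointer(v8::internal::Isolate* isolate,
                                        v8::internal::Address pc) {
  return isolate->inner_pointer_to_code_cache()
             ->GcSafeFindCodeForInnerPointer(pc);
}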
virtual void SetCallerFp(Address caller_fp)
Definition: frames.cc:541
SafeStackFrameIterator(Isolate *isolate, Address fp, Address sp, Address js_entry_sp)
Definition: frames.cc:217
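SafeStackFrameIterator, constructed from raw register values as the signature above shows, is the heap-access-free walker used for sampling; a hedged sketch:

// Sketch: profiler-style stack walk. fp/sp/js_entry_sp would come from a
// sampled thread context; no heap objects are dereferenced along the way.
void SampleStack(v8::internal::Isolate* isolate,
                 v8::internal::Address fp, v8::internal::Address sp,
                 v8::internal::Address js_entry_sp) {
  v8::internal::SafeStackFrameIterator it(isolate, fp, sp, js_entry_sp);
  for (; !it.done(); it.Advance()) {
    // Record it.frame()->type() into a tick sample, etc.
  }
}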
ThreadLocalTop * thread_local_top()
Definition: isolate.h:879
virtual void Iterate(ObjectVisitor *v) const
Definition: frames.cc:711
const int kNumRegs
Definition: frames-arm.h:43
Object * GetParameter(int index) const
Definition: frames-inl.h:236
static const int kConstantPoolOffset
Definition: frames-arm.h:115
bool has_adapted_arguments() const
Definition: frames-inl.h:279
Address * handler_address()
Definition: isolate.h:656
Address StartFor(Address addr)
Definition: spaces.h:1033
JSFunction * function() const
Definition: frames-inl.h:284
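Several JavaScriptFrame accessors indexed in this list (function(), receiver(), ComputeParametersCount(), GetParameter()) combine as in the following hedged sketch:

// Sketch: read the callee and incoming arguments of a JavaScript frame.
void InspectJSFrame(v8::internal::JavaScriptFrame* frame) {
  using namespace v8::internal;
  JSFunction* fun = frame->function();         // callee
  Object* recv = frame->receiver();            // the receiver ("this")
  int argc = frame->ComputeParametersCount();  // incoming argument count
  for (int i = 0; i < argc; i++) {
    USE(frame->GetParameter(i));               // i-th incoming argument
  }
  USE(fun);
  USE(recv);
}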
virtual Address GetCallerStackPointer() const
Definition: frames.cc:780
virtual Code * unchecked_code() const
Definition: frames.cc:512
static DeoptimizationInputData * cast(Object *obj)
static JSFunction * cast(Object *obj)