v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
full-codegen-arm64.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_ARM64
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 #include "arm64/code-stubs-arm64.h"
43 #include "arm64/macro-assembler-arm64.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 #define __ ACCESS_MASM(masm_)
49 
50 class JumpPatchSite BASE_EMBEDDED {
51  public:
52  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
53 #ifdef DEBUG
54  info_emitted_ = false;
55 #endif
56  }
57 
58  ~JumpPatchSite() {
59  if (patch_site_.is_bound()) {
60  ASSERT(info_emitted_);
61  } else {
62  ASSERT(reg_.IsNone());
63  }
64  }
65 
66  void EmitJumpIfNotSmi(Register reg, Label* target) {
67  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
68  InstructionAccurateScope scope(masm_, 1);
69  ASSERT(!info_emitted_);
70  ASSERT(reg.Is64Bits());
71  ASSERT(!reg.Is(csp));
72  reg_ = reg;
73  __ bind(&patch_site_);
74  __ tbz(xzr, 0, target); // Always taken before patched.
75  }
76 
77  void EmitJumpIfSmi(Register reg, Label* target) {
78  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
79  InstructionAccurateScope scope(masm_, 1);
80  ASSERT(!info_emitted_);
81  ASSERT(reg.Is64Bits());
82  ASSERT(!reg.Is(csp));
83  reg_ = reg;
84  __ bind(&patch_site_);
85  __ tbnz(xzr, 0, target); // Never taken before patched.
86  }
87 
88  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
89  UseScratchRegisterScope temps(masm_);
90  Register temp = temps.AcquireX();
91  __ Orr(temp, reg1, reg2);
92  EmitJumpIfNotSmi(temp, target);
93  }
94 
95  void EmitPatchInfo() {
96  Assembler::BlockPoolsScope scope(masm_);
97  InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
98 #ifdef DEBUG
99  info_emitted_ = true;
100 #endif
101  }
102 
103  private:
104  MacroAssembler* masm_;
105  Label patch_site_;
106  Register reg_;
107 #ifdef DEBUG
108  bool info_emitted_;
109 #endif
110 };
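// A note on the patching mechanism sketched above: before patching, the
// emitted instruction tests bit 0 of xzr, which is always clear, so the
// EmitJumpIfNotSmi branch is always taken and the EmitJumpIfSmi branch never
// is. EmitPatchInfo records the patch site and reg_ via InlineSmiCheckInfo;
// PatchInlinedSmiCode (ic-arm64.cc) can later rewrite the instruction to test
// bit 0 of reg_ instead, turning it into a real smi-tag check (smis have
// bit 0 clear).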
111 
112 
113 static void EmitStackCheck(MacroAssembler* masm_,
114  int pointers = 0,
115  Register scratch = jssp) {
116  Isolate* isolate = masm_->isolate();
117  Label ok;
118  ASSERT(jssp.Is(__ StackPointer()));
119  ASSERT(scratch.Is(jssp) == (pointers == 0));
120  if (pointers != 0) {
121  __ Sub(scratch, jssp, pointers * kPointerSize);
122  }
123  __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
124  __ B(hs, &ok);
125  PredictableCodeSizeScope predictable(masm_,
126  Assembler::kCallSizeWithRelocation);
127  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
128  __ Bind(&ok);
129 }
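// In short, EmitStackCheck compares the prospective stack pointer (jssp, or
// jssp minus the space about to be reserved when 'pointers' is non-zero)
// against the stack-limit root and calls the StackCheck builtin when it has
// dropped below the limit. The PredictableCodeSizeScope pins the size of the
// call sequence, presumably so the site can be located and patched reliably.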
130 
131 
132 // Generate code for a JS function. On entry to the function the receiver
133 // and arguments have been pushed on the stack left to right. The actual
134 // argument count matches the formal parameter count expected by the
135 // function.
136 //
137 // The live registers are:
138 // - x1: the JS function object being called (i.e. ourselves).
139 // - cp: our context.
140 // - fp: our caller's frame pointer.
141 // - jssp: stack pointer.
142 // - lr: return address.
143 //
144 // The function builds a JS frame. See JavaScriptFrameConstants in
145 // frames-arm.h for its layout.
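// A rough sketch of the frame laid out by the prologue below (stack grows
// down; offsets assume the standard ARM64 JS frame layout):
//
//   [fp + 2 * kPointerSize]  caller SP: incoming arguments and receiver
//   [fp + kPointerSize]      lr (return address)
//   [fp]                     caller's fp
//   [fp - kPointerSize]      cp (context)
//   [fp - 2 * kPointerSize]  x1 (the JSFunction being called)
//   below that               stack locals reserved in "Allocate locals"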
146 void FullCodeGenerator::Generate() {
147  CompilationInfo* info = info_;
148  handler_table_ =
149  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
150 
151  InitializeFeedbackVector();
152 
153  profiling_counter_ = isolate()->factory()->NewCell(
154  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
155  SetFunctionPosition(function());
156  Comment cmnt(masm_, "[ Function compiled by full code generator");
157 
158  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
159 
160 #ifdef DEBUG
161  if (strlen(FLAG_stop_at) > 0 &&
162  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
163  __ Debug("stop-at", __LINE__, BREAK);
164  }
165 #endif
166 
167  // Sloppy mode functions and builtins need to replace the receiver with the
168  // global proxy when called as functions (without an explicit receiver
169  // object).
170  if (info->strict_mode() == SLOPPY && !info->is_native()) {
171  Label ok;
172  int receiver_offset = info->scope()->num_parameters() * kXRegSize;
173  __ Peek(x10, receiver_offset);
174  __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
175 
176  __ Ldr(x10, GlobalObjectMemOperand());
177  __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
178  __ Poke(x10, receiver_offset);
179 
180  __ Bind(&ok);
181  }
182 
183 
184  // Open a frame scope to indicate that there is a frame on the stack.
185  // The MANUAL indicates that the scope shouldn't actually generate code
186  // to set up the frame because we do it manually below.
187  FrameScope frame_scope(masm_, StackFrame::MANUAL);
188 
189  // This call emits the following sequence in a way that can be patched for
190  // code ageing support:
191  // Push(lr, fp, cp, x1);
192  // Add(fp, jssp, 2 * kPointerSize);
193  info->set_prologue_offset(masm_->pc_offset());
194  __ Prologue(BUILD_FUNCTION_FRAME);
195  info->AddNoFrameRange(0, masm_->pc_offset());
196 
197  // Reserve space on the stack for locals.
198  { Comment cmnt(masm_, "[ Allocate locals");
199  int locals_count = info->scope()->num_stack_slots();
200  // Generators allocate locals, if any, in context slots.
201  ASSERT(!info->function()->is_generator() || locals_count == 0);
202 
203  if (locals_count > 0) {
204  if (locals_count >= 128) {
205  EmitStackCheck(masm_, locals_count, x10);
206  }
207  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
208  if (FLAG_optimize_for_size) {
209  __ PushMultipleTimes(x10, locals_count);
210  } else {
211  const int kMaxPushes = 32;
212  if (locals_count >= kMaxPushes) {
213  int loop_iterations = locals_count / kMaxPushes;
214  __ Mov(x3, loop_iterations);
215  Label loop_header;
216  __ Bind(&loop_header);
217  // Do pushes.
218  __ PushMultipleTimes(x10, kMaxPushes);
219  __ Subs(x3, x3, 1);
220  __ B(ne, &loop_header);
221  }
222  int remaining = locals_count % kMaxPushes;
223  // Emit the remaining pushes.
224  __ PushMultipleTimes(x10, remaining);
225  }
226  }
227  }
228 
229  bool function_in_register_x1 = true;
230 
231  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
232  if (heap_slots > 0) {
233  // Argument to NewContext is the function, which is still in x1.
234  Comment cmnt(masm_, "[ Allocate context");
235  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
236  __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
237  __ Push(x1, x10);
238  __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
239  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
240  FastNewContextStub stub(heap_slots);
241  __ CallStub(&stub);
242  } else {
243  __ Push(x1);
244  __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
245  }
246  function_in_register_x1 = false;
247  // Context is returned in x0. It replaces the context passed to us.
248  // It's saved in the stack and kept live in cp.
249  __ Mov(cp, x0);
250  __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
251  // Copy any necessary parameters into the context.
252  int num_parameters = info->scope()->num_parameters();
253  for (int i = 0; i < num_parameters; i++) {
254  Variable* var = scope()->parameter(i);
255  if (var->IsContextSlot()) {
256  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
257  (num_parameters - 1 - i) * kPointerSize;
258  // Load parameter from stack.
259  __ Ldr(x10, MemOperand(fp, parameter_offset));
260  // Store it in the context.
261  MemOperand target = ContextMemOperand(cp, var->index());
262  __ Str(x10, target);
263 
264  // Update the write barrier.
265  __ RecordWriteContextSlot(
266  cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
267  }
268  }
269  }
270 
271  Variable* arguments = scope()->arguments();
272  if (arguments != NULL) {
273  // Function uses arguments object.
274  Comment cmnt(masm_, "[ Allocate arguments object");
275  if (!function_in_register_x1) {
276  // Load this again, if it's used by the local context below.
277  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
278  } else {
279  __ Mov(x3, x1);
280  }
281  // Receiver is just before the parameters on the caller's stack.
282  int num_parameters = info->scope()->num_parameters();
283  int offset = num_parameters * kPointerSize;
284  __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
285  __ Mov(x1, Smi::FromInt(num_parameters));
286  __ Push(x3, x2, x1);
287 
288  // Arguments to ArgumentsAccessStub:
289  // function, receiver address, parameter count.
290  // The stub will rewrite receiver and parameter count if the previous
291  // stack frame was an arguments adapter frame.
292  ArgumentsAccessStub::Type type;
293  if (strict_mode() == STRICT) {
294  type = ArgumentsAccessStub::NEW_STRICT;
295  } else if (function()->has_duplicate_parameters()) {
296  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
297  } else {
298  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
299  }
300  ArgumentsAccessStub stub(type);
301  __ CallStub(&stub);
302 
303  SetVar(arguments, x0, x1, x2);
304  }
305 
306  if (FLAG_trace) {
307  __ CallRuntime(Runtime::kTraceEnter, 0);
308  }
309 
310 
311  // Visit the declarations and body unless there is an illegal
312  // redeclaration.
313  if (scope()->HasIllegalRedeclaration()) {
314  Comment cmnt(masm_, "[ Declarations");
315  scope()->VisitIllegalRedeclaration(this);
316 
317  } else {
318  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
319  { Comment cmnt(masm_, "[ Declarations");
320  if (scope()->is_function_scope() && scope()->function() != NULL) {
321  VariableDeclaration* function = scope()->function();
322  ASSERT(function->proxy()->var()->mode() == CONST ||
323  function->proxy()->var()->mode() == CONST_LEGACY);
324  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
325  VisitVariableDeclaration(function);
326  }
327  VisitDeclarations(scope()->declarations());
328  }
329  }
330 
331  { Comment cmnt(masm_, "[ Stack check");
332  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
333  EmitStackCheck(masm_);
334  }
335 
336  { Comment cmnt(masm_, "[ Body");
337  ASSERT(loop_depth() == 0);
338  VisitStatements(function()->body());
339  ASSERT(loop_depth() == 0);
340  }
341 
342  // Always emit a 'return undefined' in case control fell off the end of
343  // the body.
344  { Comment cmnt(masm_, "[ return <undefined>;");
345  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
346  }
347  EmitReturnSequence();
348 
349  // Force emission of the pools, so they don't get emitted in the middle
350  // of the back edge table.
351  masm()->CheckVeneerPool(true, false);
352  masm()->CheckConstPool(true, false);
353 }
354 
355 
356 void FullCodeGenerator::ClearAccumulator() {
357  __ Mov(x0, Smi::FromInt(0));
358 }
359 
360 
361 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
362  __ Mov(x2, Operand(profiling_counter_));
363  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
364  __ Subs(x3, x3, Smi::FromInt(delta));
365  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
366 }
367 
368 
369 void FullCodeGenerator::EmitProfilingCounterReset() {
370  int reset_value = FLAG_interrupt_budget;
371  if (isolate()->IsDebuggerActive()) {
372  // Detect debug break requests as soon as possible.
373  reset_value = FLAG_interrupt_budget >> 4;
374  }
375  __ Mov(x2, Operand(profiling_counter_));
376  __ Mov(x3, Smi::FromInt(reset_value));
377  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
378 }
379 
380 
381 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
382  Label* back_edge_target) {
383  ASSERT(jssp.Is(__ StackPointer()));
384  Comment cmnt(masm_, "[ Back edge bookkeeping");
385  // Block literal pools whilst emitting back edge code.
386  Assembler::BlockPoolsScope block_const_pool(masm_);
387  Label ok;
388 
389  ASSERT(back_edge_target->is_bound());
390  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
391  int weight = Min(kMaxBackEdgeWeight,
392  Max(1, distance / kCodeSizeMultiplier));
393  EmitProfilingCounterDecrement(weight);
394  __ B(pl, &ok);
395  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
396 
397  // Record a mapping of this PC offset to the OSR id. This is used to find
398  // the AST id from the unoptimized code in order to use it as a key into
399  // the deoptimization input data found in the optimized code.
400  RecordBackEdge(stmt->OsrEntryId());
401 
402  EmitProfilingCounterReset();
403 
404  __ Bind(&ok);
405  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
406  // Record a mapping of the OSR id to this PC. This is used if the OSR
407  // entry becomes the target of a bailout. We don't expect it to be, but
408  // we want it to work if it is.
409  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
410 }
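// In outline: every back edge decrements the profiling counter by a weight
// proportional to the loop body size (distance / kCodeSizeMultiplier, capped
// at kMaxBackEdgeWeight). When the counter underflows, the InterruptCheck
// builtin runs, giving the runtime a chance to handle interrupts or trigger
// on-stack replacement; the PC-to-OSR-id mapping recorded here is what lets
// the optimizer relate this loop back to its AST id.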
411 
412 
413 void FullCodeGenerator::EmitReturnSequence() {
414  Comment cmnt(masm_, "[ Return sequence");
415 
416  if (return_label_.is_bound()) {
417  __ B(&return_label_);
418 
419  } else {
420  __ Bind(&return_label_);
421  if (FLAG_trace) {
422  // Push the return value on the stack as the parameter.
423  // Runtime::TraceExit returns its parameter in x0.
424  __ Push(result_register());
425  __ CallRuntime(Runtime::kTraceExit, 1);
426  ASSERT(x0.Is(result_register()));
427  }
428  // Pretend that the exit is a backwards jump to the entry.
429  int weight = 1;
430  if (info_->ShouldSelfOptimize()) {
431  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
432  } else {
433  int distance = masm_->pc_offset();
434  weight = Min(kMaxBackEdgeWeight,
435  Max(1, distance / kCodeSizeMultiplier));
436  }
437  EmitProfilingCounterDecrement(weight);
438  Label ok;
439  __ B(pl, &ok);
440  __ Push(x0);
441  __ Call(isolate()->builtins()->InterruptCheck(),
442  RelocInfo::CODE_TARGET);
443  __ Pop(x0);
444  EmitProfilingCounterReset();
445  __ Bind(&ok);
446 
447  // Make sure that the constant pool is not emitted inside of the return
448  // sequence. This sequence can get patched when the debugger is used. See
449  // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
450  {
451  InstructionAccurateScope scope(masm_,
452  Assembler::kJSRetSequenceInstructions);
453  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
454  __ RecordJSReturn();
455  // This code is generated using Assembler methods rather than Macro
456  // Assembler methods because it will be patched later on, and so the size
457  // of the generated code must be consistent.
458  const Register& current_sp = __ StackPointer();
459  // Nothing ensures 16 bytes alignment here.
460  ASSERT(!current_sp.Is(csp));
461  __ mov(current_sp, fp);
462  int no_frame_start = masm_->pc_offset();
463  __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
464  // Drop the arguments and receiver and return.
465  // TODO(all): This implementation is overkill as it supports 2**31+1
466  // arguments, consider how to improve it without creating a security
467  // hole.
468  __ LoadLiteral(ip0, 3 * kInstructionSize);
469  __ add(current_sp, current_sp, ip0);
470  __ ret();
471  __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
472  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
473  }
474  }
475 }
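// The body of the return sequence above is emitted with raw Assembler
// instructions inside an InstructionAccurateScope so its size stays fixed;
// the debugger patches exactly this range when setting a break at the return
// site. The number of bytes to drop (arguments plus receiver) is stored as a
// 64-bit literal after the ret (dc64) and fetched with LoadLiteral, keeping
// the sequence length independent of the parameter count.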
476 
477 
478 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
479  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
480 }
481 
482 
483 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
484  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
485  codegen()->GetVar(result_register(), var);
486 }
487 
488 
489 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
490  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
491  codegen()->GetVar(result_register(), var);
492  __ Push(result_register());
493 }
494 
495 
496 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
497  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
498  // For simplicity we always test the accumulator register.
499  codegen()->GetVar(result_register(), var);
500  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
501  codegen()->DoTest(this);
502 }
503 
504 
505 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
506  // Root values have no side effects.
507 }
508 
509 
510 void FullCodeGenerator::AccumulatorValueContext::Plug(
511  Heap::RootListIndex index) const {
512  __ LoadRoot(result_register(), index);
513 }
514 
515 
516 void FullCodeGenerator::StackValueContext::Plug(
517  Heap::RootListIndex index) const {
518  __ LoadRoot(result_register(), index);
519  __ Push(result_register());
520 }
521 
522 
523 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
524  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
525  false_label_);
526  if (index == Heap::kUndefinedValueRootIndex ||
527  index == Heap::kNullValueRootIndex ||
528  index == Heap::kFalseValueRootIndex) {
529  if (false_label_ != fall_through_) __ B(false_label_);
530  } else if (index == Heap::kTrueValueRootIndex) {
531  if (true_label_ != fall_through_) __ B(true_label_);
532  } else {
533  __ LoadRoot(result_register(), index);
534  codegen()->DoTest(this);
535  }
536 }
537 
538 
539 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
540 }
541 
542 
543 void FullCodeGenerator::AccumulatorValueContext::Plug(
544  Handle<Object> lit) const {
545  __ Mov(result_register(), Operand(lit));
546 }
547 
548 
549 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
550  // Immediates cannot be pushed directly.
551  __ Mov(result_register(), Operand(lit));
552  __ Push(result_register());
553 }
554 
555 
556 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
557  codegen()->PrepareForBailoutBeforeSplit(condition(),
558  true,
559  true_label_,
560  false_label_);
561  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
562  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
563  if (false_label_ != fall_through_) __ B(false_label_);
564  } else if (lit->IsTrue() || lit->IsJSObject()) {
565  if (true_label_ != fall_through_) __ B(true_label_);
566  } else if (lit->IsString()) {
567  if (String::cast(*lit)->length() == 0) {
568  if (false_label_ != fall_through_) __ B(false_label_);
569  } else {
570  if (true_label_ != fall_through_) __ B(true_label_);
571  }
572  } else if (lit->IsSmi()) {
573  if (Smi::cast(*lit)->value() == 0) {
574  if (false_label_ != fall_through_) __ B(false_label_);
575  } else {
576  if (true_label_ != fall_through_) __ B(true_label_);
577  }
578  } else {
579  // For simplicity we always test the accumulator register.
580  __ Mov(result_register(), Operand(lit));
581  codegen()->DoTest(this);
582  }
583 }
584 
585 
586 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
587  Register reg) const {
588  ASSERT(count > 0);
589  __ Drop(count);
590 }
591 
592 
593 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
594  int count,
595  Register reg) const {
596  ASSERT(count > 0);
597  __ Drop(count);
598  __ Move(result_register(), reg);
599 }
600 
601 
602 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
603  Register reg) const {
604  ASSERT(count > 0);
605  if (count > 1) __ Drop(count - 1);
606  __ Poke(reg, 0);
607 }
608 
609 
610 void FullCodeGenerator::TestContext::DropAndPlug(int count,
611  Register reg) const {
612  ASSERT(count > 0);
613  // For simplicity we always test the accumulator register.
614  __ Drop(count);
615  __ Mov(result_register(), reg);
616  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
617  codegen()->DoTest(this);
618 }
619 
620 
621 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
622  Label* materialize_false) const {
623  ASSERT(materialize_true == materialize_false);
624  __ Bind(materialize_true);
625 }
626 
627 
628 void FullCodeGenerator::AccumulatorValueContext::Plug(
629  Label* materialize_true,
630  Label* materialize_false) const {
631  Label done;
632  __ Bind(materialize_true);
633  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
634  __ B(&done);
635  __ Bind(materialize_false);
636  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
637  __ Bind(&done);
638 }
639 
640 
641 void FullCodeGenerator::StackValueContext::Plug(
642  Label* materialize_true,
643  Label* materialize_false) const {
644  Label done;
645  __ Bind(materialize_true);
646  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
647  __ B(&done);
648  __ Bind(materialize_false);
649  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
650  __ Bind(&done);
651  __ Push(x10);
652 }
653 
654 
655 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
656  Label* materialize_false) const {
657  ASSERT(materialize_true == true_label_);
658  ASSERT(materialize_false == false_label_);
659 }
660 
661 
662 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
663 }
664 
665 
666 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
667  Heap::RootListIndex value_root_index =
668  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
669  __ LoadRoot(result_register(), value_root_index);
670 }
671 
672 
673 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
674  Heap::RootListIndex value_root_index =
675  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
676  __ LoadRoot(x10, value_root_index);
677  __ Push(x10);
678 }
679 
680 
681 void FullCodeGenerator::TestContext::Plug(bool flag) const {
682  codegen()->PrepareForBailoutBeforeSplit(condition(),
683  true,
684  true_label_,
685  false_label_);
686  if (flag) {
687  if (true_label_ != fall_through_) {
688  __ B(true_label_);
689  }
690  } else {
691  if (false_label_ != fall_through_) {
692  __ B(false_label_);
693  }
694  }
695 }
696 
697 
698 void FullCodeGenerator::DoTest(Expression* condition,
699  Label* if_true,
700  Label* if_false,
701  Label* fall_through) {
702  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
703  CallIC(ic, condition->test_id());
704  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
705 }
706 
707 
708 // If (cond), branch to if_true.
709 // If (!cond), branch to if_false.
710 // fall_through is used as an optimization in cases where only one branch
711 // instruction is necessary.
712 void FullCodeGenerator::Split(Condition cond,
713  Label* if_true,
714  Label* if_false,
715  Label* fall_through) {
716  if (if_false == fall_through) {
717  __ B(cond, if_true);
718  } else if (if_true == fall_through) {
719  ASSERT(if_false != fall_through);
720  __ B(InvertCondition(cond), if_false);
721  } else {
722  __ B(cond, if_true);
723  __ B(if_false);
724  }
725 }
726 
727 
728 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
729  // Offset is negative because higher indexes are at lower addresses.
730  int offset = -var->index() * kXRegSize;
731  // Adjust by a (parameter or local) base offset.
732  if (var->IsParameter()) {
733  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
734  } else {
735  offset += JavaScriptFrameConstants::kLocal0Offset;
736  }
737  return MemOperand(fp, offset);
738 }
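// Put differently (assuming variable indices count up from 0 and a function
// with n parameters):
//   parameter i -> MemOperand(fp, (n + 1 - i) * kPointerSize)   // caller's area
//   local j     -> MemOperand(fp, kLocal0Offset - j * kXRegSize)  // below fp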
739 
740 
741 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
742  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
743  if (var->IsContextSlot()) {
744  int context_chain_length = scope()->ContextChainLength(var->scope());
745  __ LoadContext(scratch, context_chain_length);
746  return ContextMemOperand(scratch, var->index());
747  } else {
748  return StackOperand(var);
749  }
750 }
751 
752 
753 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
754  // Use destination as scratch.
755  MemOperand location = VarOperand(var, dest);
756  __ Ldr(dest, location);
757 }
758 
759 
760 void FullCodeGenerator::SetVar(Variable* var,
761  Register src,
762  Register scratch0,
763  Register scratch1) {
764  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
765  ASSERT(!AreAliased(src, scratch0, scratch1));
766  MemOperand location = VarOperand(var, scratch0);
767  __ Str(src, location);
768 
769  // Emit the write barrier code if the location is in the heap.
770  if (var->IsContextSlot()) {
771  // scratch0 contains the correct context.
772  __ RecordWriteContextSlot(scratch0,
773  location.offset(),
774  src,
775  scratch1,
776  kLRHasBeenSaved,
777  kDontSaveFPRegs);
778  }
779 }
780 
781 
782 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
783  bool should_normalize,
784  Label* if_true,
785  Label* if_false) {
786  // Only prepare for bailouts before splits if we're in a test
787  // context. Otherwise, we let the Visit function deal with the
788  // preparation to avoid preparing with the same AST id twice.
789  if (!context()->IsTest() || !info_->IsOptimizable()) return;
790 
791  // TODO(all): Investigate to see if there is something to work on here.
792  Label skip;
793  if (should_normalize) {
794  __ B(&skip);
795  }
796  PrepareForBailout(expr, TOS_REG);
797  if (should_normalize) {
798  __ CompareRoot(x0, Heap::kTrueValueRootIndex);
799  Split(eq, if_true, if_false, NULL);
800  __ Bind(&skip);
801  }
802 }
803 
804 
805 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
806  // The variable in the declaration always resides in the current function
807  // context.
808  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
809  if (generate_debug_code_) {
810  // Check that we're not inside a with or catch context.
811  __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
812  __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
813  __ Check(ne, kDeclarationInWithContext);
814  __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
815  __ Check(ne, kDeclarationInCatchContext);
816  }
817 }
818 
819 
820 void FullCodeGenerator::VisitVariableDeclaration(
821  VariableDeclaration* declaration) {
822  // If it was not possible to allocate the variable at compile time, we
823  // need to "declare" it at runtime to make sure it actually exists in the
824  // local context.
825  VariableProxy* proxy = declaration->proxy();
826  VariableMode mode = declaration->mode();
827  Variable* variable = proxy->var();
828  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
829 
830  switch (variable->location()) {
831  case Variable::UNALLOCATED:
832  globals_->Add(variable->name(), zone());
833  globals_->Add(variable->binding_needs_init()
834  ? isolate()->factory()->the_hole_value()
835  : isolate()->factory()->undefined_value(),
836  zone());
837  break;
838 
839  case Variable::PARAMETER:
840  case Variable::LOCAL:
841  if (hole_init) {
842  Comment cmnt(masm_, "[ VariableDeclaration");
843  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
844  __ Str(x10, StackOperand(variable));
845  }
846  break;
847 
848  case Variable::CONTEXT:
849  if (hole_init) {
850  Comment cmnt(masm_, "[ VariableDeclaration");
851  EmitDebugCheckDeclarationContext(variable);
852  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
853  __ Str(x10, ContextMemOperand(cp, variable->index()));
854  // No write barrier since the_hole_value is in old space.
855  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
856  }
857  break;
858 
859  case Variable::LOOKUP: {
860  Comment cmnt(masm_, "[ VariableDeclaration");
861  __ Mov(x2, Operand(variable->name()));
862  // Declaration nodes are always introduced in one of four modes.
863  ASSERT(IsDeclaredVariableMode(mode));
864  PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
865  : NONE;
866  __ Mov(x1, Smi::FromInt(attr));
867  // Push initial value, if any.
868  // Note: For variables we must not push an initial value (such as
869  // 'undefined') because we may have a (legal) redeclaration and we
870  // must not destroy the current value.
871  if (hole_init) {
872  __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
873  __ Push(cp, x2, x1, x0);
874  } else {
875  // Pushing 0 (xzr) indicates no initial value.
876  __ Push(cp, x2, x1, xzr);
877  }
878  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
879  break;
880  }
881  }
882 }
883 
884 
885 void FullCodeGenerator::VisitFunctionDeclaration(
886  FunctionDeclaration* declaration) {
887  VariableProxy* proxy = declaration->proxy();
888  Variable* variable = proxy->var();
889  switch (variable->location()) {
890  case Variable::UNALLOCATED: {
891  globals_->Add(variable->name(), zone());
892  Handle<SharedFunctionInfo> function =
893  Compiler::BuildFunctionInfo(declaration->fun(), script());
894  // Check for stack overflow exception.
895  if (function.is_null()) return SetStackOverflow();
896  globals_->Add(function, zone());
897  break;
898  }
899 
900  case Variable::PARAMETER:
901  case Variable::LOCAL: {
902  Comment cmnt(masm_, "[ Function Declaration");
903  VisitForAccumulatorValue(declaration->fun());
904  __ Str(result_register(), StackOperand(variable));
905  break;
906  }
907 
908  case Variable::CONTEXT: {
909  Comment cmnt(masm_, "[ Function Declaration");
910  EmitDebugCheckDeclarationContext(variable);
911  VisitForAccumulatorValue(declaration->fun());
912  __ Str(result_register(), ContextMemOperand(cp, variable->index()));
913  int offset = Context::SlotOffset(variable->index());
914  // We know that we have written a function, which is not a smi.
915  __ RecordWriteContextSlot(cp,
916  offset,
917  result_register(),
918  x2,
919  kLRHasBeenSaved,
920  kDontSaveFPRegs,
921  EMIT_REMEMBERED_SET,
922  OMIT_SMI_CHECK);
923  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
924  break;
925  }
926 
927  case Variable::LOOKUP: {
928  Comment cmnt(masm_, "[ Function Declaration");
929  __ Mov(x2, Operand(variable->name()));
930  __ Mov(x1, Smi::FromInt(NONE));
931  __ Push(cp, x2, x1);
932  // Push initial value for function declaration.
933  VisitForStackValue(declaration->fun());
934  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
935  break;
936  }
937  }
938 }
939 
940 
941 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
942  Variable* variable = declaration->proxy()->var();
943  ASSERT(variable->location() == Variable::CONTEXT);
944  ASSERT(variable->interface()->IsFrozen());
945 
946  Comment cmnt(masm_, "[ ModuleDeclaration");
947  EmitDebugCheckDeclarationContext(variable);
948 
949  // Load instance object.
950  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
951  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
952  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
953 
954  // Assign it.
955  __ Str(x1, ContextMemOperand(cp, variable->index()));
956  // We know that we have written a module, which is not a smi.
957  __ RecordWriteContextSlot(cp,
958  Context::SlotOffset(variable->index()),
959  x1,
960  x3,
961  kLRHasBeenSaved,
962  kDontSaveFPRegs,
963  EMIT_REMEMBERED_SET,
964  OMIT_SMI_CHECK);
965  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
966 
967  // Traverse info body.
968  Visit(declaration->module());
969 }
970 
971 
972 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
973  VariableProxy* proxy = declaration->proxy();
974  Variable* variable = proxy->var();
975  switch (variable->location()) {
976  case Variable::UNALLOCATED:
977  // TODO(rossberg)
978  break;
979 
980  case Variable::CONTEXT: {
981  Comment cmnt(masm_, "[ ImportDeclaration");
982  EmitDebugCheckDeclarationContext(variable);
983  // TODO(rossberg)
984  break;
985  }
986 
987  case Variable::PARAMETER:
988  case Variable::LOCAL:
989  case Variable::LOOKUP:
990  UNREACHABLE();
991  }
992 }
993 
994 
995 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
996  // TODO(rossberg)
997 }
998 
999 
1000 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1001  // Call the runtime to declare the globals.
1002  __ Mov(x11, Operand(pairs));
1003  Register flags = xzr;
1004  if (Smi::FromInt(DeclareGlobalsFlags())) {
1005  flags = x10;
1006  __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
1007  }
1008  __ Push(cp, x11, flags);
1009  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
1010  // Return value is ignored.
1011 }
1012 
1013 
1014 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1015  // Call the runtime to declare the modules.
1016  __ Push(descriptions);
1017  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
1018  // Return value is ignored.
1019 }
1020 
1021 
1022 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1023  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
1024  Comment cmnt(masm_, "[ SwitchStatement");
1025  Breakable nested_statement(this, stmt);
1026  SetStatementPosition(stmt);
1027 
1028  // Keep the switch value on the stack until a case matches.
1029  VisitForStackValue(stmt->tag());
1030  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1031 
1032  ZoneList<CaseClause*>* clauses = stmt->cases();
1033  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1034 
1035  Label next_test; // Recycled for each test.
1036  // Compile all the tests with branches to their bodies.
1037  for (int i = 0; i < clauses->length(); i++) {
1038  CaseClause* clause = clauses->at(i);
1039  clause->body_target()->Unuse();
1040 
1041  // The default is not a test, but remember it as final fall through.
1042  if (clause->is_default()) {
1043  default_clause = clause;
1044  continue;
1045  }
1046 
1047  Comment cmnt(masm_, "[ Case comparison");
1048  __ Bind(&next_test);
1049  next_test.Unuse();
1050 
1051  // Compile the label expression.
1052  VisitForAccumulatorValue(clause->label());
1053 
1054  // Perform the comparison as if via '==='.
1055  __ Peek(x1, 0); // Switch value.
1056 
1057  JumpPatchSite patch_site(masm_);
1058  if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1059  Label slow_case;
1060  patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1061  __ Cmp(x1, x0);
1062  __ B(ne, &next_test);
1063  __ Drop(1); // Switch value is no longer needed.
1064  __ B(clause->body_target());
1065  __ Bind(&slow_case);
1066  }
1067 
1068  // Record position before stub call for type feedback.
1069  SetSourcePosition(clause->position());
1070  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1071  CallIC(ic, clause->CompareId());
1072  patch_site.EmitPatchInfo();
1073 
1074  Label skip;
1075  __ B(&skip);
1076  PrepareForBailout(clause, TOS_REG);
1077  __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1078  __ Drop(1);
1079  __ B(clause->body_target());
1080  __ Bind(&skip);
1081 
1082  __ Cbnz(x0, &next_test);
1083  __ Drop(1); // Switch value is no longer needed.
1084  __ B(clause->body_target());
1085  }
1086 
1087  // Discard the test value and jump to the default if present, otherwise to
1088  // the end of the statement.
1089  __ Bind(&next_test);
1090  __ Drop(1); // Switch value is no longer needed.
1091  if (default_clause == NULL) {
1092  __ B(nested_statement.break_label());
1093  } else {
1094  __ B(default_clause->body_target());
1095  }
1096 
1097  // Compile all the case bodies.
1098  for (int i = 0; i < clauses->length(); i++) {
1099  Comment cmnt(masm_, "[ Case body");
1100  CaseClause* clause = clauses->at(i);
1101  __ Bind(clause->body_target());
1102  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1103  VisitStatements(clause->statements());
1104  }
1105 
1106  __ Bind(nested_statement.break_label());
1107  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1108 }
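// Note on the case comparisons above: before the JumpPatchSite is patched,
// the either-not-smi jump is always taken, so every comparison goes through
// the CompareIC. Once the IC has seen smi operands it can use the information
// emitted by EmitPatchInfo to patch the site and enable the inline Cmp/B(ne)
// fast path for subsequent smi-smi comparisons.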
1109 
1110 
1111 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1112  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1113  Comment cmnt(masm_, "[ ForInStatement");
1114  int slot = stmt->ForInFeedbackSlot();
1115  // TODO(all): This visitor probably needs better comments and a revisit.
1116  SetStatementPosition(stmt);
1117 
1118  Label loop, exit;
1119  ForIn loop_statement(this, stmt);
1120  increment_loop_depth();
1121 
1122  // Get the object to enumerate over. If the object is null or undefined, skip
1123  // over the loop. See ECMA-262 version 5, section 12.6.4.
1124  VisitForAccumulatorValue(stmt->enumerable());
1125  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1126  Register null_value = x15;
1127  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1128  __ Cmp(x0, null_value);
1129  __ B(eq, &exit);
1130 
1131  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1132 
1133  // Convert the object to a JS object.
1134  Label convert, done_convert;
1135  __ JumpIfSmi(x0, &convert);
1136  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1137  __ Bind(&convert);
1138  __ Push(x0);
1139  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1140  __ Bind(&done_convert);
1141  __ Push(x0);
1142 
1143  // Check for proxies.
1144  Label call_runtime;
1145  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1146  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1147 
1148  // Check cache validity in generated code. This is a fast case for
1149  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1150  // guarantee cache validity, call the runtime system to check cache
1151  // validity or get the property names in a fixed array.
1152  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1153 
1154  // The enum cache is valid. Load the map of the object being
1155  // iterated over and use the cache for the iteration.
1156  Label use_cache;
1157  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1158  __ B(&use_cache);
1159 
1160  // Get the set of properties to enumerate.
1161  __ Bind(&call_runtime);
1162  __ Push(x0); // Duplicate the enumerable object on the stack.
1163  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1164 
1165  // If we got a map from the runtime call, we can do a fast
1166  // modification check. Otherwise, we got a fixed array, and we have
1167  // to do a slow check.
1168  Label fixed_array, no_descriptors;
1169  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1170  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1171 
1172  // We got a map in register x0. Get the enumeration cache from it.
1173  __ Bind(&use_cache);
1174 
1175  __ EnumLengthUntagged(x1, x0);
1176  __ Cbz(x1, &no_descriptors);
1177 
1178  __ LoadInstanceDescriptors(x0, x2);
1179  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1180  __ Ldr(x2,
1181  FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1182 
1183  // Set up the four remaining stack slots.
1184  __ Push(x0); // Map.
1185  __ Mov(x0, Smi::FromInt(0));
1186  // Push enumeration cache, enumeration cache length (as smi) and zero.
1187  __ SmiTag(x1);
1188  __ Push(x2, x1, x0);
1189  __ B(&loop);
1190 
1191  __ Bind(&no_descriptors);
1192  __ Drop(1);
1193  __ B(&exit);
1194 
1195  // We got a fixed array in register x0. Iterate through that.
1196  __ Bind(&fixed_array);
1197 
1198  Handle<Object> feedback = Handle<Object>(
1199  Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1200  isolate());
1201  StoreFeedbackVectorSlot(slot, feedback);
1202  __ LoadObject(x1, FeedbackVector());
1203  __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
1204  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
1205 
1206  __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1207  __ Peek(x10, 0); // Get enumerated object.
1208  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1209  // TODO(all): similar check was done already. Can we avoid it here?
1210  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1211  ASSERT(Smi::FromInt(0) == 0);
1212  __ CzeroX(x1, le); // Zero indicates proxy.
1213  __ Push(x1, x0); // Smi and array
1214  __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
1215  __ Push(x1, xzr); // Fixed array length (as smi) and initial index.
1216 
1217  // Generate code for doing the condition check.
1218  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1219  __ Bind(&loop);
1220  // Load the current count to x0, load the length to x1.
1221  __ PeekPair(x0, x1, 0);
1222  __ Cmp(x0, x1); // Compare to the array length.
1223  __ B(hs, loop_statement.break_label());
1224 
1225  // Get the current entry of the array into register r3.
1226  __ Peek(x10, 2 * kXRegSize);
1227  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1228  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1229 
1230  // Get the expected map from the stack or a smi in the
1231  // permanent slow case into register x10.
1232  __ Peek(x2, 3 * kXRegSize);
1233 
1234  // Check if the expected map still matches that of the enumerable.
1235  // If not, we may have to filter the key.
1236  Label update_each;
1237  __ Peek(x1, 4 * kXRegSize);
1238  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1239  __ Cmp(x11, x2);
1240  __ B(eq, &update_each);
1241 
1242  // For proxies, no filtering is done.
1243  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1244  STATIC_ASSERT(kSmiTag == 0);
1245  __ Cbz(x2, &update_each);
1246 
1247  // Convert the entry to a string or (smi) 0 if it isn't a property
1248  // any more. If the property has been removed while iterating, we
1249  // just skip it.
1250  __ Push(x1, x3);
1251  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1252  __ Mov(x3, x0);
1253  __ Cbz(x0, loop_statement.continue_label());
1254 
1255  // Update the 'each' property or variable from the possibly filtered
1256  // entry in register x3.
1257  __ Bind(&update_each);
1258  __ Mov(result_register(), x3);
1259  // Perform the assignment as if via '='.
1260  { EffectContext context(this);
1261  EmitAssignment(stmt->each());
1262  }
1263 
1264  // Generate code for the body of the loop.
1265  Visit(stmt->body());
1266 
1267  // Generate code for going to the next element by incrementing
1268  // the index (smi) stored on top of the stack.
1269  __ Bind(loop_statement.continue_label());
1270  // TODO(all): We could use a callee saved register to avoid popping.
1271  __ Pop(x0);
1272  __ Add(x0, x0, Smi::FromInt(1));
1273  __ Push(x0);
1274 
1275  EmitBackEdgeBookkeeping(stmt, &loop);
1276  __ B(&loop);
1277 
1278  // Remove the pointers stored on the stack.
1279  __ Bind(loop_statement.break_label());
1280  __ Drop(5);
1281 
1282  // Exit and decrement the loop depth.
1283  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1284  __ Bind(&exit);
1285  decrement_loop_depth();
1286 }
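// For reference, the five stack slots maintained by the loop above are, from
// the top of the stack down: the current index (smi), the cache or array
// length (smi), the enum cache or fixed array of names, the expected map
// (or Smi 1 for a slow check, 0 for a proxy), and the enumerable object
// itself; this matches the Peek offsets (0, 2, 3 and 4 * kXRegSize) used in
// the loop body and the final Drop(5).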
1287 
1288 
1289 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1290  Comment cmnt(masm_, "[ ForOfStatement");
1291  SetStatementPosition(stmt);
1292 
1293  Iteration loop_statement(this, stmt);
1294  increment_loop_depth();
1295 
1296  // var iterator = iterable[@@iterator]()
1297  VisitForAccumulatorValue(stmt->assign_iterator());
1298 
1299  // As with for-in, skip the loop if the iterator is null or undefined.
1300  Register iterator = x0;
1301  __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
1302  loop_statement.break_label());
1303  __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
1304  loop_statement.break_label());
1305 
1306  // Convert the iterator to a JS object.
1307  Label convert, done_convert;
1308  __ JumpIfSmi(iterator, &convert);
1309  __ CompareObjectType(iterator, x1, x1, FIRST_SPEC_OBJECT_TYPE);
1310  __ B(ge, &done_convert);
1311  __ Bind(&convert);
1312  __ Push(iterator);
1313  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1314  __ Bind(&done_convert);
1315  __ Push(iterator);
1316 
1317  // Loop entry.
1318  __ Bind(loop_statement.continue_label());
1319 
1320  // result = iterator.next()
1321  VisitForEffect(stmt->next_result());
1322 
1323  // if (result.done) break;
1324  Label result_not_done;
1325  VisitForControl(stmt->result_done(),
1326  loop_statement.break_label(),
1327  &result_not_done,
1328  &result_not_done);
1329  __ Bind(&result_not_done);
1330 
1331  // each = result.value
1332  VisitForEffect(stmt->assign_each());
1333 
1334  // Generate code for the body of the loop.
1335  Visit(stmt->body());
1336 
1337  // Check stack before looping.
1338  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1339  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1340  __ B(loop_statement.continue_label());
1341 
1342  // Exit and decrement the loop depth.
1343  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1344  __ Bind(loop_statement.break_label());
1345  decrement_loop_depth();
1346 }
1347 
1348 
1349 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1350  bool pretenure) {
1351  // Use the fast case closure allocation code that allocates in new space for
1352  // nested functions that don't need literals cloning. If we're running with
1353  // the --always-opt or the --prepare-always-opt flag, we need to use the
1354  // runtime function so that the new function we are creating here gets a
1355  // chance to have its code optimized and doesn't just get a copy of the
1356  // existing unoptimized code.
1357  if (!FLAG_always_opt &&
1358  !FLAG_prepare_always_opt &&
1359  !pretenure &&
1360  scope()->is_function_scope() &&
1361  info->num_literals() == 0) {
1362  FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1363  __ Mov(x2, Operand(info));
1364  __ CallStub(&stub);
1365  } else {
1366  __ Mov(x11, Operand(info));
1367  __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1368  : Heap::kFalseValueRootIndex);
1369  __ Push(cp, x11, x10);
1370  __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1371  }
1372  context()->Plug(x0);
1373 }
1374 
1375 
1376 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1377  Comment cmnt(masm_, "[ VariableProxy");
1378  EmitVariableLoad(expr);
1379 }
1380 
1381 
1382 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1383  TypeofState typeof_state,
1384  Label* slow) {
1385  Register current = cp;
1386  Register next = x10;
1387  Register temp = x11;
1388 
1389  Scope* s = scope();
1390  while (s != NULL) {
1391  if (s->num_heap_slots() > 0) {
1392  if (s->calls_sloppy_eval()) {
1393  // Check that extension is NULL.
1394  __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1395  __ Cbnz(temp, slow);
1396  }
1397  // Load next context in chain.
1398  __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1399  // Walk the rest of the chain without clobbering cp.
1400  current = next;
1401  }
1402  // If no outer scope calls eval, we do not need to check more
1403  // context extensions.
1404  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1405  s = s->outer_scope();
1406  }
1407 
1408  if (s->is_eval_scope()) {
1409  Label loop, fast;
1410  __ Mov(next, current);
1411 
1412  __ Bind(&loop);
1413  // Terminate at native context.
1414  __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1415  __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1416  // Check that extension is NULL.
1417  __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1418  __ Cbnz(temp, slow);
1419  // Load next context in chain.
1420  __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1421  __ B(&loop);
1422  __ Bind(&fast);
1423  }
1424 
1425  __ Ldr(x0, GlobalObjectMemOperand());
1426  __ Mov(x2, Operand(var->name()));
1427  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
1428  : CONTEXTUAL;
1429  CallLoadIC(mode);
1430 }
1431 
1432 
1433 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1434  Label* slow) {
1435  ASSERT(var->IsContextSlot());
1436  Register context = cp;
1437  Register next = x10;
1438  Register temp = x11;
1439 
1440  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1441  if (s->num_heap_slots() > 0) {
1442  if (s->calls_sloppy_eval()) {
1443  // Check that extension is NULL.
1444  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1445  __ Cbnz(temp, slow);
1446  }
1447  __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1448  // Walk the rest of the chain without clobbering cp.
1449  context = next;
1450  }
1451  }
1452  // Check that last extension is NULL.
1453  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1454  __ Cbnz(temp, slow);
1455 
1456  // This function is used only for loads, not stores, so it's safe to
1457  // return a cp-based operand (the write barrier cannot be allowed to
1458  // destroy the cp register).
1459  return ContextMemOperand(context, var->index());
1460 }
1461 
1462 
1463 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1464  TypeofState typeof_state,
1465  Label* slow,
1466  Label* done) {
1467  // Generate fast-case code for variables that might be shadowed by
1468  // eval-introduced variables. Eval is used a lot without
1469  // introducing variables. In those cases, we do not want to
1470  // perform a runtime call for all variables in the scope
1471  // containing the eval.
1472  if (var->mode() == DYNAMIC_GLOBAL) {
1473  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1474  __ B(done);
1475  } else if (var->mode() == DYNAMIC_LOCAL) {
1476  Variable* local = var->local_if_not_shadowed();
1477  __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1478  if (local->mode() == LET || local->mode() == CONST ||
1479  local->mode() == CONST_LEGACY) {
1480  __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1481  if (local->mode() == CONST_LEGACY) {
1482  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1483  } else { // LET || CONST
1484  __ Mov(x0, Operand(var->name()));
1485  __ Push(x0);
1486  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1487  }
1488  }
1489  __ B(done);
1490  }
1491 }
1492 
1493 
1494 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1495  // Record position before possible IC call.
1496  SetSourcePosition(proxy->position());
1497  Variable* var = proxy->var();
1498 
1499  // Three cases: global variables, lookup variables, and all other types of
1500  // variables.
1501  switch (var->location()) {
1502  case Variable::UNALLOCATED: {
1503  Comment cmnt(masm_, "Global variable");
1504  // Use inline caching. Variable name is passed in x2 and the global
1505  // object (receiver) in x0.
1506  __ Ldr(x0, GlobalObjectMemOperand());
1507  __ Mov(x2, Operand(var->name()));
1508  CallLoadIC(CONTEXTUAL);
1509  context()->Plug(x0);
1510  break;
1511  }
1512 
1513  case Variable::PARAMETER:
1514  case Variable::LOCAL:
1515  case Variable::CONTEXT: {
1516  Comment cmnt(masm_, var->IsContextSlot()
1517  ? "Context variable"
1518  : "Stack variable");
1519  if (var->binding_needs_init()) {
1520  // var->scope() may be NULL when the proxy is located in eval code and
1521  // refers to a potential outside binding. Currently those bindings are
1522  // always looked up dynamically, i.e. in that case
1523  // var->location() == LOOKUP.
1524  // always holds.
1525  ASSERT(var->scope() != NULL);
1526 
1527  // Check if the binding really needs an initialization check. The check
1528  // can be skipped in the following situation: we have a LET or CONST
1529  // binding in harmony mode, both the Variable and the VariableProxy have
1530  // the same declaration scope (i.e. they are both in global code, in the
1531  // same function or in the same eval code) and the VariableProxy is in
1532  // the source physically located after the initializer of the variable.
1533  //
1534  // We cannot skip any initialization checks for CONST in non-harmony
1535  // mode because const variables may be declared but never initialized:
1536  // if (false) { const x; }; var y = x;
1537  //
1538  // The condition on the declaration scopes is a conservative check for
1539  // nested functions that access a binding and are called before the
1540  // binding is initialized:
1541  // function() { f(); let x = 1; function f() { x = 2; } }
1542  //
1543  bool skip_init_check;
1544  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1545  skip_init_check = false;
1546  } else {
1547  // Check that we always have valid source position.
1548  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1549  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1550  skip_init_check = var->mode() != CONST_LEGACY &&
1551  var->initializer_position() < proxy->position();
1552  }
1553 
1554  if (!skip_init_check) {
1555  // Let and const need a read barrier.
1556  GetVar(x0, var);
1557  Label done;
1558  __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1559  if (var->mode() == LET || var->mode() == CONST) {
1560  // Throw a reference error when using an uninitialized let/const
1561  // binding in harmony mode.
1562  __ Mov(x0, Operand(var->name()));
1563  __ Push(x0);
1564  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1565  __ Bind(&done);
1566  } else {
1567  // Uninitialized const bindings outside of harmony mode are unholed.
1568  ASSERT(var->mode() == CONST_LEGACY);
1569  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1570  __ Bind(&done);
1571  }
1572  context()->Plug(x0);
1573  break;
1574  }
1575  }
1576  context()->Plug(var);
1577  break;
1578  }
1579 
1580  case Variable::LOOKUP: {
1581  Label done, slow;
1582  // Generate code for loading from variables potentially shadowed by
1583  // eval-introduced variables.
1584  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1585  __ Bind(&slow);
1586  Comment cmnt(masm_, "Lookup variable");
1587  __ Mov(x1, Operand(var->name()));
1588  __ Push(cp, x1); // Context and name.
1589  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1590  __ Bind(&done);
1591  context()->Plug(x0);
1592  break;
1593  }
1594  }
1595 }
1596 
1597 
1598 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1599  Comment cmnt(masm_, "[ RegExpLiteral");
1600  Label materialized;
1601  // Registers will be used as follows:
1602  // x5 = materialized value (RegExp literal)
1603  // x4 = JS function, literals array
1604  // x3 = literal index
1605  // x2 = RegExp pattern
1606  // x1 = RegExp flags
1607  // x0 = RegExp literal clone
1608  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1609  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1610  int literal_offset =
1611  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1612  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1613  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1614 
1615  // Create regexp literal using runtime function.
1616  // Result will be in x0.
1617  __ Mov(x3, Smi::FromInt(expr->literal_index()));
1618  __ Mov(x2, Operand(expr->pattern()));
1619  __ Mov(x1, Operand(expr->flags()));
1620  __ Push(x4, x3, x2, x1);
1621  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1622  __ Mov(x5, x0);
1623 
1624  __ Bind(&materialized);
1625  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1626  Label allocated, runtime_allocate;
1627  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1628  __ B(&allocated);
1629 
1630  __ Bind(&runtime_allocate);
1631  __ Mov(x10, Smi::FromInt(size));
1632  __ Push(x5, x10);
1633  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1634  __ Pop(x5);
1635 
1636  __ Bind(&allocated);
1637  // After this, registers are used as follows:
1638  // x0: Newly allocated regexp.
1639  // x5: Materialized regexp.
1640  // x10, x11, x12: temps.
1641  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1642  context()->Plug(x0);
1643 }
1644 
1645 
1646 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1647  if (expression == NULL) {
1648  __ LoadRoot(x10, Heap::kNullValueRootIndex);
1649  __ Push(x10);
1650  } else {
1651  VisitForStackValue(expression);
1652  }
1653 }
1654 
1655 
1656 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1657  Comment cmnt(masm_, "[ ObjectLiteral");
1658 
1659  expr->BuildConstantProperties(isolate());
1660  Handle<FixedArray> constant_properties = expr->constant_properties();
1661  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1662  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1663  __ Mov(x2, Smi::FromInt(expr->literal_index()));
1664  __ Mov(x1, Operand(constant_properties));
1665  int flags = expr->fast_elements()
1666  ? ObjectLiteral::kFastElements
1667  : ObjectLiteral::kNoFlags;
1668  flags |= expr->has_function()
1669  ? ObjectLiteral::kHasFunction
1670  : ObjectLiteral::kNoFlags;
1671  __ Mov(x0, Smi::FromInt(flags));
1672  int properties_count = constant_properties->length() / 2;
1673  const int max_cloned_properties =
1675  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1676  flags != ObjectLiteral::kFastElements ||
1677  properties_count > max_cloned_properties) {
1678  __ Push(x3, x2, x1, x0);
1679  __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1680  } else {
1681  FastCloneShallowObjectStub stub(properties_count);
1682  __ CallStub(&stub);
1683  }
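// Fast path: a shallow object literal with only fast, non-double properties
// and few enough of them is cloned by FastCloneShallowObjectStub; nested
// literals, double-storing literals, serializer builds, and oversized
// literals take the runtime call instead.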
1684 
1685  // If result_saved is true the result is on top of the stack. If
1686  // result_saved is false the result is in x0.
1687  bool result_saved = false;
1688 
1689  // Mark all computed expressions that are bound to a key that
1690  // is shadowed by a later occurrence of the same key. For the
1691  // marked expressions, no store code is emitted.
1692  expr->CalculateEmitStore(zone());
1693 
1694  AccessorTable accessor_table(zone());
1695  for (int i = 0; i < expr->properties()->length(); i++) {
1696  ObjectLiteral::Property* property = expr->properties()->at(i);
1697  if (property->IsCompileTimeValue()) continue;
1698 
1699  Literal* key = property->key();
1700  Expression* value = property->value();
1701  if (!result_saved) {
1702  __ Push(x0); // Save result on stack
1703  result_saved = true;
1704  }
1705  switch (property->kind()) {
1706  case ObjectLiteral::Property::CONSTANT:
1707  UNREACHABLE();
1708  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1709  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1710  // Fall through.
1711  case ObjectLiteral::Property::COMPUTED:
1712  if (key->value()->IsInternalizedString()) {
1713  if (property->emit_store()) {
1714  VisitForAccumulatorValue(value);
1715  __ Mov(x2, Operand(key->value()));
1716  __ Peek(x1, 0);
1717  CallStoreIC(key->LiteralFeedbackId());
1718  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1719  } else {
1720  VisitForEffect(value);
1721  }
1722  break;
1723  }
1724  if (property->emit_store()) {
1725  // Duplicate receiver on stack.
1726  __ Peek(x0, 0);
1727  __ Push(x0);
1728  VisitForStackValue(key);
1729  VisitForStackValue(value);
1730  __ Mov(x0, Smi::FromInt(NONE)); // PropertyAttributes
1731  __ Push(x0);
1732  __ CallRuntime(Runtime::kSetProperty, 4);
1733  } else {
1734  VisitForEffect(key);
1735  VisitForEffect(value);
1736  }
1737  break;
1738  case ObjectLiteral::Property::PROTOTYPE:
1739  if (property->emit_store()) {
1740  // Duplicate receiver on stack.
1741  __ Peek(x0, 0);
1742  __ Push(x0);
1743  VisitForStackValue(value);
1744  __ CallRuntime(Runtime::kSetPrototype, 2);
1745  } else {
1746  VisitForEffect(value);
1747  }
1748  break;
1749  case ObjectLiteral::Property::GETTER:
1750  accessor_table.lookup(key)->second->getter = value;
1751  break;
1752  case ObjectLiteral::Property::SETTER:
1753  accessor_table.lookup(key)->second->setter = value;
1754  break;
1755  }
1756  }
1757 
1758  // Emit code to define accessors, using only a single call to the runtime for
1759  // each pair of corresponding getters and setters.
1760  for (AccessorTable::Iterator it = accessor_table.begin();
1761  it != accessor_table.end();
1762  ++it) {
1763  __ Peek(x10, 0); // Duplicate receiver.
1764  __ Push(x10);
1765  VisitForStackValue(it->first);
1766  EmitAccessor(it->second->getter);
1767  EmitAccessor(it->second->setter);
1768  __ Mov(x10, Smi::FromInt(NONE));
1769  __ Push(x10);
1770  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1771  }
1772 
1773  if (expr->has_function()) {
1774  ASSERT(result_saved);
1775  __ Peek(x0, 0);
1776  __ Push(x0);
1777  __ CallRuntime(Runtime::kToFastProperties, 1);
1778  }
1779 
1780  if (result_saved) {
1781  context()->PlugTOS();
1782  } else {
1783  context()->Plug(x0);
1784  }
1785 }
1786 
1787 
1788 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1789  Comment cmnt(masm_, "[ ArrayLiteral");
1790 
1791  expr->BuildConstantElements(isolate());
1792  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
1793  : ArrayLiteral::kNoFlags;
1794 
1795  ZoneList<Expression*>* subexprs = expr->values();
1796  int length = subexprs->length();
1797  Handle<FixedArray> constant_elements = expr->constant_elements();
1798  ASSERT_EQ(2, constant_elements->length());
1799  ElementsKind constant_elements_kind =
1800  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1801  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1802  Handle<FixedArrayBase> constant_elements_values(
1803  FixedArrayBase::cast(constant_elements->get(1)));
1804 
1805  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1806  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1807  // If the only customer of allocation sites is transitioning, then
1808  // we can turn it off if we don't have anywhere else to transition to.
1809  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1810  }
1811 
1814  __ Mov(x2, Smi::FromInt(expr->literal_index()));
1815  __ Mov(x1, Operand(constant_elements));
1816  if (has_fast_elements && constant_elements_values->map() ==
1817  isolate()->heap()->fixed_cow_array_map()) {
1818  FastCloneShallowArrayStub stub(
1820  allocation_site_mode,
1821  length);
1822  __ CallStub(&stub);
1823  __ IncrementCounter(
1824  isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
1825  } else if ((expr->depth() > 1) || Serializer::enabled() ||
1827  __ Mov(x0, Smi::FromInt(flags));
1828  __ Push(x3, x2, x1, x0);
1829  __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1830  } else {
1831  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1832  FLAG_smi_only_arrays);
1835 
1836  if (has_fast_elements) {
1838  }
1839 
1840  FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1841  __ CallStub(&stub);
1842  }
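// Selection above: literals whose constant elements are a copy-on-write
// array are cloned with a copy-on-write mode of FastCloneShallowArrayStub
// (and counted via cow_arrays_created_stub); deep literals and serializer
// builds fall back to the runtime; the remaining shallow cases use the stub
// with a mode derived from the constant elements kind.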
1843 
1844  bool result_saved = false; // Is the result saved to the stack?
1845 
1846  // Emit code to evaluate all the non-constant subexpressions and to store
1847  // them into the newly cloned array.
1848  for (int i = 0; i < length; i++) {
1849  Expression* subexpr = subexprs->at(i);
1850  // If the subexpression is a literal or a simple materialized literal it
1851  // is already set in the cloned array.
1852  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1853 
1854  if (!result_saved) {
1855  __ Push(x0);
1856  __ Push(Smi::FromInt(expr->literal_index()));
1857  result_saved = true;
1858  }
1859  VisitForAccumulatorValue(subexpr);
1860 
1861  if (IsFastObjectElementsKind(constant_elements_kind)) {
1862  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1863  __ Peek(x6, kPointerSize); // Copy of array literal.
1865  __ Str(result_register(), FieldMemOperand(x1, offset));
1866  // Update the write barrier for the array store.
1867  __ RecordWriteField(x1, offset, result_register(), x10,
1870  } else {
1871  __ Mov(x3, Smi::FromInt(i));
1872  StoreArrayLiteralElementStub stub;
1873  __ CallStub(&stub);
1874  }
1875 
1876  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1877  }
1878 
1879  if (result_saved) {
1880  __ Drop(1); // literal index
1881  context()->PlugTOS();
1882  } else {
1883  context()->Plug(x0);
1884  }
1885 }
1886 
1887 
1888 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1889  ASSERT(expr->target()->IsValidLeftHandSide());
1890 
1891  Comment cmnt(masm_, "[ Assignment");
1892 
1893  // Left-hand side can only be a property, a global or a (parameter or local)
1894  // slot.
1895  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1896  LhsKind assign_type = VARIABLE;
1897  Property* property = expr->target()->AsProperty();
1898  if (property != NULL) {
1899  assign_type = (property->key()->IsPropertyName())
1900  ? NAMED_PROPERTY
1901  : KEYED_PROPERTY;
1902  }
1903 
1904  // Evaluate LHS expression.
1905  switch (assign_type) {
1906  case VARIABLE:
1907  // Nothing to do here.
1908  break;
1909  case NAMED_PROPERTY:
1910  if (expr->is_compound()) {
1911  // We need the receiver both on the stack and in the accumulator.
1912  VisitForAccumulatorValue(property->obj());
1913  __ Push(result_register());
1914  } else {
1915  VisitForStackValue(property->obj());
1916  }
1917  break;
1918  case KEYED_PROPERTY:
1919  if (expr->is_compound()) {
1920  VisitForStackValue(property->obj());
1921  VisitForAccumulatorValue(property->key());
1922  __ Peek(x1, 0);
1923  __ Push(x0);
1924  } else {
1925  VisitForStackValue(property->obj());
1926  VisitForStackValue(property->key());
1927  }
1928  break;
1929  }
1930 
1931  // For compound assignments we need another deoptimization point after the
1932  // variable/property load.
1933  if (expr->is_compound()) {
1934  { AccumulatorValueContext context(this);
1935  switch (assign_type) {
1936  case VARIABLE:
1937  EmitVariableLoad(expr->target()->AsVariableProxy());
1938  PrepareForBailout(expr->target(), TOS_REG);
1939  break;
1940  case NAMED_PROPERTY:
1941  EmitNamedPropertyLoad(property);
1942  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1943  break;
1944  case KEYED_PROPERTY:
1945  EmitKeyedPropertyLoad(property);
1946  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1947  break;
1948  }
1949  }
1950 
1951  Token::Value op = expr->binary_op();
1952  __ Push(x0); // Left operand goes on the stack.
1953  VisitForAccumulatorValue(expr->value());
1954 
1955  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1956  ? OVERWRITE_RIGHT
1957  : NO_OVERWRITE;
1958  SetSourcePosition(expr->position() + 1);
1959  AccumulatorValueContext context(this);
1960  if (ShouldInlineSmiCase(op)) {
1961  EmitInlineSmiBinaryOp(expr->binary_operation(),
1962  op,
1963  mode,
1964  expr->target(),
1965  expr->value());
1966  } else {
1967  EmitBinaryOp(expr->binary_operation(), op, mode);
1968  }
1969 
1970  // Deoptimization point in case the binary operation may have side effects.
1971  PrepareForBailout(expr->binary_operation(), TOS_REG);
1972  } else {
1973  VisitForAccumulatorValue(expr->value());
1974  }
1975 
1976  // Record source position before possible IC call.
1977  SetSourcePosition(expr->position());
1978 
1979  // Store the value.
1980  switch (assign_type) {
1981  case VARIABLE:
1982  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1983  expr->op());
1984  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1985  context()->Plug(x0);
1986  break;
1987  case NAMED_PROPERTY:
1988  EmitNamedPropertyAssignment(expr);
1989  break;
1990  case KEYED_PROPERTY:
1991  EmitKeyedPropertyAssignment(expr);
1992  break;
1993  }
1994 }
1995 
1996 
1997 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1998  SetSourcePosition(prop->position());
1999  Literal* key = prop->key()->AsLiteral();
2000  __ Mov(x2, Operand(key->value()));
2001  // Call the load IC. It expects the receiver in x0 and the property name in x2.
2002  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2003 }
2004 
2005 
2006 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2007  SetSourcePosition(prop->position());
2008  // Call the keyed load IC. It expects the key in x0 and the receiver in x1.
2009  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2010  CallIC(ic, prop->PropertyFeedbackId());
2011 }
2012 
2013 
2014 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2015  Token::Value op,
2016  OverwriteMode mode,
2017  Expression* left_expr,
2018  Expression* right_expr) {
2019  Label done, both_smis, stub_call;
2020 
2021  // Get the arguments.
2022  Register left = x1;
2023  Register right = x0;
2024  Register result = x0;
2025  __ Pop(left);
2026 
2027  // Perform combined smi check on both operands.
2028  __ Orr(x10, left, right);
2029  JumpPatchSite patch_site(masm_);
2030  patch_site.EmitJumpIfSmi(x10, &both_smis);
2031 
2032  __ Bind(&stub_call);
2033  BinaryOpICStub stub(op, mode);
2034  {
2035  Assembler::BlockPoolsScope scope(masm_);
2036  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2037  patch_site.EmitPatchInfo();
2038  }
2039  __ B(&done);
2040 
2041  __ Bind(&both_smis);
2042  // Smi case. This code works in the same way as the smi-smi case in the type
2043  // recording binary operation stub, see
2044  // BinaryOpStub::GenerateSmiSmiOperation for comments.
2045  // TODO(all): That doesn't exist any more. Where are the comments?
2046  //
2047  // The set of operations that needs to be supported here is controlled by
2048  // FullCodeGenerator::ShouldInlineSmiCase().
2049  switch (op) {
2050  case Token::SAR:
2051  __ Ubfx(right, right, kSmiShift, 5);
2052  __ Asr(result, left, right);
2053  __ Bic(result, result, kSmiShiftMask);
2054  break;
2055  case Token::SHL:
2056  __ Ubfx(right, right, kSmiShift, 5);
2057  __ Lsl(result, left, right);
2058  break;
2059  case Token::SHR: {
2060  Label right_not_zero;
2061  __ Cbnz(right, &right_not_zero);
2062  __ Tbnz(left, kXSignBit, &stub_call);
2063  __ Bind(&right_not_zero);
2064  __ Ubfx(right, right, kSmiShift, 5);
2065  __ Lsr(result, left, right);
2066  __ Bic(result, result, kSmiShiftMask);
2067  break;
2068  }
2069  case Token::ADD:
2070  __ Adds(x10, left, right);
2071  __ B(vs, &stub_call);
2072  __ Mov(result, x10);
2073  break;
2074  case Token::SUB:
2075  __ Subs(x10, left, right);
2076  __ B(vs, &stub_call);
2077  __ Mov(result, x10);
2078  break;
2079  case Token::MUL: {
2080  Label not_minus_zero, done;
2081  __ Smulh(x10, left, right);
2082  __ Cbnz(x10, &not_minus_zero);
2083  __ Eor(x11, left, right);
2084  __ Tbnz(x11, kXSignBit, &stub_call);
2085  STATIC_ASSERT(kSmiTag == 0);
2086  __ Mov(result, x10);
2087  __ B(&done);
2088  __ Bind(&not_minus_zero);
2089  __ Cls(x11, x10);
2090  __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2091  __ B(lt, &stub_call);
2092  __ SmiTag(result, x10);
2093  __ Bind(&done);
2094  break;
2095  }
2096  case Token::BIT_OR:
2097  __ Orr(result, left, right);
2098  break;
2099  case Token::BIT_AND:
2100  __ And(result, left, right);
2101  break;
2102  case Token::BIT_XOR:
2103  __ Eor(result, left, right);
2104  break;
2105  default:
2106  UNREACHABLE();
2107  }
2108 
2109  __ Bind(&done);
2110  context()->Plug(x0);
2111 }
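// The smi fast path above depends on the ARM64 smi layout, where (assuming
// kSmiTag == 0 and kSmiShift == 32, as the Ubfx/Bic sequences suggest) a smi
// keeps its 32-bit payload in the upper half of a 64-bit word. A standalone
// sketch of that arithmetic, using hypothetical helper names, not V8's:
#if 0
#include <cstdint>

// Tag and untag under the assumed layout: payload << 32, low 32 bits zero.
static inline int64_t ExampleSmiTag(int32_t value) {
  return static_cast<int64_t>(value) << 32;
}
static inline int32_t ExampleSmiUntag(int64_t tagged) {
  return static_cast<int32_t>(tagged >> 32);
}

// Adding two tagged smis adds their payloads, and signed 64-bit overflow of
// the tagged addition occurs exactly when the 32-bit payload addition would
// overflow. That is why the Adds / B(vs, &stub_call) pair above is a complete
// overflow check before falling back to the BinaryOpIC stub.
static inline bool ExampleSmiAdd(int64_t a, int64_t b, int64_t* result) {
  uint64_t ua = static_cast<uint64_t>(a);
  uint64_t ub = static_cast<uint64_t>(b);
  uint64_t usum = ua + ub;
  *result = static_cast<int64_t>(usum);
  // Signed overflow iff the operands share a sign and the sum's sign differs.
  bool overflow = ((~(ua ^ ub) & (ua ^ usum)) >> 63) != 0;
  return !overflow;  // True when the tagged addition did not overflow.
}
#endif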
2112 
2113 
2114 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2115  Token::Value op,
2116  OverwriteMode mode) {
2117  __ Pop(x1);
2118  BinaryOpICStub stub(op, mode);
2119  JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2120  {
2121  Assembler::BlockPoolsScope scope(masm_);
2122  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2123  patch_site.EmitPatchInfo();
2124  }
2125  context()->Plug(x0);
2126 }
2127 
2128 
2129 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2130  ASSERT(expr->IsValidLeftHandSide());
2131 
2132  // Left-hand side can only be a property, a global or a (parameter or local)
2133  // slot.
2134  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2135  LhsKind assign_type = VARIABLE;
2136  Property* prop = expr->AsProperty();
2137  if (prop != NULL) {
2138  assign_type = (prop->key()->IsPropertyName())
2139  ? NAMED_PROPERTY
2140  : KEYED_PROPERTY;
2141  }
2142 
2143  switch (assign_type) {
2144  case VARIABLE: {
2145  Variable* var = expr->AsVariableProxy()->var();
2146  EffectContext context(this);
2147  EmitVariableAssignment(var, Token::ASSIGN);
2148  break;
2149  }
2150  case NAMED_PROPERTY: {
2151  __ Push(x0); // Preserve value.
2152  VisitForAccumulatorValue(prop->obj());
2153  // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2154  // this copy.
2155  __ Mov(x1, x0);
2156  __ Pop(x0); // Restore value.
2157  __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2158  CallStoreIC();
2159  break;
2160  }
2161  case KEYED_PROPERTY: {
2162  __ Push(x0); // Preserve value.
2163  VisitForStackValue(prop->obj());
2164  VisitForAccumulatorValue(prop->key());
2165  __ Mov(x1, x0);
2166  __ Pop(x2, x0);
2167  Handle<Code> ic = strict_mode() == SLOPPY
2168  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2169  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2170  CallIC(ic);
2171  break;
2172  }
2173  }
2174  context()->Plug(x0);
2175 }
2176 
2177 
2178 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2179  Variable* var, MemOperand location) {
2180  __ Str(result_register(), location);
2181  if (var->IsContextSlot()) {
2182  // RecordWrite may destroy all its register arguments.
2183  __ Mov(x10, result_register());
2184  int offset = Context::SlotOffset(var->index());
2185  __ RecordWriteContextSlot(
2186  x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2187  }
2188 }
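// Stack locals need no write barrier, but storing into a context slot writes
// a pointer into a heap object, so RecordWriteContextSlot is emitted to keep
// the remembered set and incremental marker up to date. The value is copied
// to x10 first because the barrier may clobber its register arguments.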
2189 
2190 
2191 void FullCodeGenerator::EmitCallStoreContextSlot(
2192  Handle<String> name, StrictMode strict_mode) {
2193  __ Mov(x11, Operand(name));
2194  __ Mov(x10, Smi::FromInt(strict_mode));
2195  // jssp[0] : mode.
2196  // jssp[8] : name.
2197  // jssp[16] : context.
2198  // jssp[24] : value.
2199  __ Push(x0, cp, x11, x10);
2200  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2201 }
2202 
2203 
2204 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2205  Token::Value op) {
2206  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2207  if (var->IsUnallocated()) {
2208  // Global var, const, or let.
2209  __ Mov(x2, Operand(var->name()));
2210  __ Ldr(x1, GlobalObjectMemOperand());
2211  CallStoreIC();
2212 
2213  } else if (op == Token::INIT_CONST_LEGACY) {
2214  // Const initializers need a write barrier.
2215  ASSERT(!var->IsParameter()); // No const parameters.
2216  if (var->IsLookupSlot()) {
2217  __ Push(x0);
2218  __ Mov(x0, Operand(var->name()));
2219  __ Push(cp, x0); // Context and name.
2220  __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2221  } else {
2222  ASSERT(var->IsStackLocal() || var->IsContextSlot());
2223  Label skip;
2224  MemOperand location = VarOperand(var, x1);
2225  __ Ldr(x10, location);
2226  __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2227  EmitStoreToStackLocalOrContextSlot(var, location);
2228  __ Bind(&skip);
2229  }
2230 
2231  } else if (var->mode() == LET && op != Token::INIT_LET) {
2232  // Non-initializing assignment to let variable needs a write barrier.
2233  if (var->IsLookupSlot()) {
2234  EmitCallStoreContextSlot(var->name(), strict_mode());
2235  } else {
2236  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2237  Label assign;
2238  MemOperand location = VarOperand(var, x1);
2239  __ Ldr(x10, location);
2240  __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2241  __ Mov(x10, Operand(var->name()));
2242  __ Push(x10);
2243  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2244  // Perform the assignment.
2245  __ Bind(&assign);
2246  EmitStoreToStackLocalOrContextSlot(var, location);
2247  }
2248 
2249  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2250  // Assignment to var or initializing assignment to let/const
2251  // in harmony mode.
2252  if (var->IsLookupSlot()) {
2253  EmitCallStoreContextSlot(var->name(), strict_mode());
2254  } else {
2255  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2256  MemOperand location = VarOperand(var, x1);
2257  if (FLAG_debug_code && op == Token::INIT_LET) {
2258  __ Ldr(x10, location);
2259  __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2260  __ Check(eq, kLetBindingReInitialization);
2261  }
2262  EmitStoreToStackLocalOrContextSlot(var, location);
2263  }
2264  }
2265  // Non-initializing assignments to consts are ignored.
2266 }
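// Summary of the cases above: unallocated (global) variables store through a
// named store IC; legacy const initialization only stores while the slot
// still holds the hole; a non-initializing assignment to a let binding throws
// a ReferenceError if the binding is still the hole; ordinary vars and
// harmony let/const initializers store unconditionally (with a debug-only
// hole check for INIT_LET); and non-initializing assignments to const fall
// through and are silently ignored.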
2267 
2268 
2269 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2270  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2271  // Assignment to a property, using a named store IC.
2272  Property* prop = expr->target()->AsProperty();
2273  ASSERT(prop != NULL);
2274  ASSERT(prop->key()->AsLiteral() != NULL);
2275 
2276  // Record source code position before IC call.
2277  SetSourcePosition(expr->position());
2278  __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2279  __ Pop(x1);
2280 
2281  CallStoreIC(expr->AssignmentFeedbackId());
2282 
2283  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2284  context()->Plug(x0);
2285 }
2286 
2287 
2288 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2289  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2290  // Assignment to a property, using a keyed store IC.
2291 
2292  // Record source code position before IC call.
2293  SetSourcePosition(expr->position());
2294  // TODO(all): Could we pass this in registers rather than on the stack?
2295  __ Pop(x1, x2); // Key and object holding the property.
2296 
2297  Handle<Code> ic = strict_mode() == SLOPPY
2298  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2299  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2300  CallIC(ic, expr->AssignmentFeedbackId());
2301 
2302  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2303  context()->Plug(x0);
2304 }
2305 
2306 
2307 void FullCodeGenerator::VisitProperty(Property* expr) {
2308  Comment cmnt(masm_, "[ Property");
2309  Expression* key = expr->key();
2310 
2311  if (key->IsPropertyName()) {
2312  VisitForAccumulatorValue(expr->obj());
2313  EmitNamedPropertyLoad(expr);
2314  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2315  context()->Plug(x0);
2316  } else {
2317  VisitForStackValue(expr->obj());
2318  VisitForAccumulatorValue(expr->key());
2319  __ Pop(x1);
2320  EmitKeyedPropertyLoad(expr);
2321  context()->Plug(x0);
2322  }
2323 }
2324 
2325 
2326 void FullCodeGenerator::CallIC(Handle<Code> code,
2327  TypeFeedbackId ast_id) {
2328  ic_total_count_++;
2329  // All calls must have a predictable size in full-codegen code to ensure that
2330  // the debugger can patch them correctly.
2331  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2332 }
2333 
2334 
2335 // Code common for calls using the IC.
2336 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2337  ASM_LOCATION("EmitCallWithIC");
2338 
2339  Expression* callee = expr->expression();
2340  ZoneList<Expression*>* args = expr->arguments();
2341  int arg_count = args->length();
2342 
2344  // Get the target function.
2345  if (callee->IsVariableProxy()) {
2346  { StackValueContext context(this);
2347  EmitVariableLoad(callee->AsVariableProxy());
2348  PrepareForBailout(callee, NO_REGISTERS);
2349  }
2350  // Push undefined as receiver. This is patched in the method prologue if it
2351  // is a sloppy mode method.
2352  __ Push(isolate()->factory()->undefined_value());
2353  flags = NO_CALL_FUNCTION_FLAGS;
2354  } else {
2355  // Load the function from the receiver.
2356  ASSERT(callee->IsProperty());
2357  __ Peek(x0, 0);
2358  EmitNamedPropertyLoad(callee->AsProperty());
2359  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2360  // Push the target function under the receiver.
2361  __ Pop(x10);
2362  __ Push(x0, x10);
2363  flags = CALL_AS_METHOD;
2364  }
2365 
2366  // Load the arguments.
2367  { PreservePositionScope scope(masm()->positions_recorder());
2368  for (int i = 0; i < arg_count; i++) {
2369  VisitForStackValue(args->at(i));
2370  }
2371  }
2372 
2373  // Record source position for debugger.
2374  SetSourcePosition(expr->position());
2375  CallFunctionStub stub(arg_count, flags);
2376  __ Peek(x1, (arg_count + 1) * kPointerSize);
2377  __ CallStub(&stub);
2378 
2379  RecordJSReturnSite(expr);
2380 
2381  // Restore context register.
2383 
2384  context()->DropAndPlug(1, x0);
2385 }
2386 
2387 
2388 // Code common for calls using the IC.
2389 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2390  Expression* key) {
2391  // Load the key.
2392  VisitForAccumulatorValue(key);
2393 
2394  Expression* callee = expr->expression();
2395  ZoneList<Expression*>* args = expr->arguments();
2396  int arg_count = args->length();
2397 
2398  // Load the function from the receiver.
2399  ASSERT(callee->IsProperty());
2400  __ Peek(x1, 0);
2401  EmitKeyedPropertyLoad(callee->AsProperty());
2402  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2403 
2404  // Push the target function under the receiver.
2405  __ Pop(x10);
2406  __ Push(x0, x10);
2407 
2408  { PreservePositionScope scope(masm()->positions_recorder());
2409  for (int i = 0; i < arg_count; i++) {
2410  VisitForStackValue(args->at(i));
2411  }
2412  }
2413 
2414  // Record source position for debugger.
2415  SetSourcePosition(expr->position());
2416  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2417  __ Peek(x1, (arg_count + 1) * kPointerSize);
2418  __ CallStub(&stub);
2419 
2420  RecordJSReturnSite(expr);
2421  // Restore context register.
2423 
2424  context()->DropAndPlug(1, x0);
2425 }
2426 
2427 
2428 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2429  // Code common for calls using the call stub.
2430  ZoneList<Expression*>* args = expr->arguments();
2431  int arg_count = args->length();
2432  { PreservePositionScope scope(masm()->positions_recorder());
2433  for (int i = 0; i < arg_count; i++) {
2434  VisitForStackValue(args->at(i));
2435  }
2436  }
2437  // Record source position for debugger.
2438  SetSourcePosition(expr->position());
2439 
2440  Handle<Object> uninitialized =
2442  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2443  __ LoadObject(x2, FeedbackVector());
2444  __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
2445 
2446  // Record call targets in unoptimized code.
2447  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2448  __ Peek(x1, (arg_count + 1) * kXRegSize);
2449  __ CallStub(&stub);
2450  RecordJSReturnSite(expr);
2451  // Restore context register.
2453  context()->DropAndPlug(1, x0);
2454 }
2455 
2456 
2457 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2458  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2459  // Prepare to push a copy of the first argument or undefined if it doesn't
2460  // exist.
2461  if (arg_count > 0) {
2462  __ Peek(x10, arg_count * kXRegSize);
2463  } else {
2464  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2465  }
2466 
2467  // Prepare to push the receiver of the enclosing function.
2468  int receiver_offset = 2 + info_->scope()->num_parameters();
2469  __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2470 
2471  // Push.
2472  __ Push(x10, x11);
2473 
2474  // Prepare to push the language mode.
2475  __ Mov(x10, Smi::FromInt(strict_mode()));
2477  // Prepare to push the start position of the scope the call resides in.
2477  __ Mov(x11, Smi::FromInt(scope()->start_position()));
2478 
2479  // Push.
2480  __ Push(x10, x11);
2481 
2482  // Do the runtime call.
2483  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2484 }
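// Together with the copy of the function pushed by the caller, the runtime
// call receives five arguments: the function being called, its first argument
// (or undefined), the receiver of the enclosing function, the strict-mode
// flag, and the start position of the calling scope.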
2485 
2486 
2487 void FullCodeGenerator::VisitCall(Call* expr) {
2488 #ifdef DEBUG
2489  // We want to verify that RecordJSReturnSite gets called on all paths
2490  // through this function. Avoid early returns.
2491  expr->return_is_recorded_ = false;
2492 #endif
2493 
2494  Comment cmnt(masm_, "[ Call");
2495  Expression* callee = expr->expression();
2496  Call::CallType call_type = expr->GetCallType(isolate());
2497 
2498  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2499  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2500  // to resolve the function we need to call and the receiver of the
2501  // call. Then we call the resolved function using the given
2502  // arguments.
2503  ZoneList<Expression*>* args = expr->arguments();
2504  int arg_count = args->length();
2505 
2506  {
2507  PreservePositionScope pos_scope(masm()->positions_recorder());
2508  VisitForStackValue(callee);
2509  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2510  __ Push(x10); // Reserved receiver slot.
2511 
2512  // Push the arguments.
2513  for (int i = 0; i < arg_count; i++) {
2514  VisitForStackValue(args->at(i));
2515  }
2516 
2517  // Push a copy of the function (found below the arguments) and
2518  // resolve eval.
2519  __ Peek(x10, (arg_count + 1) * kPointerSize);
2520  __ Push(x10);
2521  EmitResolvePossiblyDirectEval(arg_count);
2522 
2523  // The runtime call returns a pair of values in x0 (function) and
2524  // x1 (receiver). Touch up the stack with the right values.
2525  __ PokePair(x1, x0, arg_count * kPointerSize);
2526  }
2527 
2528  // Record source position for debugger.
2529  SetSourcePosition(expr->position());
2530 
2531  // Call the evaluated function.
2532  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2533  __ Peek(x1, (arg_count + 1) * kXRegSize);
2534  __ CallStub(&stub);
2535  RecordJSReturnSite(expr);
2536  // Restore context register.
2538  context()->DropAndPlug(1, x0);
2539 
2540  } else if (call_type == Call::GLOBAL_CALL) {
2541  EmitCallWithIC(expr);
2542 
2543  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2544  // Call to a lookup slot (dynamically introduced variable).
2545  VariableProxy* proxy = callee->AsVariableProxy();
2546  Label slow, done;
2547 
2548  { PreservePositionScope scope(masm()->positions_recorder());
2549  // Generate code for loading from variables potentially shadowed
2550  // by eval-introduced variables.
2551  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2552  }
2553 
2554  __ Bind(&slow);
2555  // Call the runtime to find the function to call (returned in x0)
2556  // and the object holding it (returned in x1).
2557  __ Push(context_register());
2558  __ Mov(x10, Operand(proxy->name()));
2559  __ Push(x10);
2560  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2561  __ Push(x0, x1); // Receiver, function.
2562 
2563  // If fast case code has been generated, emit code to push the
2564  // function and receiver and have the slow path jump around this
2565  // code.
2566  if (done.is_linked()) {
2567  Label call;
2568  __ B(&call);
2569  __ Bind(&done);
2570  // Push function.
2571  __ Push(x0);
2572  // The receiver is implicitly the global receiver. Indicate this
2573  // by passing undefined to the call function stub.
2574  __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2575  __ Push(x1);
2576  __ Bind(&call);
2577  }
2578 
2579  // The receiver is either the global receiver or an object found
2580  // by LoadContextSlot.
2581  EmitCallWithStub(expr);
2582  } else if (call_type == Call::PROPERTY_CALL) {
2583  Property* property = callee->AsProperty();
2584  { PreservePositionScope scope(masm()->positions_recorder());
2585  VisitForStackValue(property->obj());
2586  }
2587  if (property->key()->IsPropertyName()) {
2588  EmitCallWithIC(expr);
2589  } else {
2590  EmitKeyedCallWithIC(expr, property->key());
2591  }
2592 
2593  } else {
2594  ASSERT(call_type == Call::OTHER_CALL);
2595  // Call to an arbitrary expression not handled specially above.
2596  { PreservePositionScope scope(masm()->positions_recorder());
2597  VisitForStackValue(callee);
2598  }
2599  __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2600  __ Push(x1);
2601  // Emit function call.
2602  EmitCallWithStub(expr);
2603  }
2604 
2605 #ifdef DEBUG
2606  // RecordJSReturnSite should have been called.
2607  ASSERT(expr->return_is_recorded_);
2608 #endif
2609 }
2610 
2611 
2612 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2613  Comment cmnt(masm_, "[ CallNew");
2614  // According to ECMA-262, section 11.2.2, page 44, the function
2615  // expression in new calls must be evaluated before the
2616  // arguments.
2617 
2618  // Push constructor on the stack. If it's not a function it's used as
2619  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2620  // ignored.
2621  VisitForStackValue(expr->expression());
2622 
2623  // Push the arguments ("left-to-right") on the stack.
2624  ZoneList<Expression*>* args = expr->arguments();
2625  int arg_count = args->length();
2626  for (int i = 0; i < arg_count; i++) {
2627  VisitForStackValue(args->at(i));
2628  }
2629 
2630  // Call the construct call builtin that handles allocation and
2631  // constructor invocation.
2632  SetSourcePosition(expr->position());
2633 
2634  // Load function and argument count into x1 and x0.
2635  __ Mov(x0, arg_count);
2636  __ Peek(x1, arg_count * kXRegSize);
2637 
2638  // Record call targets in unoptimized code.
2639  Handle<Object> uninitialized =
2641  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2642  if (FLAG_pretenuring_call_new) {
2643  StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2644  isolate()->factory()->NewAllocationSite());
2645  ASSERT(expr->AllocationSiteFeedbackSlot() ==
2646  expr->CallNewFeedbackSlot() + 1);
2647  }
2648 
2649  __ LoadObject(x2, FeedbackVector());
2650  __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
2651 
2652  CallConstructStub stub(RECORD_CALL_TARGET);
2653  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2654  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2655  context()->Plug(x0);
2656 }
2657 
2658 
2659 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2660  ZoneList<Expression*>* args = expr->arguments();
2661  ASSERT(args->length() == 1);
2662 
2663  VisitForAccumulatorValue(args->at(0));
2664 
2665  Label materialize_true, materialize_false;
2666  Label* if_true = NULL;
2667  Label* if_false = NULL;
2668  Label* fall_through = NULL;
2669  context()->PrepareTest(&materialize_true, &materialize_false,
2670  &if_true, &if_false, &fall_through);
2671 
2672  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2673  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2674 
2675  context()->Plug(if_true, if_false);
2676 }
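// Smis carry a zero tag bit (kSmiTag == 0), so this check only needs to test
// x0 against kSmiTagMask and split on whether the tag bits are clear; no map
// load or instance-type check is involved.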
2677 
2678 
2679 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2680  ZoneList<Expression*>* args = expr->arguments();
2681  ASSERT(args->length() == 1);
2682 
2683  VisitForAccumulatorValue(args->at(0));
2684 
2685  Label materialize_true, materialize_false;
2686  Label* if_true = NULL;
2687  Label* if_false = NULL;
2688  Label* fall_through = NULL;
2689  context()->PrepareTest(&materialize_true, &materialize_false,
2690  &if_true, &if_false, &fall_through);
2691 
2692  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2693  __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
2694  if_false, fall_through);
2695 
2696  context()->Plug(if_true, if_false);
2697 }
2698 
2699 
2700 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2701  ZoneList<Expression*>* args = expr->arguments();
2702  ASSERT(args->length() == 1);
2703 
2704  VisitForAccumulatorValue(args->at(0));
2705 
2706  Label materialize_true, materialize_false;
2707  Label* if_true = NULL;
2708  Label* if_false = NULL;
2709  Label* fall_through = NULL;
2710  context()->PrepareTest(&materialize_true, &materialize_false,
2711  &if_true, &if_false, &fall_through);
2712 
2713  __ JumpIfSmi(x0, if_false);
2714  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2715  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2716  // Undetectable objects behave like undefined when tested with typeof.
2717  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2718  __ Tbnz(x11, Map::kIsUndetectable, if_false);
2719  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2721  __ B(lt, if_false);
2723  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2724  Split(le, if_true, if_false, fall_through);
2725 
2726  context()->Plug(if_true, if_false);
2727 }
2728 
2729 
2730 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2731  ZoneList<Expression*>* args = expr->arguments();
2732  ASSERT(args->length() == 1);
2733 
2734  VisitForAccumulatorValue(args->at(0));
2735 
2736  Label materialize_true, materialize_false;
2737  Label* if_true = NULL;
2738  Label* if_false = NULL;
2739  Label* fall_through = NULL;
2740  context()->PrepareTest(&materialize_true, &materialize_false,
2741  &if_true, &if_false, &fall_through);
2742 
2743  __ JumpIfSmi(x0, if_false);
2744  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2745  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2746  Split(ge, if_true, if_false, fall_through);
2747 
2748  context()->Plug(if_true, if_false);
2749 }
2750 
2751 
2752 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2753  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2754  ZoneList<Expression*>* args = expr->arguments();
2755  ASSERT(args->length() == 1);
2756 
2757  VisitForAccumulatorValue(args->at(0));
2758 
2759  Label materialize_true, materialize_false;
2760  Label* if_true = NULL;
2761  Label* if_false = NULL;
2762  Label* fall_through = NULL;
2763  context()->PrepareTest(&materialize_true, &materialize_false,
2764  &if_true, &if_false, &fall_through);
2765 
2766  __ JumpIfSmi(x0, if_false);
2767  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2768  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2769  __ Tst(x11, 1 << Map::kIsUndetectable);
2770  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2771  Split(ne, if_true, if_false, fall_through);
2772 
2773  context()->Plug(if_true, if_false);
2774 }
2775 
2776 
2777 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2778  CallRuntime* expr) {
2779  ZoneList<Expression*>* args = expr->arguments();
2780  ASSERT(args->length() == 1);
2781  VisitForAccumulatorValue(args->at(0));
2782 
2783  Label materialize_true, materialize_false, skip_lookup;
2784  Label* if_true = NULL;
2785  Label* if_false = NULL;
2786  Label* fall_through = NULL;
2787  context()->PrepareTest(&materialize_true, &materialize_false,
2788  &if_true, &if_false, &fall_through);
2789 
2790  Register object = x0;
2791  __ AssertNotSmi(object);
2792 
2793  Register map = x10;
2794  Register bitfield2 = x11;
2795  __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2796  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2797  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
2798 
2799  // Check for fast case object. Generate false result for slow case object.
2800  Register props = x12;
2801  Register props_map = x12;
2802  Register hash_table_map = x13;
2803  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
2804  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2805  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2806  __ Cmp(props_map, hash_table_map);
2807  __ B(eq, if_false);
2808 
2809  // Look for valueOf name in the descriptor array, and indicate false if found.
2810  // Since we omit an enumeration index check, if it is added via a transition
2811  // that shares its descriptor array, this is a false positive.
2812  Label loop, done;
2813 
2814  // Skip loop if no descriptors are valid.
2815  Register descriptors = x12;
2816  Register descriptors_length = x13;
2817  __ NumberOfOwnDescriptors(descriptors_length, map);
2818  __ Cbz(descriptors_length, &done);
2819 
2820  __ LoadInstanceDescriptors(map, descriptors);
2821 
2822  // Calculate the end of the descriptor array.
2823  Register descriptors_end = x14;
2825  __ Mul(descriptors_length, descriptors_length, x15);
2826  // Calculate location of the first key name.
2827  __ Add(descriptors, descriptors,
2829  // Calculate the end of the descriptor array.
2830  __ Add(descriptors_end, descriptors,
2831  Operand(descriptors_length, LSL, kPointerSizeLog2));
2832 
2833  // Loop through all the keys in the descriptor array. If one of these is the
2834  // string "valueOf" the result is false.
2835  Register valueof_string = x1;
2836  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
2837  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
2838  __ Bind(&loop);
2839  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
2840  __ Cmp(x15, valueof_string);
2841  __ B(eq, if_false);
2842  __ Cmp(descriptors, descriptors_end);
2843  __ B(ne, &loop);
2844 
2845  __ Bind(&done);
2846 
2847  // Set the bit in the map to indicate that there is no local valueOf field.
2848  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2849  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2850  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2851 
2852  __ Bind(&skip_lookup);
2853 
2854  // If a valueOf property is not found on the object, check that its prototype
2855  // is the unmodified String prototype. If not, the result is false.
2856  Register prototype = x1;
2857  Register global_idx = x2;
2858  Register native_context = x2;
2859  Register string_proto = x3;
2860  Register proto_map = x4;
2861  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
2862  __ JumpIfSmi(prototype, if_false);
2863  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2864  __ Ldr(global_idx, GlobalObjectMemOperand());
2865  __ Ldr(native_context,
2867  __ Ldr(string_proto,
2868  ContextMemOperand(native_context,
2870  __ Cmp(proto_map, string_proto);
2871 
2872  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2873  Split(eq, if_true, if_false, fall_through);
2874 
2875  context()->Plug(if_true, if_false);
2876 }
2877 
2878 
2879 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2880  ZoneList<Expression*>* args = expr->arguments();
2881  ASSERT(args->length() == 1);
2882 
2883  VisitForAccumulatorValue(args->at(0));
2884 
2885  Label materialize_true, materialize_false;
2886  Label* if_true = NULL;
2887  Label* if_false = NULL;
2888  Label* fall_through = NULL;
2889  context()->PrepareTest(&materialize_true, &materialize_false,
2890  &if_true, &if_false, &fall_through);
2891 
2892  __ JumpIfSmi(x0, if_false);
2893  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
2894  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2895  Split(eq, if_true, if_false, fall_through);
2896 
2897  context()->Plug(if_true, if_false);
2898 }
2899 
2900 
2901 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2902  ZoneList<Expression*>* args = expr->arguments();
2903  ASSERT(args->length() == 1);
2904 
2905  VisitForAccumulatorValue(args->at(0));
2906 
2907  Label materialize_true, materialize_false;
2908  Label* if_true = NULL;
2909  Label* if_false = NULL;
2910  Label* fall_through = NULL;
2911  context()->PrepareTest(&materialize_true, &materialize_false,
2912  &if_true, &if_false, &fall_through);
2913 
2914  // Only a HeapNumber can be -0.0, so return false if we have something else.
2915  __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
2916 
2917  // Test the bit pattern.
2919  __ Cmp(x10, 1); // Set V on 0x8000000000000000.
2920 
2921  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2922  Split(vs, if_true, if_false, fall_through);
2923 
2924  context()->Plug(if_true, if_false);
2925 }
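// The comparison above is a bit trick: the IEEE-754 pattern of -0.0 is
// 0x8000000000000000, which is INT64_MIN when read as a signed integer, and
// x10 == INT64_MIN is the only value for which Cmp(x10, 1) sets the overflow
// flag, so branching on vs selects exactly the minus-zero bit pattern.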
2926 
2927 
2928 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2929  ZoneList<Expression*>* args = expr->arguments();
2930  ASSERT(args->length() == 1);
2931 
2932  VisitForAccumulatorValue(args->at(0));
2933 
2934  Label materialize_true, materialize_false;
2935  Label* if_true = NULL;
2936  Label* if_false = NULL;
2937  Label* fall_through = NULL;
2938  context()->PrepareTest(&materialize_true, &materialize_false,
2939  &if_true, &if_false, &fall_through);
2940 
2941  __ JumpIfSmi(x0, if_false);
2942  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2943  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2944  Split(eq, if_true, if_false, fall_through);
2945 
2946  context()->Plug(if_true, if_false);
2947 }
2948 
2949 
2950 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2951  ZoneList<Expression*>* args = expr->arguments();
2952  ASSERT(args->length() == 1);
2953 
2954  VisitForAccumulatorValue(args->at(0));
2955 
2956  Label materialize_true, materialize_false;
2957  Label* if_true = NULL;
2958  Label* if_false = NULL;
2959  Label* fall_through = NULL;
2960  context()->PrepareTest(&materialize_true, &materialize_false,
2961  &if_true, &if_false, &fall_through);
2962 
2963  __ JumpIfSmi(x0, if_false);
2964  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2965  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2966  Split(eq, if_true, if_false, fall_through);
2967 
2968  context()->Plug(if_true, if_false);
2969 }
2970 
2971 
2972 
2973 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2974  ASSERT(expr->arguments()->length() == 0);
2975 
2976  Label materialize_true, materialize_false;
2977  Label* if_true = NULL;
2978  Label* if_false = NULL;
2979  Label* fall_through = NULL;
2980  context()->PrepareTest(&materialize_true, &materialize_false,
2981  &if_true, &if_false, &fall_through);
2982 
2983  // Get the frame pointer for the calling frame.
2985 
2986  // Skip the arguments adaptor frame if it exists.
2987  Label check_frame_marker;
2990  __ B(ne, &check_frame_marker);
2992 
2993  // Check the marker in the calling frame.
2994  __ Bind(&check_frame_marker);
2996  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
2997  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998  Split(eq, if_true, if_false, fall_through);
2999 
3000  context()->Plug(if_true, if_false);
3001 }
3002 
3003 
3004 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3005  ZoneList<Expression*>* args = expr->arguments();
3006  ASSERT(args->length() == 2);
3007 
3008  // Load the two objects into registers and perform the comparison.
3009  VisitForStackValue(args->at(0));
3010  VisitForAccumulatorValue(args->at(1));
3011 
3012  Label materialize_true, materialize_false;
3013  Label* if_true = NULL;
3014  Label* if_false = NULL;
3015  Label* fall_through = NULL;
3016  context()->PrepareTest(&materialize_true, &materialize_false,
3017  &if_true, &if_false, &fall_through);
3018 
3019  __ Pop(x1);
3020  __ Cmp(x0, x1);
3021  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3022  Split(eq, if_true, if_false, fall_through);
3023 
3024  context()->Plug(if_true, if_false);
3025 }
3026 
3027 
3028 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3029  ZoneList<Expression*>* args = expr->arguments();
3030  ASSERT(args->length() == 1);
3031 
3032  // ArgumentsAccessStub expects the key in x1.
3033  VisitForAccumulatorValue(args->at(0));
3034  __ Mov(x1, x0);
3035  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3036  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3037  __ CallStub(&stub);
3038  context()->Plug(x0);
3039 }
3040 
3041 
3042 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3043  ASSERT(expr->arguments()->length() == 0);
3044  Label exit;
3045  // Get the number of formal parameters.
3046  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3047 
3048  // Check if the calling frame is an arguments adaptor frame.
3052  __ B(ne, &exit);
3053 
3054  // Arguments adaptor case: Read the arguments length from the
3055  // adaptor frame.
3057 
3058  __ Bind(&exit);
3059  context()->Plug(x0);
3060 }
3061 
3062 
3063 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3064  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3065  ZoneList<Expression*>* args = expr->arguments();
3066  ASSERT(args->length() == 1);
3067  Label done, null, function, non_function_constructor;
3068 
3069  VisitForAccumulatorValue(args->at(0));
3070 
3071  // If the object is a smi, we return null.
3072  __ JumpIfSmi(x0, &null);
3073 
3074  // Check that the object is a JS object but take special care of JS
3075  // functions to make sure they have 'Function' as their class.
3076  // Assume that there are only two callable types, and one of them is at
3077  // either end of the type range for JS object types. Saves extra comparisons.
3079  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3080  // x10: object's map.
3081  // x11: object's type.
3082  __ B(lt, &null);
3085  __ B(eq, &function);
3086 
3087  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3089  LAST_SPEC_OBJECT_TYPE - 1);
3090  __ B(eq, &function);
3091  // Assume that there is no larger type.
3093 
3094  // Check if the constructor in the map is a JS function.
3095  __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3096  __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3097  &non_function_constructor);
3098 
3099  // x12 now contains the constructor function. Grab the
3100  // instance class name from there.
3102  __ Ldr(x0,
3104  __ B(&done);
3105 
3106  // Functions have class 'Function'.
3107  __ Bind(&function);
3108  __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
3109  __ B(&done);
3110 
3111  // Objects with a non-function constructor have class 'Object'.
3112  __ Bind(&non_function_constructor);
3113  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3114  __ B(&done);
3115 
3116  // Non-JS objects have class null.
3117  __ Bind(&null);
3118  __ LoadRoot(x0, Heap::kNullValueRootIndex);
3119 
3120  // All done.
3121  __ Bind(&done);
3122 
3123  context()->Plug(x0);
3124 }
3125 
3126 
3127 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3128  // Conditionally generate a log call.
3129  // Args:
3130  // 0 (literal string): The type of logging (corresponds to the flags).
3131  // This is used to determine whether or not to generate the log call.
3132  // 1 (string): Format string. Access the string at argument index 2
3133  // with '%2s' (see Logger::LogRuntime for all the formats).
3134  // 2 (array): Arguments to the format string.
3135  ZoneList<Expression*>* args = expr->arguments();
3136  ASSERT_EQ(args->length(), 3);
3137  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3138  VisitForStackValue(args->at(1));
3139  VisitForStackValue(args->at(2));
3140  __ CallRuntime(Runtime::kHiddenLog, 2);
3141  }
3142 
3143  // Finally, we're expected to leave a value on the top of the stack.
3144  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3145  context()->Plug(x0);
3146 }
3147 
3148 
3149 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3150  // Load the arguments on the stack and call the stub.
3151  SubStringStub stub;
3152  ZoneList<Expression*>* args = expr->arguments();
3153  ASSERT(args->length() == 3);
3154  VisitForStackValue(args->at(0));
3155  VisitForStackValue(args->at(1));
3156  VisitForStackValue(args->at(2));
3157  __ CallStub(&stub);
3158  context()->Plug(x0);
3159 }
3160 
3161 
3162 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3163  // Load the arguments on the stack and call the stub.
3164  RegExpExecStub stub;
3165  ZoneList<Expression*>* args = expr->arguments();
3166  ASSERT(args->length() == 4);
3167  VisitForStackValue(args->at(0));
3168  VisitForStackValue(args->at(1));
3169  VisitForStackValue(args->at(2));
3170  VisitForStackValue(args->at(3));
3171  __ CallStub(&stub);
3172  context()->Plug(x0);
3173 }
3174 
3175 
3176 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3177  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3178  ZoneList<Expression*>* args = expr->arguments();
3179  ASSERT(args->length() == 1);
3180  VisitForAccumulatorValue(args->at(0)); // Load the object.
3181 
3182  Label done;
3183  // If the object is a smi, return the object.
3184  __ JumpIfSmi(x0, &done);
3185  // If the object is not a value type, return the object.
3186  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3188 
3189  __ Bind(&done);
3190  context()->Plug(x0);
3191 }
3192 
3193 
3194 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3195  ZoneList<Expression*>* args = expr->arguments();
3196  ASSERT(args->length() == 2);
3197  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3198  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3199 
3200  VisitForAccumulatorValue(args->at(0)); // Load the object.
3201 
3202  Label runtime, done, not_date_object;
3203  Register object = x0;
3204  Register result = x0;
3205  Register stamp_addr = x10;
3206  Register stamp_cache = x11;
3207 
3208  __ JumpIfSmi(object, &not_date_object);
3209  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3210 
3211  if (index->value() == 0) {
3212  __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3213  __ B(&done);
3214  } else {
3215  if (index->value() < JSDate::kFirstUncachedField) {
3216  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3217  __ Mov(x10, stamp);
3218  __ Ldr(stamp_addr, MemOperand(x10));
3219  __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3220  __ Cmp(stamp_addr, stamp_cache);
3221  __ B(ne, &runtime);
3222  __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3223  kPointerSize * index->value()));
3224  __ B(&done);
3225  }
3226 
3227  __ Bind(&runtime);
3228  __ Mov(x1, index);
3229  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3230  __ B(&done);
3231  }
3232 
3233  __ Bind(&not_date_object);
3234  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3235  __ Bind(&done);
3236  context()->Plug(x0);
3237 }
3238 
3239 
3240 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3241  ZoneList<Expression*>* args = expr->arguments();
3242  ASSERT_EQ(3, args->length());
3243 
3244  Register string = x0;
3245  Register index = x1;
3246  Register value = x2;
3247  Register scratch = x10;
3248 
3249  VisitForStackValue(args->at(1)); // index
3250  VisitForStackValue(args->at(2)); // value
3251  VisitForAccumulatorValue(args->at(0)); // string
3252  __ Pop(value, index);
3253 
3254  if (FLAG_debug_code) {
3255  __ AssertSmi(value, kNonSmiValue);
3256  __ AssertSmi(index, kNonSmiIndex);
3257  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3258  __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3259  one_byte_seq_type);
3260  }
3261 
3262  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3263  __ SmiUntag(value);
3264  __ SmiUntag(index);
3265  __ Strb(value, MemOperand(scratch, index));
3266  context()->Plug(string);
3267 }
3268 
3269 
3270 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3271  ZoneList<Expression*>* args = expr->arguments();
3272  ASSERT_EQ(3, args->length());
3273 
3274  Register string = x0;
3275  Register index = x1;
3276  Register value = x2;
3277  Register scratch = x10;
3278 
3279  VisitForStackValue(args->at(1)); // index
3280  VisitForStackValue(args->at(2)); // value
3281  VisitForAccumulatorValue(args->at(0)); // string
3282  __ Pop(value, index);
3283 
3284  if (FLAG_debug_code) {
3285  __ AssertSmi(value, kNonSmiValue);
3286  __ AssertSmi(index, kNonSmiIndex);
3287  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3288  __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3289  two_byte_seq_type);
3290  }
3291 
3292  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3293  __ SmiUntag(value);
3294  __ SmiUntag(index);
3295  __ Strh(value, MemOperand(scratch, index, LSL, 1));
3296  context()->Plug(string);
3297 }
3298 
3299 
3300 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3301  // Load the arguments on the stack and call the MathPow stub.
3302  ZoneList<Expression*>* args = expr->arguments();
3303  ASSERT(args->length() == 2);
3304  VisitForStackValue(args->at(0));
3305  VisitForStackValue(args->at(1));
3306  MathPowStub stub(MathPowStub::ON_STACK);
3307  __ CallStub(&stub);
3308  context()->Plug(x0);
3309 }
3310 
3311 
3312 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3313  ZoneList<Expression*>* args = expr->arguments();
3314  ASSERT(args->length() == 2);
3315  VisitForStackValue(args->at(0)); // Load the object.
3316  VisitForAccumulatorValue(args->at(1)); // Load the value.
3317  __ Pop(x1);
3318  // x0 = value.
3319  // x1 = object.
3320 
3321  Label done;
3322  // If the object is a smi, return the value.
3323  __ JumpIfSmi(x1, &done);
3324 
3325  // If the object is not a value type, return the value.
3326  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3327 
3328  // Store the value.
3330  // Update the write barrier. Save the value as it will be
3331  // overwritten by the write barrier code and is needed afterward.
3332  __ Mov(x10, x0);
3333  __ RecordWriteField(
3335 
3336  __ Bind(&done);
3337  context()->Plug(x0);
3338 }
3339 
3340 
3341 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3342  ZoneList<Expression*>* args = expr->arguments();
3343  ASSERT_EQ(args->length(), 1);
3344 
3345  // Load the argument into x0 and call the stub.
3346  VisitForAccumulatorValue(args->at(0));
3347 
3348  NumberToStringStub stub;
3349  __ CallStub(&stub);
3350  context()->Plug(x0);
3351 }
3352 
3353 
3354 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3355  ZoneList<Expression*>* args = expr->arguments();
3356  ASSERT(args->length() == 1);
3357 
3358  VisitForAccumulatorValue(args->at(0));
3359 
3360  Label done;
3361  Register code = x0;
3362  Register result = x1;
3363 
3364  StringCharFromCodeGenerator generator(code, result);
3365  generator.GenerateFast(masm_);
3366  __ B(&done);
3367 
3368  NopRuntimeCallHelper call_helper;
3369  generator.GenerateSlow(masm_, call_helper);
3370 
3371  __ Bind(&done);
3372  context()->Plug(result);
3373 }
3374 
3375 
3376 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3377  ZoneList<Expression*>* args = expr->arguments();
3378  ASSERT(args->length() == 2);
3379 
3380  VisitForStackValue(args->at(0));
3381  VisitForAccumulatorValue(args->at(1));
3382 
3383  Register object = x1;
3384  Register index = x0;
3385  Register result = x3;
3386 
3387  __ Pop(object);
3388 
3389  Label need_conversion;
3390  Label index_out_of_range;
3391  Label done;
3392  StringCharCodeAtGenerator generator(object,
3393  index,
3394  result,
3395  &need_conversion,
3396  &need_conversion,
3397  &index_out_of_range,
3398  STRING_INDEX_IS_NUMBER);
3399  generator.GenerateFast(masm_);
3400  __ B(&done);
3401 
3402  __ Bind(&index_out_of_range);
3403  // When the index is out of range, the spec requires us to return NaN.
3404  __ LoadRoot(result, Heap::kNanValueRootIndex);
3405  __ B(&done);
3406 
3407  __ Bind(&need_conversion);
3408  // Load the undefined value into the result register, which will
3409  // trigger conversion.
3410  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3411  __ B(&done);
3412 
3413  NopRuntimeCallHelper call_helper;
3414  generator.GenerateSlow(masm_, call_helper);
3415 
3416  __ Bind(&done);
3417  context()->Plug(result);
3418 }
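 // Note: the fast path above handles the common case of a flat string indexed
 // by an in-range smi; everything else falls back to the runtime through
 // GenerateSlow. For example, "abc".charCodeAt(1) is 98, while
 // "abc".charCodeAt(10) takes the index_out_of_range path and yields NaN.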
3419 
3420 
3421 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3422  ZoneList<Expression*>* args = expr->arguments();
3423  ASSERT(args->length() == 2);
3424 
3425  VisitForStackValue(args->at(0));
3426  VisitForAccumulatorValue(args->at(1));
3427 
3428  Register object = x1;
3429  Register index = x0;
3430  Register result = x0;
3431 
3432  __ Pop(object);
3433 
3434  Label need_conversion;
3435  Label index_out_of_range;
3436  Label done;
3437  StringCharAtGenerator generator(object,
3438  index,
3439  x3,
3440  result,
3441  &need_conversion,
3442  &need_conversion,
3443  &index_out_of_range,
3444  STRING_INDEX_IS_NUMBER);
3445  generator.GenerateFast(masm_);
3446  __ B(&done);
3447 
3448  __ Bind(&index_out_of_range);
3449  // When the index is out of range, the spec requires us to return
3450  // the empty string.
3451  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3452  __ B(&done);
3453 
3454  __ Bind(&need_conversion);
3455  // Move smi zero into the result register, which will trigger conversion.
3456  __ Mov(result, Smi::FromInt(0));
3457  __ B(&done);
3458 
3459  NopRuntimeCallHelper call_helper;
3460  generator.GenerateSlow(masm_, call_helper);
3461 
3462  __ Bind(&done);
3463  context()->Plug(result);
3464 }
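 // Note: same shape as EmitStringCharCodeAt above, except the generator also
 // converts the char code into a one-character string, and an out-of-range
 // index produces the empty string (e.g. "abc".charAt(10) is "").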
3465 
3466 
3467 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3468  ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3469  ZoneList<Expression*>* args = expr->arguments();
3470  ASSERT_EQ(2, args->length());
3471 
3472  VisitForStackValue(args->at(0));
3473  VisitForAccumulatorValue(args->at(1));
3474 
3475  __ Pop(x1);
3476  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3477  __ CallStub(&stub);
3478 
3479  context()->Plug(x0);
3480 }
3481 
3482 
3483 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3484  ZoneList<Expression*>* args = expr->arguments();
3485  ASSERT_EQ(2, args->length());
3486  VisitForStackValue(args->at(0));
3487  VisitForStackValue(args->at(1));
3488 
3489  StringCompareStub stub;
3490  __ CallStub(&stub);
3491  context()->Plug(x0);
3492 }
3493 
3494 
3495 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3496  // Load the argument on the stack and call the runtime function.
3497  ZoneList<Expression*>* args = expr->arguments();
3498  ASSERT(args->length() == 1);
3499  VisitForStackValue(args->at(0));
3500  __ CallRuntime(Runtime::kMath_log, 1);
3501  context()->Plug(x0);
3502 }
3503 
3504 
3505 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3506  // Load the argument on the stack and call the runtime function.
3507  ZoneList<Expression*>* args = expr->arguments();
3508  ASSERT(args->length() == 1);
3509  VisitForStackValue(args->at(0));
3510  __ CallRuntime(Runtime::kMath_sqrt, 1);
3511  context()->Plug(x0);
3512 }
3513 
3514 
3515 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3516  ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3517  ZoneList<Expression*>* args = expr->arguments();
3518  ASSERT(args->length() >= 2);
3519 
3520  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3521  for (int i = 0; i < arg_count + 1; i++) {
3522  VisitForStackValue(args->at(i));
3523  }
3524  VisitForAccumulatorValue(args->last()); // Function.
3525 
3526  Label runtime, done;
3527  // Check for non-function argument (including proxy).
3528  __ JumpIfSmi(x0, &runtime);
3529  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3530 
3531  // InvokeFunction requires the function in x1. Move it in there.
3532  __ Mov(x1, x0);
3533  ParameterCount count(arg_count);
3534  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3535  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3536  __ B(&done);
3537 
3538  __ Bind(&runtime);
3539  __ Push(x0);
3540  __ CallRuntime(Runtime::kCall, args->length());
3541  __ Bind(&done);
3542 
3543  context()->Plug(x0);
3544 }
3545 
3546 
3547 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3548  RegExpConstructResultStub stub;
3549  ZoneList<Expression*>* args = expr->arguments();
3550  ASSERT(args->length() == 3);
3551  VisitForStackValue(args->at(0));
3552  VisitForStackValue(args->at(1));
3553  VisitForAccumulatorValue(args->at(2));
3554  __ Pop(x1, x2);
3555  __ CallStub(&stub);
3556  context()->Plug(x0);
3557 }
3558 
3559 
3560 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3561  ZoneList<Expression*>* args = expr->arguments();
3562  ASSERT_EQ(2, args->length());
3563  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3564  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3565 
3566  Handle<FixedArray> jsfunction_result_caches(
3567  isolate()->native_context()->jsfunction_result_caches());
3568  if (jsfunction_result_caches->length() <= cache_id) {
3569  __ Abort(kAttemptToUseUndefinedCache);
3570  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3571  context()->Plug(x0);
3572  return;
3573  }
3574 
3575  VisitForAccumulatorValue(args->at(1));
3576 
3577  Register key = x0;
3578  Register cache = x1;
3579  __ Ldr(cache, GlobalObjectMemOperand());
3580  __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3581  __ Ldr(cache, ContextMemOperand(cache,
3582  Context::JSFUNCTION_RESULT_CACHES_INDEX));
3583  __ Ldr(cache,
3584  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3585 
3586  Label done;
3587  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3588  JSFunctionResultCache::kFingerOffset));
3589  __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3590  __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3591 
3592  // Load the key and data from the cache.
3593  __ Ldp(x2, x3, MemOperand(x3));
3594 
3595  __ Cmp(key, x2);
3596  __ CmovX(x0, x3, eq);
3597  __ B(eq, &done);
3598 
3599  // Call runtime to perform the lookup.
3600  __ Push(cache, key);
3601  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3602 
3603  __ Bind(&done);
3604  context()->Plug(x0);
3605 }
3606 
3607 
3608 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3609  ZoneList<Expression*>* args = expr->arguments();
3610  VisitForAccumulatorValue(args->at(0));
3611 
3612  Label materialize_true, materialize_false;
3613  Label* if_true = NULL;
3614  Label* if_false = NULL;
3615  Label* fall_through = NULL;
3616  context()->PrepareTest(&materialize_true, &materialize_false,
3617  &if_true, &if_false, &fall_through);
3618 
3619  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3620  __ Tst(x10, String::kContainsCachedArrayIndexMask);
3621  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3622  Split(eq, if_true, if_false, fall_through);
3623 
3624  context()->Plug(if_true, if_false);
3625 }
3626 
3627 
3628 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3629  ZoneList<Expression*>* args = expr->arguments();
3630  ASSERT(args->length() == 1);
3631  VisitForAccumulatorValue(args->at(0));
3632 
3633  __ AssertString(x0);
3634 
3635  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3636  __ IndexFromHash(x10, x0);
3637 
3638  context()->Plug(x0);
3639 }
3640 
3641 
3642 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3643  ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3644 
3645  ZoneList<Expression*>* args = expr->arguments();
3646  ASSERT(args->length() == 2);
3647  VisitForStackValue(args->at(1));
3648  VisitForAccumulatorValue(args->at(0));
3649 
3650  Register array = x0;
3651  Register result = x0;
3652  Register elements = x1;
3653  Register element = x2;
3654  Register separator = x3;
3655  Register array_length = x4;
3656  Register result_pos = x5;
3657  Register map = x6;
3658  Register string_length = x10;
3659  Register elements_end = x11;
3660  Register string = x12;
3661  Register scratch1 = x13;
3662  Register scratch2 = x14;
3663  Register scratch3 = x7;
3664  Register separator_length = x15;
3665 
3666  Label bailout, done, one_char_separator, long_separator,
3667  non_trivial_array, not_size_one_array, loop,
3668  empty_separator_loop, one_char_separator_loop,
3669  one_char_separator_loop_entry, long_separator_loop;
3670 
3671  // The separator operand is on the stack.
3672  __ Pop(separator);
3673 
3674  // Check that the array is a JSArray.
3675  __ JumpIfSmi(array, &bailout);
3676  __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3677 
3678  // Check that the array has fast elements.
3679  __ CheckFastElements(map, scratch1, &bailout);
3680 
3681  // If the array has length zero, return the empty string.
3682  // Load and untag the length of the array.
3683  // It is an unsigned value, so we can skip sign extension.
3684  // We assume little endianness.
3685  __ Ldrsw(array_length,
3686  UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3687  __ Cbnz(array_length, &non_trivial_array);
3688  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3689  __ B(&done);
3690 
3691  __ Bind(&non_trivial_array);
3692  // Get the FixedArray containing array's elements.
3693  __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3694 
3695  // Check that all array elements are sequential ASCII strings, and
3696  // accumulate the sum of their lengths.
3697  __ Mov(string_length, 0);
3698  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3699  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3700  // Loop condition: while (element < elements_end).
3701  // Live values in registers:
3702  // elements: Fixed array of strings.
3703  // array_length: Length of the fixed array of strings (not smi)
3704  // separator: Separator string
3705  // string_length: Accumulated sum of string lengths (not smi).
3706  // element: Current array element.
3707  // elements_end: Array end.
3708  if (FLAG_debug_code) {
3709  __ Cmp(array_length, 0);
3710  __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3711  }
3712  __ Bind(&loop);
3713  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3714  __ JumpIfSmi(string, &bailout);
3715  __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3716  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3717  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3718  __ Ldrsw(scratch1,
3719  UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3720  __ Adds(string_length, string_length, scratch1);
3721  __ B(vs, &bailout);
3722  __ Cmp(element, elements_end);
3723  __ B(lt, &loop);
3724 
3725  // If array_length is 1, return elements[0], a string.
3726  __ Cmp(array_length, 1);
3727  __ B(ne, &not_size_one_array);
3728  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3729  __ B(&done);
3730 
3731  __ Bind(&not_size_one_array);
3732 
3733  // Live values in registers:
3734  // separator: Separator string
3735  // array_length: Length of the array (not smi).
3736  // string_length: Sum of string lengths (not smi).
3737  // elements: FixedArray of strings.
3738 
3739  // Check that the separator is a flat ASCII string.
3740  __ JumpIfSmi(separator, &bailout);
3741  __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3742  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3743  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3744 
3745  // Add (separator length times array_length) - separator length to the
3746  // string_length to get the length of the result string.
3747  // Load the separator length as untagged.
3748  // We assume little endianness, and that the length is positive.
3749  __ Ldrsw(separator_length,
3750  UntagSmiFieldMemOperand(separator,
3751  SeqOneByteString::kLengthOffset));
3752  __ Sub(string_length, string_length, separator_length);
3753  __ Umaddl(string_length, array_length.W(), separator_length.W(),
3754  string_length);
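 // Worked example: joining ["a", "bb", "ccc"] with ", " gives
 // string_length = (1 + 2 + 3) - 2 + 3 * 2 = 10, the length of "a, bb, ccc".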
3755 
3756  // Get first element in the array.
3757  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3758  // Live values in registers:
3759  // element: First array element
3760  // separator: Separator string
3761  // string_length: Length of result string (not smi)
3762  // array_length: Length of the array (not smi).
3763  __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
3764  &bailout);
3765 
3766  // Prepare for looping. Set up elements_end to end of the array. Set
3767  // result_pos to the position of the result where to write the first
3768  // character.
3769  // TODO(all): useless unless AllocateAsciiString trashes the register.
3770  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3771  __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3772 
3773  // Check the length of the separator.
3774  __ Cmp(separator_length, 1);
3775  __ B(eq, &one_char_separator);
3776  __ B(gt, &long_separator);
3777 
3778  // Empty separator case
3779  __ Bind(&empty_separator_loop);
3780  // Live values in registers:
3781  // result_pos: the position to which we are currently copying characters.
3782  // element: Current array element.
3783  // elements_end: Array end.
3784 
3785  // Copy next array element to the result.
3786  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3787  __ Ldrsw(string_length,
3788  UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3789  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3790  __ CopyBytes(result_pos, string, string_length, scratch1);
3791  __ Cmp(element, elements_end);
3792  __ B(lt, &empty_separator_loop); // End while (element < elements_end).
3793  __ B(&done);
3794 
3795  // One-character separator case
3796  __ Bind(&one_char_separator);
3797  // Replace separator with its ASCII character value.
3798  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3799  // Jump into the loop after the code that copies the separator, so the first
3800  // element is not preceded by a separator
3801  __ B(&one_char_separator_loop_entry);
3802 
3803  __ Bind(&one_char_separator_loop);
3804  // Live values in registers:
3805  // result_pos: the position to which we are currently copying characters.
3806  // element: Current array element.
3807  // elements_end: Array end.
3808  // separator: Single separator ASCII char (in lower byte).
3809 
3810  // Copy the separator character to the result.
3811  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
3812 
3813  // Copy next array element to the result.
3814  __ Bind(&one_char_separator_loop_entry);
3815  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3816  __ Ldrsw(string_length,
3817  UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3818  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3819  __ CopyBytes(result_pos, string, string_length, scratch1);
3820  __ Cmp(element, elements_end);
3821  __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
3822  __ B(&done);
3823 
3824  // Long separator case (separator is more than one character). Entry is at the
3825  // label long_separator below.
3826  __ Bind(&long_separator_loop);
3827  // Live values in registers:
3828  // result_pos: the position to which we are currently copying characters.
3829  // element: Current array element.
3830  // elements_end: Array end.
3831  // separator: Separator string.
3832 
3833  // Copy the separator to the result.
3834  // TODO(all): hoist next two instructions.
3835  __ Ldrsw(string_length,
3836  UntagSmiFieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3837  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3838  __ CopyBytes(result_pos, string, string_length, scratch1);
3839 
3840  __ Bind(&long_separator);
3841  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3842  __ Ldrsw(string_length,
3843  UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3844  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3845  __ CopyBytes(result_pos, string, string_length, scratch1);
3846  __ Cmp(element, elements_end);
3847  __ B(lt, &long_separator_loop); // End while (element < elements_end).
3848  __ B(&done);
3849 
3850  __ Bind(&bailout);
3851  // Returning undefined will force slower code to handle it.
3852  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3853  __ Bind(&done);
3854  context()->Plug(result);
3855 }
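 // Note: the three copy loops above differ only in how the separator is
 // emitted between elements: not at all, a single Strb of the cached separator
 // character, or a CopyBytes of the whole separator string. Any non-flat or
 // non-ASCII input bails out, and the undefined result makes the caller fall
 // back to the generic (slow) join.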
3856 
3857 
3858 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3859  if (expr->function() != NULL &&
3860  expr->function()->intrinsic_type == Runtime::INLINE) {
3861  Comment cmnt(masm_, "[ InlineRuntimeCall");
3862  EmitInlineRuntimeCall(expr);
3863  return;
3864  }
3865 
3866  Comment cmnt(masm_, "[ CallRuntime");
3867  ZoneList<Expression*>* args = expr->arguments();
3868  int arg_count = args->length();
3869 
3870  if (expr->is_jsruntime()) {
3871  // Push the builtins object as the receiver.
3872  __ Ldr(x10, GlobalObjectMemOperand());
3873  __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
3874  __ Push(x0);
3875 
3876  // Load the function from the receiver.
3877  Handle<String> name = expr->name();
3878  __ Mov(x2, Operand(name));
3879  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
3880 
3881  // Push the target function under the receiver.
3882  __ Pop(x10);
3883  __ Push(x0, x10);
3884 
3885  int arg_count = args->length();
3886  for (int i = 0; i < arg_count; i++) {
3887  VisitForStackValue(args->at(i));
3888  }
3889 
3890  // Record source position of the IC call.
3891  SetSourcePosition(expr->position());
3892  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
3893  __ Peek(x1, (arg_count + 1) * kPointerSize);
3894  __ CallStub(&stub);
3895 
3896  // Restore context register.
3897  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3898 
3899  context()->DropAndPlug(1, x0);
3900  } else {
3901  // Push the arguments ("left-to-right").
3902  for (int i = 0; i < arg_count; i++) {
3903  VisitForStackValue(args->at(i));
3904  }
3905 
3906  // Call the C runtime function.
3907  __ CallRuntime(expr->function(), arg_count);
3908  context()->Plug(x0);
3909  }
3910 }
3911 
3912 
3913 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3914  switch (expr->op()) {
3915  case Token::DELETE: {
3916  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3917  Property* property = expr->expression()->AsProperty();
3918  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3919 
3920  if (property != NULL) {
3921  VisitForStackValue(property->obj());
3922  VisitForStackValue(property->key());
3923  __ Mov(x10, Smi::FromInt(strict_mode()));
3924  __ Push(x10);
3925  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3926  context()->Plug(x0);
3927  } else if (proxy != NULL) {
3928  Variable* var = proxy->var();
3929  // Delete of an unqualified identifier is disallowed in strict mode
3930  // but "delete this" is allowed.
3931  ASSERT(strict_mode() == SLOPPY || var->is_this());
3932  if (var->IsUnallocated()) {
3933  __ Ldr(x12, GlobalObjectMemOperand());
3934  __ Mov(x11, Operand(var->name()));
3935  __ Mov(x10, Smi::FromInt(SLOPPY));
3936  __ Push(x12, x11, x10);
3937  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3938  context()->Plug(x0);
3939  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3940  // Result of deleting non-global, non-dynamic variables is false.
3941  // The subexpression does not have side effects.
3942  context()->Plug(var->is_this());
3943  } else {
3944  // Non-global variable. Call the runtime to try to delete from the
3945  // context where the variable was introduced.
3946  __ Mov(x2, Operand(var->name()));
3947  __ Push(context_register(), x2);
3948  __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
3949  context()->Plug(x0);
3950  }
3951  } else {
3952  // Result of deleting non-property, non-variable reference is true.
3953  // The subexpression may have side effects.
3954  VisitForEffect(expr->expression());
3955  context()->Plug(true);
3956  }
3957  break;
3959  }
3960  case Token::VOID: {
3961  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3962  VisitForEffect(expr->expression());
3963  context()->Plug(Heap::kUndefinedValueRootIndex);
3964  break;
3965  }
3966  case Token::NOT: {
3967  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3968  if (context()->IsEffect()) {
3969  // Unary NOT has no side effects so it's only necessary to visit the
3970  // subexpression. Match the optimizing compiler by not branching.
3971  VisitForEffect(expr->expression());
3972  } else if (context()->IsTest()) {
3973  const TestContext* test = TestContext::cast(context());
3974  // The labels are swapped for the recursive call.
3975  VisitForControl(expr->expression(),
3976  test->false_label(),
3977  test->true_label(),
3978  test->fall_through());
3979  context()->Plug(test->true_label(), test->false_label());
3980  } else {
3981  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3982  // TODO(jbramley): This could be much more efficient using (for
3983  // example) the CSEL instruction.
3984  Label materialize_true, materialize_false, done;
3985  VisitForControl(expr->expression(),
3986  &materialize_false,
3987  &materialize_true,
3988  &materialize_true);
3989 
3990  __ Bind(&materialize_true);
3991  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3992  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
3993  __ B(&done);
3994 
3995  __ Bind(&materialize_false);
3996  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3997  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
3998  __ B(&done);
3999 
4000  __ Bind(&done);
4001  if (context()->IsStackValue()) {
4002  __ Push(result_register());
4003  }
4004  }
4005  break;
4006  }
4007  case Token::TYPEOF: {
4008  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4009  {
4010  StackValueContext context(this);
4011  VisitForTypeofValue(expr->expression());
4012  }
4013  __ CallRuntime(Runtime::kTypeof, 1);
4014  context()->Plug(x0);
4015  break;
4016  }
4017  default:
4018  UNREACHABLE();
4019  }
4020 }
4021 
4022 
4023 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4024  ASSERT(expr->expression()->IsValidLeftHandSide());
4025 
4026  Comment cmnt(masm_, "[ CountOperation");
4027  SetSourcePosition(expr->position());
4028 
4029  // Expression can only be a property, a global or a (parameter or local)
4030  // slot.
4031  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4032  LhsKind assign_type = VARIABLE;
4033  Property* prop = expr->expression()->AsProperty();
4034  // In case of a property we use the uninitialized expression context
4035  // of the key to detect a named property.
4036  if (prop != NULL) {
4037  assign_type =
4038  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4039  }
4040 
4041  // Evaluate expression and get value.
4042  if (assign_type == VARIABLE) {
4043  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4044  AccumulatorValueContext context(this);
4045  EmitVariableLoad(expr->expression()->AsVariableProxy());
4046  } else {
4047  // Reserve space for result of postfix operation.
4048  if (expr->is_postfix() && !context()->IsEffect()) {
4049  __ Push(xzr);
4050  }
4051  if (assign_type == NAMED_PROPERTY) {
4052  // Put the object both on the stack and in the accumulator.
4053  VisitForAccumulatorValue(prop->obj());
4054  __ Push(x0);
4055  EmitNamedPropertyLoad(prop);
4056  } else {
4057  // KEYED_PROPERTY
4058  VisitForStackValue(prop->obj());
4059  VisitForAccumulatorValue(prop->key());
4060  __ Peek(x1, 0);
4061  __ Push(x0);
4062  EmitKeyedPropertyLoad(prop);
4063  }
4064  }
4065 
4066  // We need a second deoptimization point after loading the value
4067  // in case evaluating the property load may have a side effect.
4068  if (assign_type == VARIABLE) {
4069  PrepareForBailout(expr->expression(), TOS_REG);
4070  } else {
4071  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4072  }
4073 
4074  // Inline smi case if we are in a loop.
4075  Label stub_call, done;
4076  JumpPatchSite patch_site(masm_);
4077 
4078  int count_value = expr->op() == Token::INC ? 1 : -1;
4079  if (ShouldInlineSmiCase(expr->op())) {
4080  Label slow;
4081  patch_site.EmitJumpIfNotSmi(x0, &slow);
4082 
4083  // Save result for postfix expressions.
4084  if (expr->is_postfix()) {
4085  if (!context()->IsEffect()) {
4086  // Save the result on the stack. If we have a named or keyed property we
4087  // store the result under the receiver that is currently on top of the
4088  // stack.
4089  switch (assign_type) {
4090  case VARIABLE:
4091  __ Push(x0);
4092  break;
4093  case NAMED_PROPERTY:
4094  __ Poke(x0, kPointerSize);
4095  break;
4096  case KEYED_PROPERTY:
4097  __ Poke(x0, kPointerSize * 2);
4098  break;
4099  }
4100  }
4101  }
4102 
4103  __ Adds(x0, x0, Smi::FromInt(count_value));
4104  __ B(vc, &done);
4105  // Call stub. Undo operation first.
4106  __ Sub(x0, x0, Smi::FromInt(count_value));
4107  __ B(&stub_call);
4108  __ Bind(&slow);
4109  }
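 // Note on the inline smi path above: Adds sets the flags, so B(vc) exits when
 // the smi addition did not overflow. On overflow the count is undone first,
 // so the BinaryOpIC stub at stub_call sees the original operand (in x1).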
4110  ToNumberStub convert_stub;
4111  __ CallStub(&convert_stub);
4112 
4113  // Save result for postfix expressions.
4114  if (expr->is_postfix()) {
4115  if (!context()->IsEffect()) {
4116  // Save the result on the stack. If we have a named or keyed property
4117  // we store the result under the receiver that is currently on top
4118  // of the stack.
4119  switch (assign_type) {
4120  case VARIABLE:
4121  __ Push(x0);
4122  break;
4123  case NAMED_PROPERTY:
4124  __ Poke(x0, kXRegSize);
4125  break;
4126  case KEYED_PROPERTY:
4127  __ Poke(x0, 2 * kXRegSize);
4128  break;
4129  }
4130  }
4131  }
4132 
4133  __ Bind(&stub_call);
4134  __ Mov(x1, x0);
4135  __ Mov(x0, Smi::FromInt(count_value));
4136 
4137  // Record position before stub call.
4138  SetSourcePosition(expr->position());
4139 
4140  {
4141  Assembler::BlockPoolsScope scope(masm_);
4142  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4143  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4144  patch_site.EmitPatchInfo();
4145  }
4146  __ Bind(&done);
4147 
4148  // Store the value returned in x0.
4149  switch (assign_type) {
4150  case VARIABLE:
4151  if (expr->is_postfix()) {
4152  { EffectContext context(this);
4153  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4154  Token::ASSIGN);
4155  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4156  context.Plug(x0);
4157  }
4158  // For all contexts except EffectContext we have the result on
4159  // top of the stack.
4160  if (!context()->IsEffect()) {
4161  context()->PlugTOS();
4162  }
4163  } else {
4164  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4165  Token::ASSIGN);
4166  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4167  context()->Plug(x0);
4168  }
4169  break;
4170  case NAMED_PROPERTY: {
4171  __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
4172  __ Pop(x1);
4173  CallStoreIC(expr->CountStoreFeedbackId());
4174  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4175  if (expr->is_postfix()) {
4176  if (!context()->IsEffect()) {
4177  context()->PlugTOS();
4178  }
4179  } else {
4180  context()->Plug(x0);
4181  }
4182  break;
4183  }
4184  case KEYED_PROPERTY: {
4185  __ Pop(x1); // Key.
4186  __ Pop(x2); // Receiver.
4187  Handle<Code> ic = strict_mode() == SLOPPY
4188  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4189  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4190  CallIC(ic, expr->CountStoreFeedbackId());
4191  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4192  if (expr->is_postfix()) {
4193  if (!context()->IsEffect()) {
4194  context()->PlugTOS();
4195  }
4196  } else {
4197  context()->Plug(x0);
4198  }
4199  break;
4200  }
4201  }
4202 }
4203 
4204 
4205 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4206  ASSERT(!context()->IsEffect());
4207  ASSERT(!context()->IsTest());
4208  VariableProxy* proxy = expr->AsVariableProxy();
4209  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4210  Comment cmnt(masm_, "Global variable");
4211  __ Ldr(x0, GlobalObjectMemOperand());
4212  __ Mov(x2, Operand(proxy->name()));
4213  // Use a regular load, not a contextual load, to avoid a reference
4214  // error.
4215  CallLoadIC(NOT_CONTEXTUAL);
4216  PrepareForBailout(expr, TOS_REG);
4217  context()->Plug(x0);
4218  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4219  Label done, slow;
4220 
4221  // Generate code for loading from variables potentially shadowed
4222  // by eval-introduced variables.
4223  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4224 
4225  __ Bind(&slow);
4226  __ Mov(x0, Operand(proxy->name()));
4227  __ Push(cp, x0);
4228  __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4229  PrepareForBailout(expr, TOS_REG);
4230  __ Bind(&done);
4231 
4232  context()->Plug(x0);
4233  } else {
4234  // This expression cannot throw a reference error at the top level.
4235  VisitInDuplicateContext(expr);
4236  }
4237 }
4238 
4239 
4240 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4241  Expression* sub_expr,
4242  Handle<String> check) {
4243  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4244  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4245  Label materialize_true, materialize_false;
4246  Label* if_true = NULL;
4247  Label* if_false = NULL;
4248  Label* fall_through = NULL;
4249  context()->PrepareTest(&materialize_true, &materialize_false,
4250  &if_true, &if_false, &fall_through);
4251 
4252  { AccumulatorValueContext context(this);
4253  VisitForTypeofValue(sub_expr);
4254  }
4255  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4256 
4257  if (check->Equals(isolate()->heap()->number_string())) {
4258  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4259  __ JumpIfSmi(x0, if_true);
4260  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4261  __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4262  Split(eq, if_true, if_false, fall_through);
4263  } else if (check->Equals(isolate()->heap()->string_string())) {
4264  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4265  __ JumpIfSmi(x0, if_false);
4266  // Check for undetectable objects => false.
4267  __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4268  __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4269  __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4270  fall_through);
4271  } else if (check->Equals(isolate()->heap()->symbol_string())) {
4272  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4273  __ JumpIfSmi(x0, if_false);
4274  __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4275  Split(eq, if_true, if_false, fall_through);
4276  } else if (check->Equals(isolate()->heap()->boolean_string())) {
4277  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4278  __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4279  __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4280  Split(eq, if_true, if_false, fall_through);
4281  } else if (FLAG_harmony_typeof &&
4282  check->Equals(isolate()->heap()->null_string())) {
4283  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
4284  __ CompareRoot(x0, Heap::kNullValueRootIndex);
4285  Split(eq, if_true, if_false, fall_through);
4286  } else if (check->Equals(isolate()->heap()->undefined_string())) {
4287  ASM_LOCATION(
4288  "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4289  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4290  __ JumpIfSmi(x0, if_false);
4291  // Check for undetectable objects => true.
4292  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4293  __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4294  __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4295  fall_through);
4296  } else if (check->Equals(isolate()->heap()->function_string())) {
4297  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4298  __ JumpIfSmi(x0, if_false);
4300  __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4301  __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4302  fall_through);
4303 
4304  } else if (check->Equals(isolate()->heap()->object_string())) {
4305  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4306  __ JumpIfSmi(x0, if_false);
4307  if (!FLAG_harmony_typeof) {
4308  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4309  }
4310  // Check for JS objects => true.
4311  Register map = x10;
4312  __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4313  if_false, lt);
4314  __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4315  __ B(gt, if_false);
4316  // Check for undetectable objects => false.
4317  __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4318 
4319  __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4320  fall_through);
4321 
4322  } else {
4323  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4324  if (if_false != fall_through) __ B(if_false);
4325  }
4326  context()->Plug(if_true, if_false);
4327 }
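 // Note: this inlines comparisons of the form (typeof x == "number") against a
 // literal string, so the typeof result string is never materialized; any
 // unrecognized literal simply branches to if_false.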
4328 
4329 
4330 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4331  Comment cmnt(masm_, "[ CompareOperation");
4332  SetSourcePosition(expr->position());
4333 
4334  // Try to generate an optimized comparison with a literal value.
4335  // TODO(jbramley): This only checks common values like NaN or undefined.
4336  // Should it also handle ARM64 immediate operands?
4337  if (TryLiteralCompare(expr)) {
4338  return;
4339  }
4340 
4341  // Assign labels according to context()->PrepareTest.
4342  Label materialize_true;
4343  Label materialize_false;
4344  Label* if_true = NULL;
4345  Label* if_false = NULL;
4346  Label* fall_through = NULL;
4347  context()->PrepareTest(&materialize_true, &materialize_false,
4348  &if_true, &if_false, &fall_through);
4349 
4350  Token::Value op = expr->op();
4351  VisitForStackValue(expr->left());
4352  switch (op) {
4353  case Token::IN:
4354  VisitForStackValue(expr->right());
4355  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4356  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4357  __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4358  Split(eq, if_true, if_false, fall_through);
4359  break;
4360 
4361  case Token::INSTANCEOF: {
4362  VisitForStackValue(expr->right());
4363  InstanceofStub stub(InstanceofStub::kNoFlags);
4364  __ CallStub(&stub);
4365  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4366  // The stub returns 0 for true.
4367  __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4368  break;
4369  }
4370 
4371  default: {
4372  VisitForAccumulatorValue(expr->right());
4373  Condition cond = CompareIC::ComputeCondition(op);
4374 
4375  // Pop the stack value.
4376  __ Pop(x1);
4377 
4378  JumpPatchSite patch_site(masm_);
4379  if (ShouldInlineSmiCase(op)) {
4380  Label slow_case;
4381  patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4382  __ Cmp(x1, x0);
4383  Split(cond, if_true, if_false, NULL);
4384  __ Bind(&slow_case);
4385  }
4386 
4387  // Record position and call the compare IC.
4388  SetSourcePosition(expr->position());
4389  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4390  CallIC(ic, expr->CompareOperationFeedbackId());
4391  patch_site.EmitPatchInfo();
4392  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4393  __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4394  }
4395  }
4396 
4397  // Convert the result of the comparison into one expected for this
4398  // expression's context.
4399  context()->Plug(if_true, if_false);
4400 }
4401 
4402 
4403 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4404  Expression* sub_expr,
4405  NilValue nil) {
4406  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4407  Label materialize_true, materialize_false;
4408  Label* if_true = NULL;
4409  Label* if_false = NULL;
4410  Label* fall_through = NULL;
4411  context()->PrepareTest(&materialize_true, &materialize_false,
4412  &if_true, &if_false, &fall_through);
4413 
4414  VisitForAccumulatorValue(sub_expr);
4415  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4416 
4417  if (expr->op() == Token::EQ_STRICT) {
4418  Heap::RootListIndex nil_value = nil == kNullValue ?
4419  Heap::kNullValueRootIndex :
4420  Heap::kUndefinedValueRootIndex;
4421  __ CompareRoot(x0, nil_value);
4422  Split(eq, if_true, if_false, fall_through);
4423  } else {
4424  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4425  CallIC(ic, expr->CompareOperationFeedbackId());
4426  __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4427  }
4428 
4429  context()->Plug(if_true, if_false);
4430 }
4431 
4432 
4433 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4434  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4435  context()->Plug(x0);
4436 }
4437 
4438 
4439 void FullCodeGenerator::VisitYield(Yield* expr) {
4440  Comment cmnt(masm_, "[ Yield");
4441  // Evaluate yielded value first; the initial iterator definition depends on
4442  // this. It stays on the stack while we update the iterator.
4443  VisitForStackValue(expr->expression());
4444 
4445  // TODO(jbramley): Tidy this up once the merge is done, using named registers
4446  // and suchlike. The implementation changes a little by bleeding_edge so I
4447  // don't want to spend too much time on it now.
4448 
4449  switch (expr->yield_kind()) {
4450  case Yield::SUSPEND:
4451  // Pop value from top-of-stack slot; box result into result register.
4452  EmitCreateIteratorResult(false);
4453  __ Push(result_register());
4454  // Fall through.
4455  case Yield::INITIAL: {
4456  Label suspend, continuation, post_runtime, resume;
4457 
4458  __ B(&suspend);
4459 
4460  // TODO(jbramley): This label is bound here because the following code
4461  // looks at its pos(). Is it possible to do something more efficient here,
4462  // perhaps using Adr?
4463  __ Bind(&continuation);
4464  __ B(&resume);
4465 
4466  __ Bind(&suspend);
4467  VisitForAccumulatorValue(expr->generator_object());
4468  ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4469  __ Mov(x1, Smi::FromInt(continuation.pos()));
4470  __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4471  __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4472  __ Mov(x1, cp);
4473  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4474  kLRHasBeenSaved, kDontSaveFPRegs);
4475  __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4476  __ Cmp(__ StackPointer(), x1);
4477  __ B(eq, &post_runtime);
4478  __ Push(x0); // generator object
4479  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
4480  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4481  __ Bind(&post_runtime);
4482  __ Pop(result_register());
4483  EmitReturnSequence();
4484 
4485  __ Bind(&resume);
4486  context()->Plug(result_register());
4487  break;
4488  }
4489 
4490  case Yield::FINAL: {
4491  VisitForAccumulatorValue(expr->generator_object());
4492  __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4493  __ Str(x1, FieldMemOperand(result_register(),
4494  JSGeneratorObject::kContinuationOffset));
4495  // Pop value from top-of-stack slot, box result into result register.
4496  EmitCreateIteratorResult(true);
4497  EmitUnwindBeforeReturn();
4498  EmitReturnSequence();
4499  break;
4500  }
4501 
4502  case Yield::DELEGATING: {
4503  VisitForStackValue(expr->generator_object());
4504 
4505  // Initial stack layout is as follows:
4506  // [sp + 1 * kPointerSize] iter
4507  // [sp + 0 * kPointerSize] g
4508 
4509  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4510  Label l_next, l_call, l_loop;
4511  // Initial send value is undefined.
4512  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4513  __ B(&l_next);
4514 
4515  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4516  __ Bind(&l_catch);
4517  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4518  __ LoadRoot(x2, Heap::kthrow_stringRootIndex); // "throw"
4519  __ Peek(x3, 1 * kPointerSize); // iter
4520  __ Push(x2, x3, x0); // "throw", iter, except
4521  __ B(&l_call);
4522 
4523  // try { received = %yield result }
4524  // Shuffle the received result above a try handler and yield it without
4525  // re-boxing.
4526  __ Bind(&l_try);
4527  __ Pop(x0); // result
4528  __ PushTryHandler(StackHandler::CATCH, expr->index());
4529  const int handler_size = StackHandlerConstants::kSize;
4530  __ Push(x0); // result
4531  __ B(&l_suspend);
4532 
4533  // TODO(jbramley): This label is bound here because the following code
4534  // looks at its pos(). Is it possible to do something more efficient here,
4535  // perhaps using Adr?
4536  __ Bind(&l_continuation);
4537  __ B(&l_resume);
4538 
4539  __ Bind(&l_suspend);
4540  const int generator_object_depth = kPointerSize + handler_size;
4541  __ Peek(x0, generator_object_depth);
4542  __ Push(x0); // g
4543  ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4544  __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4545  __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4546  __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4547  __ Mov(x1, cp);
4548  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4549  kLRHasBeenSaved, kDontSaveFPRegs);
4550  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
4551  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4552  __ Pop(x0); // result
4553  EmitReturnSequence();
4554  __ Bind(&l_resume); // received in x0
4555  __ PopTryHandler();
4556 
4557  // receiver = iter; f = 'next'; arg = received;
4558  __ Bind(&l_next);
4559  __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next"
4560  __ Peek(x3, 1 * kPointerSize); // iter
4561  __ Push(x2, x3, x0); // "next", iter, received
4562 
4563  // result = receiver[f](arg);
4564  __ Bind(&l_call);
4565  __ Peek(x1, 1 * kPointerSize);
4566  __ Peek(x0, 2 * kPointerSize);
4567  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
4568  CallIC(ic, TypeFeedbackId::None());
4569  __ Mov(x1, x0);
4570  __ Poke(x1, 2 * kPointerSize);
4571  CallFunctionStub stub(1, CALL_AS_METHOD);
4572  __ CallStub(&stub);
4573 
4574  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4575  __ Drop(1); // The function is still on the stack; drop it.
4576 
4577  // if (!result.done) goto l_try;
4578  __ Bind(&l_loop);
4579  __ Push(x0); // save result
4580  __ LoadRoot(x2, Heap::kdone_stringRootIndex); // "done"
4581  CallLoadIC(NOT_CONTEXTUAL); // result.done in x0
4582  // The ToBooleanStub argument (result.done) is in x0.
4583  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4584  CallIC(bool_ic);
4585  __ Cbz(x0, &l_try);
4586 
4587  // result.value
4588  __ Pop(x0); // result
4589  __ LoadRoot(x2, Heap::kvalue_stringRootIndex); // "value"
4590  CallLoadIC(NOT_CONTEXTUAL); // result.value in x0
4591  context()->DropAndPlug(2, x0); // drop iter and g
4592  break;
4593  }
4594  }
4595 }
4596 
4597 
4598 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4599  Expression *value,
4600  JSGeneratorObject::ResumeMode resume_mode) {
4601  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4602  Register value_reg = x0;
4603  Register generator_object = x1;
4604  Register the_hole = x2;
4605  Register operand_stack_size = w3;
4606  Register function = x4;
4607 
4608  // The value stays in x0, and is ultimately read by the resumed generator, as
4609  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
4610  // is read to throw the value when the resumed generator is already closed. x1
4611  // will hold the generator object until the activation has been resumed.
4612  VisitForStackValue(generator);
4613  VisitForAccumulatorValue(value);
4614  __ Pop(generator_object);
4615 
4616  // Check generator state.
4617  Label wrong_state, closed_state, done;
4618  __ Ldr(x10, FieldMemOperand(generator_object,
4619  JSGeneratorObject::kContinuationOffset));
4622  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
4623  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
4624 
4625  // Load suspended function and context.
4626  __ Ldr(cp, FieldMemOperand(generator_object,
4627  JSGeneratorObject::kContextOffset));
4628  __ Ldr(function, FieldMemOperand(generator_object,
4629  JSGeneratorObject::kFunctionOffset));
4630 
4631  // Load receiver and store as the first argument.
4632  __ Ldr(x10, FieldMemOperand(generator_object,
4633  JSGeneratorObject::kReceiverOffset));
4634  __ Push(x10);
4635 
4636  // Push holes for the rest of the arguments to the generator function.
4637  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4638 
4639  // The number of arguments is stored as an int32_t, and -1 is a marker
4640  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
4641  // extension to correctly handle it. However, in this case, we operate on
4642  // 32-bit W registers, so extension isn't required.
4643  __ Ldr(w10, FieldMemOperand(x10,
4644  SharedFunctionInfo::kFormalParameterCountOffset));
4645  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4646  __ PushMultipleTimes(the_hole, w10);
4647 
4648  // Enter a new JavaScript frame, and initialize its slots as they were when
4649  // the generator was suspended.
4650  Label resume_frame;
4651  __ Bl(&resume_frame);
4652  __ B(&done);
4653 
4654  __ Bind(&resume_frame);
4655  __ Push(lr, // Return address.
4656  fp, // Caller's frame pointer.
4657  cp, // Callee's context.
4658  function); // Callee's JS Function.
4659  __ Add(fp, __ StackPointer(), kPointerSize * 2);
4660 
4661  // Load and untag the operand stack size.
4662  __ Ldr(x10, FieldMemOperand(generator_object,
4663  JSGeneratorObject::kOperandStackOffset));
4664  __ Ldr(operand_stack_size,
4665  UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4666 
4667  // If we are sending a value and there is no operand stack, we can jump back
4668  // in directly.
4669  if (resume_mode == JSGeneratorObject::NEXT) {
4670  Label slow_resume;
4671  __ Cbnz(operand_stack_size, &slow_resume);
4672  __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4673  __ Ldrsw(x11,
4674  UntagSmiFieldMemOperand(generator_object,
4675  JSGeneratorObject::kContinuationOffset));
4676  __ Add(x10, x10, x11);
4677  __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
4678  __ Str(x12, FieldMemOperand(generator_object,
4679  JSGeneratorObject::kContinuationOffset));
4680  __ Br(x10);
4681 
4682  __ Bind(&slow_resume);
4683  }
4684 
4685  // Otherwise, we push holes for the operand stack and call the runtime to fix
4686  // up the stack and the handlers.
4687  __ PushMultipleTimes(the_hole, operand_stack_size);
4688 
4689  __ Mov(x10, Smi::FromInt(resume_mode));
4690  __ Push(generator_object, result_register(), x10);
4691  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
4692  // Not reached: the runtime call returns elsewhere.
4693  __ Unreachable();
4694 
4695  // Reach here when generator is closed.
4696  __ Bind(&closed_state);
4697  if (resume_mode == JSGeneratorObject::NEXT) {
4698  // Return completed iterator result when generator is closed.
4699  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4700  __ Push(x10);
4701  // Pop value from top-of-stack slot; box result into result register.
4702  EmitCreateIteratorResult(true);
4703  } else {
4704  // Throw the provided value.
4705  __ Push(value_reg);
4706  __ CallRuntime(Runtime::kHiddenThrow, 1);
4707  }
4708  __ B(&done);
4709 
4710  // Throw error if we attempt to operate on a running generator.
4711  __ Bind(&wrong_state);
4712  __ Push(generator_object);
4713  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
4714 
4715  __ Bind(&done);
4716  context()->Plug(result_register());
4717 }
4718 
4719 
4720 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4721  Label gc_required;
4722  Label allocated;
4723 
4724  Handle<Map> map(isolate()->native_context()->generator_result_map());
4725 
4726  // Allocate and populate an object with this form: { value: VAL, done: DONE }
4727 
4728  Register result = x0;
4729  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4730  __ B(&allocated);
4731 
4732  __ Bind(&gc_required);
4733  __ Push(Smi::FromInt(map->instance_size()));
4734  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
4735  __ Ldr(context_register(),
4736  MemOperand(fp, StandardFrameConstants::kContextOffset));
4737 
4738  __ Bind(&allocated);
4739  Register map_reg = x1;
4740  Register result_value = x2;
4741  Register boolean_done = x3;
4742  Register empty_fixed_array = x4;
4743  __ Mov(map_reg, Operand(map));
4744  __ Pop(result_value);
4745  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4746  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4747  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
4748  // TODO(jbramley): Use Stp if possible.
4749  __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset));
4750  __ Str(empty_fixed_array,
4751  FieldMemOperand(result, JSObject::kPropertiesOffset));
4752  __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset));
4753  __ Str(result_value,
4754  FieldMemOperand(result,
4755  JSGeneratorObject::kResultValuePropertyOffset));
4756  __ Str(boolean_done,
4757  FieldMemOperand(result,
4758  JSGeneratorObject::kResultDonePropertyOffset));
4759 
4760  // Only the value field needs a write barrier, as the other values are in the
4761  // root set.
4762  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
4763  x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
4764 }
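 // Note: the object built here is the { value, done } iterator result consumed
 // by yield and for-of. Only the value slot can point into new space, which is
 // why it is the only field given a write barrier.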
4765 
4766 
4767 // TODO(all): I don't like this method.
4768 // It seems to me that in too many places x0 is used in place of this.
4769 // Also, this function is not suitable for all places where x0 should be
4770 // abstracted (eg. when used as an argument). But some places assume that the
4771 // first argument register is x0, and use this function instead.
4772 // Considering that most of the register allocation is hard-coded in the
4773 // FullCodeGen, that it is unlikely we will need to change it extensively, and
4774 // that abstracting the allocation through functions would not yield any
4775 // performance benefit, I think the existence of this function is debatable.
4776 Register FullCodeGenerator::result_register() {
4777  return x0;
4778 }
4779 
4780 
4781 Register FullCodeGenerator::context_register() {
4782  return cp;
4783 }
4784 
4785 
4786 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4787  ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4788  __ Str(value, MemOperand(fp, frame_offset));
4789 }
4790 
4791 
4792 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4793  __ Ldr(dst, ContextMemOperand(cp, context_index));
4794 }
4795 
4796 
4797 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4798  Scope* declaration_scope = scope()->DeclarationScope();
4799  if (declaration_scope->is_global_scope() ||
4800  declaration_scope->is_module_scope()) {
4801  // Contexts nested in the native context have a canonical empty function
4802  // as their closure, not the anonymous closure containing the global
4803  // code. Pass a smi sentinel and let the runtime look up the empty
4804  // function.
4805  ASSERT(kSmiTag == 0);
4806  __ Push(xzr);
4807  } else if (declaration_scope->is_eval_scope()) {
4808  // Contexts created by a call to eval have the same closure as the
4809  // context calling eval, not the anonymous closure containing the eval
4810  // code. Fetch it from the context.
4811  __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4812  __ Push(x10);
4813  } else {
4814  ASSERT(declaration_scope->is_function_scope());
4815  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4816  __ Push(x10);
4817  }
4818 }
4819 
4820 
4821 void FullCodeGenerator::EnterFinallyBlock() {
4822  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4823  ASSERT(!result_register().is(x10));
4824  // Preserve the result register while executing finally block.
4825  // Also cook the return address in lr to the stack (smi encoded Code* delta).
4826  __ Sub(x10, lr, Operand(masm_->CodeObject()));
4827  __ SmiTag(x10);
4828  __ Push(result_register(), x10);
4829 
4830  // Store pending message while executing finally block.
4831  ExternalReference pending_message_obj =
4832  ExternalReference::address_of_pending_message_obj(isolate());
4833  __ Mov(x10, pending_message_obj);
4834  __ Ldr(x10, MemOperand(x10));
4835 
4836  ExternalReference has_pending_message =
4837  ExternalReference::address_of_has_pending_message(isolate());
4838  __ Mov(x11, has_pending_message);
4839  __ Ldr(x11, MemOperand(x11));
4840  __ SmiTag(x11);
4841 
4842  __ Push(x10, x11);
4843 
4844  ExternalReference pending_message_script =
4845  ExternalReference::address_of_pending_message_script(isolate());
4846  __ Mov(x10, pending_message_script);
4847  __ Ldr(x10, MemOperand(x10));
4848  __ Push(x10);
4849 }
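 // Note: "cooking" the return address means storing it as a smi-tagged offset
 // from the start of the code object rather than as a raw pointer, so it stays
 // valid if the GC moves the code; ExitFinallyBlock below reverses the encoding.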
4850 
4851 
4852 void FullCodeGenerator::ExitFinallyBlock() {
4853  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4854  ASSERT(!result_register().is(x10));
4855 
4856  // Restore pending message from stack.
4857  __ Pop(x10, x11, x12);
4858  ExternalReference pending_message_script =
4859  ExternalReference::address_of_pending_message_script(isolate());
4860  __ Mov(x13, pending_message_script);
4861  __ Str(x10, MemOperand(x13));
4862 
4863  __ SmiUntag(x11);
4864  ExternalReference has_pending_message =
4865  ExternalReference::address_of_has_pending_message(isolate());
4866  __ Mov(x13, has_pending_message);
4867  __ Str(x11, MemOperand(x13));
4868 
4869  ExternalReference pending_message_obj =
4870  ExternalReference::address_of_pending_message_obj(isolate());
4871  __ Mov(x13, pending_message_obj);
4872  __ Str(x12, MemOperand(x13));
4873 
4874  // Restore result register and cooked return address from the stack.
4875  __ Pop(x10, result_register());
4876 
4877  // Uncook the return address (see EnterFinallyBlock).
4878  __ SmiUntag(x10);
4879  __ Add(x11, x10, Operand(masm_->CodeObject()));
4880  __ Br(x11);
4881 }
4882 
4883 
4884 #undef __
4885 
4886 
4887 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4888  Address pc,
4889  BackEdgeState target_state,
4890  Code* replacement_code) {
4891  // Turn the jump into a nop.
4892  Address branch_address = pc - 3 * kInstructionSize;
4893  PatchingAssembler patcher(branch_address, 1);
4894 
4895  ASSERT(Instruction::Cast(branch_address)
4896  ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4897  (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4898  Instruction::Cast(branch_address)->ImmPCOffset() ==
4899  6 * kInstructionSize));
4900 
4901  switch (target_state) {
4902  case INTERRUPT:
4903  // <decrement profiling counter>
4904  // .. .. .. .. b.pl ok
4905  // .. .. .. .. ldr x16, pc+<interrupt stub address>
4906  // .. .. .. .. blr x16
4907  // ... more instructions.
4908  // ok-label
4909  // Jump offset is 6 instructions.
4910  patcher.b(6, pl);
4911  break;
4912  case ON_STACK_REPLACEMENT:
4913  case OSR_AFTER_STACK_CHECK:
4914  // <decrement profiling counter>
4915  // .. .. .. .. mov x0, x0 (NOP)
4916  // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4917  // .. .. .. .. blr x16
4918  patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4919  break;
4920  }
4921 
4922  // Replace the call address.
4923  Instruction* load = Instruction::Cast(pc)->preceding(2);
4924  Address interrupt_address_pointer =
4925  reinterpret_cast<Address>(load) + load->ImmPCOffset();
4926  ASSERT((Memory::uint64_at(interrupt_address_pointer) ==
4927  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4928  ->builtins()
4929  ->OnStackReplacement()
4930  ->entry())) ||
4931  (Memory::uint64_at(interrupt_address_pointer) ==
4932  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4933  ->builtins()
4934  ->InterruptCheck()
4935  ->entry())) ||
4936  (Memory::uint64_at(interrupt_address_pointer) ==
4937  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4938  ->builtins()
4939  ->OsrAfterStackCheck()
4940  ->entry())) ||
4941  (Memory::uint64_at(interrupt_address_pointer) ==
4942  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4943  ->builtins()
4944  ->OnStackReplacement()
4945  ->entry())));
4946  Memory::uint64_at(interrupt_address_pointer) =
4947  reinterpret_cast<uint64_t>(replacement_code->entry());
4948 
4949  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4950  unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4951 }
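 // Note: the patched back-edge tail is the three-instruction sequence
 //   { b.pl ok | nop } ; ldr x16, <literal> ; blr x16
 // PatchAt rewrites the first instruction and the 64-bit entry address that
 // the ldr reads, switching between the interrupt check and the OSR builtins.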
4952 
4953 
4954 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4955  Isolate* isolate,
4956  Code* unoptimized_code,
4957  Address pc) {
4958  // TODO(jbramley): There should be some extra assertions here (as in the ARM
4959  // back-end), but this function is gone in bleeding_edge so it might not
4960  // matter anyway.
4961  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
4962 
4963  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
4964  Instruction* load = Instruction::Cast(pc)->preceding(2);
4965  uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
4966  load->ImmPCOffset());
4967  if (entry == reinterpret_cast<uint64_t>(
4968  isolate->builtins()->OnStackReplacement()->entry())) {
4969  return ON_STACK_REPLACEMENT;
4970  } else if (entry == reinterpret_cast<uint64_t>(
4971  isolate->builtins()->OsrAfterStackCheck()->entry())) {
4972  return OSR_AFTER_STACK_CHECK;
4973  } else {
4974  UNREACHABLE();
4975  }
4976  }
4977 
4978  return INTERRUPT;
4979 }
4980 
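Note: GetBackEdgeState is the inverse of PatchAt. If the first instruction of the sequence is still the conditional branch, the back edge is in the INTERRUPT state; if it has been patched to the INTERRUPT_CODE_NOP, the two OSR states are told apart by which builtin entry sits in the literal slot. A standalone mirror of that decision logic, with the instruction decoding and builtin lookup replaced by plain parameters (enumerator names follow the listing; the function name is ours):

#include <cstdint>
#include <stdexcept>

enum BackEdgeStateModel { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

// Mirrors the branch structure of GetBackEdgeState: the first instruction of
// the sequence separates INTERRUPT from the OSR states, and the literal the
// ldr reads separates the two OSR states.
BackEdgeStateModel DecodeBackEdgeState(bool first_instruction_is_nop,
                                       uint64_t patched_entry,
                                       uint64_t on_stack_replacement_entry,
                                       uint64_t osr_after_stack_check_entry) {
  if (!first_instruction_is_nop) {
    // The conditional branch to the interrupt check is still in place.
    return INTERRUPT;
  }
  if (patched_entry == on_stack_replacement_entry) return ON_STACK_REPLACEMENT;
  if (patched_entry == osr_after_stack_check_entry) return OSR_AFTER_STACK_CHECK;
  // Corresponds to UNREACHABLE() in the listing: no other entry is valid here.
  throw std::logic_error("unexpected back-edge call target");
}

int main() {
  const uint64_t osr = 0x4000, osr_after_check = 0x5000;
  return DecodeBackEdgeState(true, osr, osr, osr_after_check) ==
                 ON_STACK_REPLACEMENT
             ? 0
             : 1;
}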
4981 
4982 #define __ ACCESS_MASM(masm())
4983 
4984 
4985 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4986  int* stack_depth,
4987  int* context_length) {
4988  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
4989  // The macros used here must preserve the result register.
4990 
4991  // Because the handler block contains the context of the finally
4992  // code, we can restore it directly from there, rather than
4993  // iteratively unwinding contexts via their previous
4994  // links.
4995  __ Drop(*stack_depth); // Down to the handler block.
4996  if (*context_length > 0) {
4997  // Restore the context to its dedicated register and the stack.
4998  __ Peek(cp, StackHandlerConstants::kContextOffset);
4999  __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5000  }
5001  __ PopTryHandler();
5002  __ Bl(finally_entry_);
5003 
5004  *stack_depth = 0;
5005  *context_length = 0;
5006  return previous_;
5007 }
5008 
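Note: the Exit() protocol visible here is shared by full-codegen's nested statements. The caller accumulates how much stack and how many contexts must be unwound, each nested construct consumes its share, and the return value is the enclosing statement so the walk can continue outward. TryFinally::Exit consumes everything because it has already dropped to the handler block and branched into the finally code. A rough model of that contract, assuming only what the listing shows; the class names are ours, not V8's:

#include <cstdio>

// Sketch types; V8's real NestedStatement hierarchy is richer than this.
struct Nested {
  Nested* previous;
  explicit Nested(Nested* prev) : previous(prev) {}
  virtual ~Nested() {}
  // Default behaviour: nothing of ours is on the stack, keep walking outward.
  virtual Nested* Exit(int* /*stack_depth*/, int* /*context_length*/) {
    return previous;
  }
};

struct TryFinallyModel : Nested {
  explicit TryFinallyModel(Nested* prev) : Nested(prev) {}
  Nested* Exit(int* stack_depth, int* context_length) override {
    // The real code drops *stack_depth slots down to the handler block,
    // restores the context if needed, pops the handler and calls the finally
    // entry; nothing is left for enclosing statements to unwind.
    *stack_depth = 0;
    *context_length = 0;
    return previous;
  }
};

int main() {
  TryFinallyModel try_finally(nullptr);
  int depth = 3;
  int contexts = 1;
  Nested* enclosing = try_finally.Exit(&depth, &contexts);
  std::printf("depth=%d contexts=%d enclosing=%p\n",
              depth, contexts, static_cast<void*>(enclosing));
  return 0;
}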
5009 
5010 #undef __
5011 
5012 
5013 } } // namespace v8::internal
5014 
5015 #endif // V8_TARGET_ARCH_ARM64