v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
full-codegen-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_ARM
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 #include "arm/code-stubs-arm.h"
43 #include "arm/macro-assembler-arm.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 #define __ ACCESS_MASM(masm_)
49 
50 
51 // A patch site is a location in the code which it is possible to patch. This
52 // class has a number of methods to emit the code which is patchable and the
53 // method EmitPatchInfo to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
55 // immediate value is used) is the delta from the pc to the first instruction of
56 // the patchable code.
57 class JumpPatchSite BASE_EMBEDDED {
58  public:
59  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
60 #ifdef DEBUG
61  info_emitted_ = false;
62 #endif
63  }
64 
65  ~JumpPatchSite() {
66  ASSERT(patch_site_.is_bound() == info_emitted_);
67  }
68 
69  // When initially emitting this ensure that a jump is always generated to skip
70  // the inlined smi code.
71  void EmitJumpIfNotSmi(Register reg, Label* target) {
72  ASSERT(!patch_site_.is_bound() && !info_emitted_);
73  Assembler::BlockConstPoolScope block_const_pool(masm_);
74  __ bind(&patch_site_);
75  __ cmp(reg, Operand(reg));
76  __ b(eq, target); // Always taken before patched.
77  }
78 
79  // When initially emitting this ensure that a jump is never generated to skip
80  // the inlined smi code.
81  void EmitJumpIfSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockConstPoolScope block_const_pool(masm_);
84  __ bind(&patch_site_);
85  __ cmp(reg, Operand(reg));
86  __ b(ne, target); // Never taken before patched.
87  }
88 
89  void EmitPatchInfo() {
90  // Block literal pool emission whilst recording patch site information.
91  Assembler::BlockConstPoolScope block_const_pool(masm_);
92  if (patch_site_.is_bound()) {
93  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
94  Register reg;
95  reg.set_code(delta_to_patch_site / kOff12Mask);
96  __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
97 #ifdef DEBUG
98  info_emitted_ = true;
99 #endif
100  } else {
101  __ nop(); // Signals no inlined code.
102  }
103  }
104 
105  private:
106  MacroAssembler* masm_;
107  Label patch_site_;
108 #ifdef DEBUG
109  bool info_emitted_;
110 #endif
111 };
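// Illustrative sketch (not verbatim from this file): how EmitPatchInfo()
// encodes the patch-site marker. Assuming kOff12Mask == 0xfff (the raw
// 12-bit ARM immediate mask), the instruction count back to the patchable
// cmp/branch pair is folded into a dummy compare:
//
//   int delta = masm_->InstructionsGeneratedSince(&patch_site_);  // e.g. 6
//   // register code = 6 / 0xfff == 0  -> r0
//   // immediate     = 6 % 0xfff == 6
//   __ cmp_raw_immediate(r0, 6);  // marker: patch site is 6 instructions back
//
// The IC patching code later decodes the register and immediate fields of
// this cmp to locate and rewrite the inlined smi check.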
112 
113 
114 static void EmitStackCheck(MacroAssembler* masm_,
115  Register stack_limit_scratch,
116  int pointers = 0,
117  Register scratch = sp) {
118  Isolate* isolate = masm_->isolate();
119  Label ok;
120  ASSERT(scratch.is(sp) == (pointers == 0));
121  if (pointers != 0) {
122  __ sub(scratch, sp, Operand(pointers * kPointerSize));
123  }
124  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
125  __ cmp(scratch, Operand(stack_limit_scratch));
126  __ b(hs, &ok);
127  PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
128  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
129  __ bind(&ok);
130 }
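// Usage in this file (sketch): the helper is called in two forms. With the
// defaults it simply compares sp against the stack limit, e.g. at function
// entry:
//
//   EmitStackCheck(masm_, ip);
//
// and with an explicit pointer count plus a scratch register when a large
// number of locals is about to be pushed:
//
//   EmitStackCheck(masm_, r2, locals_count, r9);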
131 
132 
133 // Generate code for a JS function. On entry to the function the receiver
134 // and arguments have been pushed on the stack left to right. The actual
135 // argument count matches the formal parameter count expected by the
136 // function.
137 //
138 // The live registers are:
139 // o r1: the JS function object being called (i.e., ourselves)
140 // o cp: our context
141 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
142 // o fp: our caller's frame pointer
143 // o sp: stack pointer
144 // o lr: return address
145 //
146 // The function builds a JS frame. Please see JavaScriptFrameConstants in
147 // frames-arm.h for its layout.
148 void FullCodeGenerator::Generate() {
149  CompilationInfo* info = info_;
150  handler_table_ =
151  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
152 
153  InitializeFeedbackVector();
154 
155  profiling_counter_ = isolate()->factory()->NewCell(
156  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
157  SetFunctionPosition(function());
158  Comment cmnt(masm_, "[ function compiled by full code generator");
159 
160  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
161 
162 #ifdef DEBUG
163  if (strlen(FLAG_stop_at) > 0 &&
164  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
165  __ stop("stop-at");
166  }
167 #endif
168 
169  // Sloppy mode functions and builtins need to replace the receiver with the
170  // global proxy when called as functions (without an explicit receiver
171  // object).
172  if (info->strict_mode() == SLOPPY && !info->is_native()) {
173  Label ok;
174  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
175  __ ldr(r2, MemOperand(sp, receiver_offset));
176  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
177  __ b(ne, &ok);
178 
179  __ ldr(r2, GlobalObjectOperand());
180  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
181 
182  __ str(r2, MemOperand(sp, receiver_offset));
183 
184  __ bind(&ok);
185  }
186 
187  // Open a frame scope to indicate that there is a frame on the stack. The
188  // MANUAL indicates that the scope shouldn't actually generate code to set up
189  // the frame (that is done below).
190  FrameScope frame_scope(masm_, StackFrame::MANUAL);
191 
192  info->set_prologue_offset(masm_->pc_offset());
193  __ Prologue(BUILD_FUNCTION_FRAME);
194  info->AddNoFrameRange(0, masm_->pc_offset());
195 
196  { Comment cmnt(masm_, "[ Allocate locals");
197  int locals_count = info->scope()->num_stack_slots();
198  // Generators allocate locals, if any, in context slots.
199  ASSERT(!info->function()->is_generator() || locals_count == 0);
200  if (locals_count > 0) {
201  if (locals_count >= 128) {
202  EmitStackCheck(masm_, r2, locals_count, r9);
203  }
204  __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
205  int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
206  if (locals_count >= kMaxPushes) {
207  int loop_iterations = locals_count / kMaxPushes;
208  __ mov(r2, Operand(loop_iterations));
209  Label loop_header;
210  __ bind(&loop_header);
211  // Do pushes.
212  for (int i = 0; i < kMaxPushes; i++) {
213  __ push(r9);
214  }
215  // Continue loop if not done.
216  __ sub(r2, r2, Operand(1), SetCC);
217  __ b(&loop_header, ne);
218  }
219  int remaining = locals_count % kMaxPushes;
220  // Emit the remaining pushes.
221  for (int i = 0; i < remaining; i++) {
222  __ push(r9);
223  }
224  }
225  }
226 
227  bool function_in_register = true;
228 
229  // Possibly allocate a local context.
230  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
231  if (heap_slots > 0) {
232  // Argument to NewContext is the function, which is still in r1.
233  Comment cmnt(masm_, "[ Allocate context");
234  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
235  __ push(r1);
236  __ Push(info->scope()->GetScopeInfo());
237  __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
238  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
239  FastNewContextStub stub(heap_slots);
240  __ CallStub(&stub);
241  } else {
242  __ push(r1);
243  __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
244  }
245  function_in_register = false;
246  // Context is returned in r0. It replaces the context passed to us.
247  // It's saved in the stack and kept live in cp.
248  __ mov(cp, r0);
249  __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
250  // Copy any necessary parameters into the context.
251  int num_parameters = info->scope()->num_parameters();
252  for (int i = 0; i < num_parameters; i++) {
253  Variable* var = scope()->parameter(i);
254  if (var->IsContextSlot()) {
255  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
256  (num_parameters - 1 - i) * kPointerSize;
257  // Load parameter from stack.
258  __ ldr(r0, MemOperand(fp, parameter_offset));
259  // Store it in the context.
260  MemOperand target = ContextOperand(cp, var->index());
261  __ str(r0, target);
262 
263  // Update the write barrier.
264  __ RecordWriteContextSlot(
265  cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
266  }
267  }
268  }
269 
270  Variable* arguments = scope()->arguments();
271  if (arguments != NULL) {
272  // Function uses arguments object.
273  Comment cmnt(masm_, "[ Allocate arguments object");
274  if (!function_in_register) {
275  // Load this again, if it's used by the local context below.
276  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
277  } else {
278  __ mov(r3, r1);
279  }
280  // Receiver is just before the parameters on the caller's stack.
281  int num_parameters = info->scope()->num_parameters();
282  int offset = num_parameters * kPointerSize;
283  __ add(r2, fp,
284  Operand(StandardFrameConstants::kCallerSPOffset + offset));
285  __ mov(r1, Operand(Smi::FromInt(num_parameters)));
286  __ Push(r3, r2, r1);
287 
288  // Arguments to ArgumentsAccessStub:
289  // function, receiver address, parameter count.
290  // The stub will rewrite receiver and parameter count if the previous
291  // stack frame was an arguments adapter frame.
292  ArgumentsAccessStub::Type type;
293  if (strict_mode() == STRICT) {
294  type = ArgumentsAccessStub::NEW_STRICT;
295  } else if (function()->has_duplicate_parameters()) {
296  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
297  } else {
298  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
299  }
300  ArgumentsAccessStub stub(type);
301  __ CallStub(&stub);
302 
303  SetVar(arguments, r0, r1, r2);
304  }
305 
306  if (FLAG_trace) {
307  __ CallRuntime(Runtime::kTraceEnter, 0);
308  }
309 
310  // Visit the declarations and body unless there is an illegal
311  // redeclaration.
312  if (scope()->HasIllegalRedeclaration()) {
313  Comment cmnt(masm_, "[ Declarations");
314  scope()->VisitIllegalRedeclaration(this);
315 
316  } else {
317  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
318  { Comment cmnt(masm_, "[ Declarations");
319  // For named function expressions, declare the function name as a
320  // constant.
321  if (scope()->is_function_scope() && scope()->function() != NULL) {
322  VariableDeclaration* function = scope()->function();
323  ASSERT(function->proxy()->var()->mode() == CONST ||
324  function->proxy()->var()->mode() == CONST_LEGACY);
325  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
326  VisitVariableDeclaration(function);
327  }
328  VisitDeclarations(scope()->declarations());
329  }
330 
331  { Comment cmnt(masm_, "[ Stack check");
332  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
333  EmitStackCheck(masm_, ip);
334  }
335 
336  { Comment cmnt(masm_, "[ Body");
337  ASSERT(loop_depth() == 0);
338  VisitStatements(function()->body());
339  ASSERT(loop_depth() == 0);
340  }
341  }
342 
343  // Always emit a 'return undefined' in case control fell off the end of
344  // the body.
345  { Comment cmnt(masm_, "[ return <undefined>;");
346  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
347  }
348  EmitReturnSequence();
349 
350  // Force emit the constant pool, so it doesn't get emitted in the middle
351  // of the back edge table.
352  masm()->CheckConstPool(true, false);
353 }
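// Summary (sketch) of what Generate() emits, in order: the sloppy-mode
// receiver fix-up, the frame prologue, undefined-initialization of stack
// locals (with a stack check when 128 or more locals are pushed), optional
// context allocation with parameter copy-up, optional arguments object
// allocation, declarations, a stack check, the function body, and finally an
// implicit 'return undefined' through EmitReturnSequence().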
354 
355 
356 void FullCodeGenerator::ClearAccumulator() {
357  __ mov(r0, Operand(Smi::FromInt(0)));
358 }
359 
360 
361 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
362  __ mov(r2, Operand(profiling_counter_));
363  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
364  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
365  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
366 }
367 
368 
369 void FullCodeGenerator::EmitProfilingCounterReset() {
370  int reset_value = FLAG_interrupt_budget;
371  if (isolate()->IsDebuggerActive()) {
372  // Detect debug break requests as soon as possible.
373  reset_value = FLAG_interrupt_budget >> 4;
374  }
375  __ mov(r2, Operand(profiling_counter_));
376  __ mov(r3, Operand(Smi::FromInt(reset_value)));
377  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
378 }
379 
380 
381 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
382  Label* back_edge_target) {
383  Comment cmnt(masm_, "[ Back edge bookkeeping");
384  // Block literal pools whilst emitting back edge code.
385  Assembler::BlockConstPoolScope block_const_pool(masm_);
386  Label ok;
387 
388  ASSERT(back_edge_target->is_bound());
389  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
390  int weight = Min(kMaxBackEdgeWeight,
391  Max(1, distance / kCodeSizeMultiplier));
392  EmitProfilingCounterDecrement(weight);
393  __ b(pl, &ok);
394  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
395 
396  // Record a mapping of this PC offset to the OSR id. This is used to find
397  // the AST id from the unoptimized code in order to use it as a key into
398  // the deoptimization input data found in the optimized code.
399  RecordBackEdge(stmt->OsrEntryId());
400 
401  EmitProfilingCounterReset();
402 
403  __ bind(&ok);
404  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
405  // Record a mapping of the OSR id to this PC. This is used if the OSR
406  // entry becomes the target of a bailout. We don't expect it to be, but
407  // we want it to work if it is.
408  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
409 }
410 
411 
412 void FullCodeGenerator::EmitReturnSequence() {
413  Comment cmnt(masm_, "[ Return sequence");
414  if (return_label_.is_bound()) {
415  __ b(&return_label_);
416  } else {
417  __ bind(&return_label_);
418  if (FLAG_trace) {
419  // Push the return value on the stack as the parameter.
420  // Runtime::TraceExit returns its parameter in r0.
421  __ push(r0);
422  __ CallRuntime(Runtime::kTraceExit, 1);
423  }
424  // Pretend that the exit is a backwards jump to the entry.
425  int weight = 1;
426  if (info_->ShouldSelfOptimize()) {
427  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
428  } else {
429  int distance = masm_->pc_offset();
430  weight = Min(kMaxBackEdgeWeight,
431  Max(1, distance / kCodeSizeMultiplier));
432  }
433  EmitProfilingCounterDecrement(weight);
434  Label ok;
435  __ b(pl, &ok);
436  __ push(r0);
437  __ Call(isolate()->builtins()->InterruptCheck(),
438  RelocInfo::CODE_TARGET);
439  __ pop(r0);
440  EmitProfilingCounterReset();
441  __ bind(&ok);
442 
443 #ifdef DEBUG
444  // Add a label for checking the size of the code used for returning.
445  Label check_exit_codesize;
446  __ bind(&check_exit_codesize);
447 #endif
448  // Make sure that the constant pool is not emitted inside of the return
449  // sequence.
450  { Assembler::BlockConstPoolScope block_const_pool(masm_);
451  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
452  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
453  // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
454  PredictableCodeSizeScope predictable(masm_, -1);
455  __ RecordJSReturn();
456  int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
457  __ add(sp, sp, Operand(sp_delta));
458  __ Jump(lr);
459  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
460  }
461 
462 #ifdef DEBUG
463  // Check that the size of the code used for returning is large enough
464  // for the debugger's requirements.
465  ASSERT_EQ(Assembler::kJSReturnSequenceInstructions,
466  masm_->InstructionsGeneratedSince(&check_exit_codesize));
467 #endif
468  }
469 }
470 
471 
472 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
473  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
474 }
475 
476 
477 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
478  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
479  codegen()->GetVar(result_register(), var);
480 }
481 
482 
483 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
484  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
485  codegen()->GetVar(result_register(), var);
486  __ push(result_register());
487 }
488 
489 
490 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
491  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
492  // For simplicity we always test the accumulator register.
493  codegen()->GetVar(result_register(), var);
494  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
495  codegen()->DoTest(this);
496 }
497 
498 
499 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
500 }
501 
502 
503 void FullCodeGenerator::AccumulatorValueContext::Plug(
504  Heap::RootListIndex index) const {
505  __ LoadRoot(result_register(), index);
506 }
507 
508 
509 void FullCodeGenerator::StackValueContext::Plug(
510  Heap::RootListIndex index) const {
511  __ LoadRoot(result_register(), index);
512  __ push(result_register());
513 }
514 
515 
516 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
517  codegen()->PrepareForBailoutBeforeSplit(condition(),
518  true,
519  true_label_,
520  false_label_);
521  if (index == Heap::kUndefinedValueRootIndex ||
522  index == Heap::kNullValueRootIndex ||
523  index == Heap::kFalseValueRootIndex) {
524  if (false_label_ != fall_through_) __ b(false_label_);
525  } else if (index == Heap::kTrueValueRootIndex) {
526  if (true_label_ != fall_through_) __ b(true_label_);
527  } else {
528  __ LoadRoot(result_register(), index);
529  codegen()->DoTest(this);
530  }
531 }
532 
533 
534 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
535 }
536 
537 
538 void FullCodeGenerator::AccumulatorValueContext::Plug(
539  Handle<Object> lit) const {
540  __ mov(result_register(), Operand(lit));
541 }
542 
543 
544 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
545  // Immediates cannot be pushed directly.
546  __ mov(result_register(), Operand(lit));
547  __ push(result_register());
548 }
549 
550 
551 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
552  codegen()->PrepareForBailoutBeforeSplit(condition(),
553  true,
554  true_label_,
555  false_label_);
556  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
557  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
558  if (false_label_ != fall_through_) __ b(false_label_);
559  } else if (lit->IsTrue() || lit->IsJSObject()) {
560  if (true_label_ != fall_through_) __ b(true_label_);
561  } else if (lit->IsString()) {
562  if (String::cast(*lit)->length() == 0) {
563  if (false_label_ != fall_through_) __ b(false_label_);
564  } else {
565  if (true_label_ != fall_through_) __ b(true_label_);
566  }
567  } else if (lit->IsSmi()) {
568  if (Smi::cast(*lit)->value() == 0) {
569  if (false_label_ != fall_through_) __ b(false_label_);
570  } else {
571  if (true_label_ != fall_through_) __ b(true_label_);
572  }
573  } else {
574  // For simplicity we always test the accumulator register.
575  __ mov(result_register(), Operand(lit));
576  codegen()->DoTest(this);
577  }
578 }
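// Truthiness mapping used above (sketch): undefined, null, false, the empty
// string and Smi 0 branch directly to false_label_; true, JS objects,
// non-empty strings and non-zero Smis branch directly to true_label_; any
// other literal (e.g. a heap number) falls back to the generic DoTest path.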
579 
580 
581 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
582  Register reg) const {
583  ASSERT(count > 0);
584  __ Drop(count);
585 }
586 
587 
588 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
589  int count,
590  Register reg) const {
591  ASSERT(count > 0);
592  __ Drop(count);
593  __ Move(result_register(), reg);
594 }
595 
596 
597 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
598  Register reg) const {
599  ASSERT(count > 0);
600  if (count > 1) __ Drop(count - 1);
601  __ str(reg, MemOperand(sp, 0));
602 }
603 
604 
605 void FullCodeGenerator::TestContext::DropAndPlug(int count,
606  Register reg) const {
607  ASSERT(count > 0);
608  // For simplicity we always test the accumulator register.
609  __ Drop(count);
610  __ Move(result_register(), reg);
611  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
612  codegen()->DoTest(this);
613 }
614 
615 
616 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
617  Label* materialize_false) const {
618  ASSERT(materialize_true == materialize_false);
619  __ bind(materialize_true);
620 }
621 
622 
623 void FullCodeGenerator::AccumulatorValueContext::Plug(
624  Label* materialize_true,
625  Label* materialize_false) const {
626  Label done;
627  __ bind(materialize_true);
628  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
629  __ jmp(&done);
630  __ bind(materialize_false);
631  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
632  __ bind(&done);
633 }
634 
635 
636 void FullCodeGenerator::StackValueContext::Plug(
637  Label* materialize_true,
638  Label* materialize_false) const {
639  Label done;
640  __ bind(materialize_true);
641  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
642  __ jmp(&done);
643  __ bind(materialize_false);
644  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
645  __ bind(&done);
646  __ push(ip);
647 }
648 
649 
650 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
651  Label* materialize_false) const {
652  ASSERT(materialize_true == true_label_);
653  ASSERT(materialize_false == false_label_);
654 }
655 
656 
657 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
658 }
659 
660 
661 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
662  Heap::RootListIndex value_root_index =
663  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
664  __ LoadRoot(result_register(), value_root_index);
665 }
666 
667 
668 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
669  Heap::RootListIndex value_root_index =
670  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
671  __ LoadRoot(ip, value_root_index);
672  __ push(ip);
673 }
674 
675 
676 void FullCodeGenerator::TestContext::Plug(bool flag) const {
677  codegen()->PrepareForBailoutBeforeSplit(condition(),
678  true,
679  true_label_,
680  false_label_);
681  if (flag) {
682  if (true_label_ != fall_through_) __ b(true_label_);
683  } else {
684  if (false_label_ != fall_through_) __ b(false_label_);
685  }
686 }
687 
688 
689 void FullCodeGenerator::DoTest(Expression* condition,
690  Label* if_true,
691  Label* if_false,
692  Label* fall_through) {
693  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
694  CallIC(ic, condition->test_id());
695  __ tst(result_register(), result_register());
696  Split(ne, if_true, if_false, fall_through);
697 }
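// Note (sketch): the ToBoolean IC leaves its result in the accumulator (r0),
// with a non-zero value meaning "truthy"; the tst/Split(ne, ...) pair above
// therefore branches to if_true exactly when the IC reported true.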
698 
699 
700 void FullCodeGenerator::Split(Condition cond,
701  Label* if_true,
702  Label* if_false,
703  Label* fall_through) {
704  if (if_false == fall_through) {
705  __ b(cond, if_true);
706  } else if (if_true == fall_through) {
707  __ b(NegateCondition(cond), if_false);
708  } else {
709  __ b(cond, if_true);
710  __ b(if_false);
711  }
712 }
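// Example (sketch), with cond == eq:
//   Split(eq, &t, &f, &f)    ->  __ b(eq, &t);             // false falls through
//   Split(eq, &t, &f, &t)    ->  __ b(ne, &f);             // true falls through
//   Split(eq, &t, &f, NULL)  ->  __ b(eq, &t); __ b(&f);   // no fall-through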
713 
714 
715 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
716  ASSERT(var->IsStackAllocated());
717  // Offset is negative because higher indexes are at lower addresses.
718  int offset = -var->index() * kPointerSize;
719  // Adjust by a (parameter or local) base offset.
720  if (var->IsParameter()) {
721  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
722  } else {
723  offset += JavaScriptFrameConstants::kLocal0Offset;
724  }
725  return MemOperand(fp, offset);
726 }
727 
728 
729 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
730  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
731  if (var->IsContextSlot()) {
732  int context_chain_length = scope()->ContextChainLength(var->scope());
733  __ LoadContext(scratch, context_chain_length);
734  return ContextOperand(scratch, var->index());
735  } else {
736  return StackOperand(var);
737  }
738 }
739 
740 
741 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
742  // Use destination as scratch.
743  MemOperand location = VarOperand(var, dest);
744  __ ldr(dest, location);
745 }
746 
747 
748 void FullCodeGenerator::SetVar(Variable* var,
749  Register src,
750  Register scratch0,
751  Register scratch1) {
752  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
753  ASSERT(!scratch0.is(src));
754  ASSERT(!scratch0.is(scratch1));
755  ASSERT(!scratch1.is(src));
756  MemOperand location = VarOperand(var, scratch0);
757  __ str(src, location);
758 
759  // Emit the write barrier code if the location is in the heap.
760  if (var->IsContextSlot()) {
761  __ RecordWriteContextSlot(scratch0,
762  location.offset(),
763  src,
764  scratch1,
765  kLRHasBeenSaved,
766  kDontSaveFPRegs);
767  }
768 }
769 
770 
771 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
772  bool should_normalize,
773  Label* if_true,
774  Label* if_false) {
775  // Only prepare for bailouts before splits if we're in a test
776  // context. Otherwise, we let the Visit function deal with the
777  // preparation to avoid preparing with the same AST id twice.
778  if (!context()->IsTest() || !info_->IsOptimizable()) return;
779 
780  Label skip;
781  if (should_normalize) __ b(&skip);
782  PrepareForBailout(expr, TOS_REG);
783  if (should_normalize) {
784  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
785  __ cmp(r0, ip);
786  Split(eq, if_true, if_false, NULL);
787  __ bind(&skip);
788  }
789 }
790 
791 
792 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
793  // The variable in the declaration always resides in the current function
794  // context.
795  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
796  if (generate_debug_code_) {
797  // Check that we're not inside a with or catch context.
798  __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
799  __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
800  __ Check(ne, kDeclarationInWithContext);
801  __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
802  __ Check(ne, kDeclarationInCatchContext);
803  }
804 }
805 
806 
807 void FullCodeGenerator::VisitVariableDeclaration(
808  VariableDeclaration* declaration) {
809  // If it was not possible to allocate the variable at compile time, we
810  // need to "declare" it at runtime to make sure it actually exists in the
811  // local context.
812  VariableProxy* proxy = declaration->proxy();
813  VariableMode mode = declaration->mode();
814  Variable* variable = proxy->var();
815  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
816  switch (variable->location()) {
817  case Variable::UNALLOCATED:
818  globals_->Add(variable->name(), zone());
819  globals_->Add(variable->binding_needs_init()
820  ? isolate()->factory()->the_hole_value()
821  : isolate()->factory()->undefined_value(),
822  zone());
823  break;
824 
825  case Variable::PARAMETER:
826  case Variable::LOCAL:
827  if (hole_init) {
828  Comment cmnt(masm_, "[ VariableDeclaration");
829  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
830  __ str(ip, StackOperand(variable));
831  }
832  break;
833 
834  case Variable::CONTEXT:
835  if (hole_init) {
836  Comment cmnt(masm_, "[ VariableDeclaration");
837  EmitDebugCheckDeclarationContext(variable);
838  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
839  __ str(ip, ContextOperand(cp, variable->index()));
840  // No write barrier since the_hole_value is in old space.
841  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
842  }
843  break;
844 
845  case Variable::LOOKUP: {
846  Comment cmnt(masm_, "[ VariableDeclaration");
847  __ mov(r2, Operand(variable->name()));
848  // Declaration nodes are always introduced in one of four modes.
849  ASSERT(IsDeclaredVariableMode(mode));
850  PropertyAttributes attr =
851  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
852  __ mov(r1, Operand(Smi::FromInt(attr)));
853  // Push initial value, if any.
854  // Note: For variables we must not push an initial value (such as
855  // 'undefined') because we may have a (legal) redeclaration and we
856  // must not destroy the current value.
857  if (hole_init) {
858  __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
859  __ Push(cp, r2, r1, r0);
860  } else {
861  __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
862  __ Push(cp, r2, r1, r0);
863  }
864  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
865  break;
866  }
867  }
868 }
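// Rough correspondence (sketch, assuming the usual scope analysis): a global
// 'var x' ends up UNALLOCATED and is registered in globals_; a 'var x' local
// to a function is PARAMETER/LOCAL on the stack; a local captured by a
// closure becomes CONTEXT; and declarations that must be resolved at runtime
// (e.g. inside a 'with' or a sloppy direct eval) take the LOOKUP path via
// Runtime::kHiddenDeclareContextSlot.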
869 
870 
871 void FullCodeGenerator::VisitFunctionDeclaration(
872  FunctionDeclaration* declaration) {
873  VariableProxy* proxy = declaration->proxy();
874  Variable* variable = proxy->var();
875  switch (variable->location()) {
876  case Variable::UNALLOCATED: {
877  globals_->Add(variable->name(), zone());
878  Handle<SharedFunctionInfo> function =
879  Compiler::BuildFunctionInfo(declaration->fun(), script());
880  // Check for stack-overflow exception.
881  if (function.is_null()) return SetStackOverflow();
882  globals_->Add(function, zone());
883  break;
884  }
885 
886  case Variable::PARAMETER:
887  case Variable::LOCAL: {
888  Comment cmnt(masm_, "[ FunctionDeclaration");
889  VisitForAccumulatorValue(declaration->fun());
890  __ str(result_register(), StackOperand(variable));
891  break;
892  }
893 
894  case Variable::CONTEXT: {
895  Comment cmnt(masm_, "[ FunctionDeclaration");
896  EmitDebugCheckDeclarationContext(variable);
897  VisitForAccumulatorValue(declaration->fun());
898  __ str(result_register(), ContextOperand(cp, variable->index()));
899  int offset = Context::SlotOffset(variable->index());
900  // We know that we have written a function, which is not a smi.
901  __ RecordWriteContextSlot(cp,
902  offset,
903  result_register(),
904  r2,
905  kLRHasBeenSaved,
906  kDontSaveFPRegs,
907  EMIT_REMEMBERED_SET,
908  OMIT_SMI_CHECK);
909  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
910  break;
911  }
912 
913  case Variable::LOOKUP: {
914  Comment cmnt(masm_, "[ FunctionDeclaration");
915  __ mov(r2, Operand(variable->name()));
916  __ mov(r1, Operand(Smi::FromInt(NONE)));
917  __ Push(cp, r2, r1);
918  // Push initial value for function declaration.
919  VisitForStackValue(declaration->fun());
920  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
921  break;
922  }
923  }
924 }
925 
926 
927 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
928  Variable* variable = declaration->proxy()->var();
929  ASSERT(variable->location() == Variable::CONTEXT);
930  ASSERT(variable->interface()->IsFrozen());
931 
932  Comment cmnt(masm_, "[ ModuleDeclaration");
933  EmitDebugCheckDeclarationContext(variable);
934 
935  // Load instance object.
936  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
937  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
938  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
939 
940  // Assign it.
941  __ str(r1, ContextOperand(cp, variable->index()));
942  // We know that we have written a module, which is not a smi.
943  __ RecordWriteContextSlot(cp,
944  Context::SlotOffset(variable->index()),
945  r1,
946  r3,
947  kLRHasBeenSaved,
948  kDontSaveFPRegs,
949  EMIT_REMEMBERED_SET,
950  OMIT_SMI_CHECK);
951  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
952 
953  // Traverse into body.
954  Visit(declaration->module());
955 }
956 
957 
958 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
959  VariableProxy* proxy = declaration->proxy();
960  Variable* variable = proxy->var();
961  switch (variable->location()) {
962  case Variable::UNALLOCATED:
963  // TODO(rossberg)
964  break;
965 
966  case Variable::CONTEXT: {
967  Comment cmnt(masm_, "[ ImportDeclaration");
968  EmitDebugCheckDeclarationContext(variable);
969  // TODO(rossberg)
970  break;
971  }
972 
973  case Variable::PARAMETER:
974  case Variable::LOCAL:
975  case Variable::LOOKUP:
976  UNREACHABLE();
977  }
978 }
979 
980 
981 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
982  // TODO(rossberg)
983 }
984 
985 
986 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
987  // Call the runtime to declare the globals.
988  // The context is the first argument.
989  __ mov(r1, Operand(pairs));
990  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
991  __ Push(cp, r1, r0);
992  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
993  // Return value is ignored.
994 }
995 
996 
997 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
998  // Call the runtime to declare the modules.
999  __ Push(descriptions);
1000  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
1001  // Return value is ignored.
1002 }
1003 
1004 
1005 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1006  Comment cmnt(masm_, "[ SwitchStatement");
1007  Breakable nested_statement(this, stmt);
1008  SetStatementPosition(stmt);
1009 
1010  // Keep the switch value on the stack until a case matches.
1011  VisitForStackValue(stmt->tag());
1012  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1013 
1014  ZoneList<CaseClause*>* clauses = stmt->cases();
1015  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1016 
1017  Label next_test; // Recycled for each test.
1018  // Compile all the tests with branches to their bodies.
1019  for (int i = 0; i < clauses->length(); i++) {
1020  CaseClause* clause = clauses->at(i);
1021  clause->body_target()->Unuse();
1022 
1023  // The default is not a test, but remember it as final fall through.
1024  if (clause->is_default()) {
1025  default_clause = clause;
1026  continue;
1027  }
1028 
1029  Comment cmnt(masm_, "[ Case comparison");
1030  __ bind(&next_test);
1031  next_test.Unuse();
1032 
1033  // Compile the label expression.
1034  VisitForAccumulatorValue(clause->label());
1035 
1036  // Perform the comparison as if via '==='.
1037  __ ldr(r1, MemOperand(sp, 0)); // Switch value.
1038  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1039  JumpPatchSite patch_site(masm_);
1040  if (inline_smi_code) {
1041  Label slow_case;
1042  __ orr(r2, r1, r0);
1043  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1044 
1045  __ cmp(r1, r0);
1046  __ b(ne, &next_test);
1047  __ Drop(1); // Switch value is no longer needed.
1048  __ b(clause->body_target());
1049  __ bind(&slow_case);
1050  }
1051 
1052  // Record position before stub call for type feedback.
1053  SetSourcePosition(clause->position());
1054  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1055  CallIC(ic, clause->CompareId());
1056  patch_site.EmitPatchInfo();
1057 
1058  Label skip;
1059  __ b(&skip);
1060  PrepareForBailout(clause, TOS_REG);
1061  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1062  __ cmp(r0, ip);
1063  __ b(ne, &next_test);
1064  __ Drop(1);
1065  __ jmp(clause->body_target());
1066  __ bind(&skip);
1067 
1068  __ cmp(r0, Operand::Zero());
1069  __ b(ne, &next_test);
1070  __ Drop(1); // Switch value is no longer needed.
1071  __ b(clause->body_target());
1072  }
1073 
1074  // Discard the test value and jump to the default if present, otherwise to
1075  // the end of the statement.
1076  __ bind(&next_test);
1077  __ Drop(1); // Switch value is no longer needed.
1078  if (default_clause == NULL) {
1079  __ b(nested_statement.break_label());
1080  } else {
1081  __ b(default_clause->body_target());
1082  }
1083 
1084  // Compile all the case bodies.
1085  for (int i = 0; i < clauses->length(); i++) {
1086  Comment cmnt(masm_, "[ Case body");
1087  CaseClause* clause = clauses->at(i);
1088  __ bind(clause->body_target());
1089  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1090  VisitStatements(clause->statements());
1091  }
1092 
1093  __ bind(nested_statement.break_label());
1094  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1095 }
1096 
1097 
1098 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1099  Comment cmnt(masm_, "[ ForInStatement");
1100  int slot = stmt->ForInFeedbackSlot();
1101  SetStatementPosition(stmt);
1102 
1103  Label loop, exit;
1104  ForIn loop_statement(this, stmt);
1105  increment_loop_depth();
1106 
1107  // Get the object to enumerate over. If the object is null or undefined, skip
1108  // over the loop. See ECMA-262 version 5, section 12.6.4.
1109  VisitForAccumulatorValue(stmt->enumerable());
1110  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1111  __ cmp(r0, ip);
1112  __ b(eq, &exit);
1113  Register null_value = r5;
1114  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1115  __ cmp(r0, null_value);
1116  __ b(eq, &exit);
1117 
1118  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1119 
1120  // Convert the object to a JS object.
1121  Label convert, done_convert;
1122  __ JumpIfSmi(r0, &convert);
1123  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1124  __ b(ge, &done_convert);
1125  __ bind(&convert);
1126  __ push(r0);
1127  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1128  __ bind(&done_convert);
1129  __ push(r0);
1130 
1131  // Check for proxies.
1132  Label call_runtime;
1133  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1134  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1135  __ b(le, &call_runtime);
1136 
1137  // Check cache validity in generated code. This is a fast case for
1138  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1139  // guarantee cache validity, call the runtime system to check cache
1140  // validity or get the property names in a fixed array.
1141  __ CheckEnumCache(null_value, &call_runtime);
1142 
1143  // The enum cache is valid. Load the map of the object being
1144  // iterated over and use the cache for the iteration.
1145  Label use_cache;
1146  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1147  __ b(&use_cache);
1148 
1149  // Get the set of properties to enumerate.
1150  __ bind(&call_runtime);
1151  __ push(r0); // Duplicate the enumerable object on the stack.
1152  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1153 
1154  // If we got a map from the runtime call, we can do a fast
1155  // modification check. Otherwise, we got a fixed array, and we have
1156  // to do a slow check.
1157  Label fixed_array;
1158  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1159  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1160  __ cmp(r2, ip);
1161  __ b(ne, &fixed_array);
1162 
1163  // We got a map in register r0. Get the enumeration cache from it.
1164  Label no_descriptors;
1165  __ bind(&use_cache);
1166 
1167  __ EnumLength(r1, r0);
1168  __ cmp(r1, Operand(Smi::FromInt(0)));
1169  __ b(eq, &no_descriptors);
1170 
1171  __ LoadInstanceDescriptors(r0, r2);
1172  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1173  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1174 
1175  // Set up the four remaining stack slots.
1176  __ push(r0); // Map.
1177  __ mov(r0, Operand(Smi::FromInt(0)));
1178  // Push enumeration cache, enumeration cache length (as smi) and zero.
1179  __ Push(r2, r1, r0);
1180  __ jmp(&loop);
1181 
1182  __ bind(&no_descriptors);
1183  __ Drop(1);
1184  __ jmp(&exit);
1185 
1186  // We got a fixed array in register r0. Iterate through that.
1187  Label non_proxy;
1188  __ bind(&fixed_array);
1189 
1190  Handle<Object> feedback = Handle<Object>(
1191  Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1192  isolate());
1193  StoreFeedbackVectorSlot(slot, feedback);
1194  __ Move(r1, FeedbackVector());
1195  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
1196  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
1197 
1198  __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1199  __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1200  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1201  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1202  __ b(gt, &non_proxy);
1203  __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1204  __ bind(&non_proxy);
1205  __ Push(r1, r0); // Smi and array
1206  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1207  __ mov(r0, Operand(Smi::FromInt(0)));
1208  __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1209 
1210  // Generate code for doing the condition check.
1211  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1212  __ bind(&loop);
1213  // Load the current count to r0, load the length to r1.
1214  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1215  __ cmp(r0, r1); // Compare to the array length.
1216  __ b(hs, loop_statement.break_label());
1217 
1218  // Get the current entry of the array into register r3.
1219  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1220  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1221  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
1222 
1223  // Get the expected map from the stack or a smi in the
1224  // permanent slow case into register r2.
1225  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1226 
1227  // Check if the expected map still matches that of the enumerable.
1228  // If not, we may have to filter the key.
1229  Label update_each;
1230  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1231  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1232  __ cmp(r4, Operand(r2));
1233  __ b(eq, &update_each);
1234 
1235  // For proxies, no filtering is done.
1236  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1237  __ cmp(r2, Operand(Smi::FromInt(0)));
1238  __ b(eq, &update_each);
1239 
1240  // Convert the entry to a string or (smi) 0 if it isn't a property
1241  // any more. If the property has been removed while iterating, we
1242  // just skip it.
1243  __ push(r1); // Enumerable.
1244  __ push(r3); // Current entry.
1245  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1246  __ mov(r3, Operand(r0), SetCC);
1247  __ b(eq, loop_statement.continue_label());
1248 
1249  // Update the 'each' property or variable from the possibly filtered
1250  // entry in register r3.
1251  __ bind(&update_each);
1252  __ mov(result_register(), r3);
1253  // Perform the assignment as if via '='.
1254  { EffectContext context(this);
1255  EmitAssignment(stmt->each());
1256  }
1257 
1258  // Generate code for the body of the loop.
1259  Visit(stmt->body());
1260 
1261  // Generate code for going to the next element by incrementing
1262  // the index (smi) stored on top of the stack.
1263  __ bind(loop_statement.continue_label());
1264  __ pop(r0);
1265  __ add(r0, r0, Operand(Smi::FromInt(1)));
1266  __ push(r0);
1267 
1268  EmitBackEdgeBookkeeping(stmt, &loop);
1269  __ b(&loop);
1270 
1271  // Remove the pointers stored on the stack.
1272  __ bind(loop_statement.break_label());
1273  __ Drop(5);
1274 
1275  // Exit and decrement the loop depth.
1276  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1277  __ bind(&exit);
1278  decrement_loop_depth();
1279 }
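// Stack layout maintained by the for-in loop above (sketch), from sp upwards:
//   sp + 0*kPointerSize : current index (smi)
//   sp + 1*kPointerSize : array length (smi)
//   sp + 2*kPointerSize : FixedArray of keys (enum cache or property-name array)
//   sp + 3*kPointerSize : expected map, or Smi(0) for proxies / Smi(1) for the
//                         generic slow case
//   sp + 4*kPointerSize : the enumerable object itself
// All five slots are dropped at loop_statement.break_label().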
1280 
1281 
1282 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1283  Comment cmnt(masm_, "[ ForOfStatement");
1284  SetStatementPosition(stmt);
1285 
1286  Iteration loop_statement(this, stmt);
1287  increment_loop_depth();
1288 
1289  // var iterator = iterable[@@iterator]()
1290  VisitForAccumulatorValue(stmt->assign_iterator());
1291 
1292  // As with for-in, skip the loop if the iterator is null or undefined.
1293  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
1294  __ b(eq, loop_statement.break_label());
1295  __ CompareRoot(r0, Heap::kNullValueRootIndex);
1296  __ b(eq, loop_statement.break_label());
1297 
1298  // Convert the iterator to a JS object.
1299  Label convert, done_convert;
1300  __ JumpIfSmi(r0, &convert);
1301  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1302  __ b(ge, &done_convert);
1303  __ bind(&convert);
1304  __ push(r0);
1305  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1306  __ bind(&done_convert);
1307  __ push(r0);
1308 
1309  // Loop entry.
1310  __ bind(loop_statement.continue_label());
1311 
1312  // result = iterator.next()
1313  VisitForEffect(stmt->next_result());
1314 
1315  // if (result.done) break;
1316  Label result_not_done;
1317  VisitForControl(stmt->result_done(),
1318  loop_statement.break_label(),
1319  &result_not_done,
1320  &result_not_done);
1321  __ bind(&result_not_done);
1322 
1323  // each = result.value
1324  VisitForEffect(stmt->assign_each());
1325 
1326  // Generate code for the body of the loop.
1327  Visit(stmt->body());
1328 
1329  // Check stack before looping.
1330  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1331  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1332  __ jmp(loop_statement.continue_label());
1333 
1334  // Exit and decrement the loop depth.
1335  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1336  __ bind(loop_statement.break_label());
1337  decrement_loop_depth();
1338 }
1339 
1340 
1341 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1342  bool pretenure) {
1343  // Use the fast case closure allocation code that allocates in new
1344  // space for nested functions that don't need literals cloning. If
1345  // we're running with the --always-opt or the --prepare-always-opt
1346  // flag, we need to use the runtime function so that the new function
1347  // we are creating here gets a chance to have its code optimized and
1348  // doesn't just get a copy of the existing unoptimized code.
1349  if (!FLAG_always_opt &&
1350  !FLAG_prepare_always_opt &&
1351  !pretenure &&
1352  scope()->is_function_scope() &&
1353  info->num_literals() == 0) {
1354  FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1355  __ mov(r2, Operand(info));
1356  __ CallStub(&stub);
1357  } else {
1358  __ mov(r0, Operand(info));
1359  __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1360  : Heap::kFalseValueRootIndex);
1361  __ Push(cp, r0, r1);
1362  __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1363  }
1364  context()->Plug(r0);
1365 }
1366 
1367 
1368 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1369  Comment cmnt(masm_, "[ VariableProxy");
1370  EmitVariableLoad(expr);
1371 }
1372 
1373 
1374 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1375  TypeofState typeof_state,
1376  Label* slow) {
1377  Register current = cp;
1378  Register next = r1;
1379  Register temp = r2;
1380 
1381  Scope* s = scope();
1382  while (s != NULL) {
1383  if (s->num_heap_slots() > 0) {
1384  if (s->calls_sloppy_eval()) {
1385  // Check that extension is NULL.
1386  __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1387  __ tst(temp, temp);
1388  __ b(ne, slow);
1389  }
1390  // Load next context in chain.
1391  __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1392  // Walk the rest of the chain without clobbering cp.
1393  current = next;
1394  }
1395  // If no outer scope calls eval, we do not need to check more
1396  // context extensions.
1397  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1398  s = s->outer_scope();
1399  }
1400 
1401  if (s->is_eval_scope()) {
1402  Label loop, fast;
1403  if (!current.is(next)) {
1404  __ Move(next, current);
1405  }
1406  __ bind(&loop);
1407  // Terminate at native context.
1408  __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1409  __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1410  __ cmp(temp, ip);
1411  __ b(eq, &fast);
1412  // Check that extension is NULL.
1413  __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1414  __ tst(temp, temp);
1415  __ b(ne, slow);
1416  // Load next context in chain.
1417  __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1418  __ b(&loop);
1419  __ bind(&fast);
1420  }
1421 
1422  __ ldr(r0, GlobalObjectOperand());
1423  __ mov(r2, Operand(var->name()));
1424  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1425  ? NOT_CONTEXTUAL
1426  : CONTEXTUAL;
1427  CallLoadIC(mode);
1428 }
1429 
1430 
1431 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1432  Label* slow) {
1433  ASSERT(var->IsContextSlot());
1434  Register context = cp;
1435  Register next = r3;
1436  Register temp = r4;
1437 
1438  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1439  if (s->num_heap_slots() > 0) {
1440  if (s->calls_sloppy_eval()) {
1441  // Check that extension is NULL.
1442  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1443  __ tst(temp, temp);
1444  __ b(ne, slow);
1445  }
1446  __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1447  // Walk the rest of the chain without clobbering cp.
1448  context = next;
1449  }
1450  }
1451  // Check that last extension is NULL.
1452  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1453  __ tst(temp, temp);
1454  __ b(ne, slow);
1455 
1456  // This function is used only for loads, not stores, so it's safe to
1457  // return a cp-based operand (the write barrier cannot be allowed to
1458  // destroy the cp register).
1459  return ContextOperand(context, var->index());
1460 }
1461 
1462 
1463 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1464  TypeofState typeof_state,
1465  Label* slow,
1466  Label* done) {
1467  // Generate fast-case code for variables that might be shadowed by
1468  // eval-introduced variables. Eval is used a lot without
1469  // introducing variables. In those cases, we do not want to
1470  // perform a runtime call for all variables in the scope
1471  // containing the eval.
1472  if (var->mode() == DYNAMIC_GLOBAL) {
1473  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1474  __ jmp(done);
1475  } else if (var->mode() == DYNAMIC_LOCAL) {
1476  Variable* local = var->local_if_not_shadowed();
1477  __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1478  if (local->mode() == LET || local->mode() == CONST ||
1479  local->mode() == CONST_LEGACY) {
1480  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1481  if (local->mode() == CONST_LEGACY) {
1482  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1483  } else { // LET || CONST
1484  __ b(ne, done);
1485  __ mov(r0, Operand(var->name()));
1486  __ push(r0);
1487  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1488  }
1489  }
1490  __ jmp(done);
1491  }
1492 }
1493 
1494 
1495 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1496  // Record position before possible IC call.
1497  SetSourcePosition(proxy->position());
1498  Variable* var = proxy->var();
1499 
1500  // Three cases: global variables, lookup variables, and all other types of
1501  // variables.
1502  switch (var->location()) {
1503  case Variable::UNALLOCATED: {
1504  Comment cmnt(masm_, "[ Global variable");
1505  // Use inline caching. Variable name is passed in r2 and the global
1506  // object (receiver) in r0.
1507  __ ldr(r0, GlobalObjectOperand());
1508  __ mov(r2, Operand(var->name()));
1509  CallLoadIC(CONTEXTUAL);
1510  context()->Plug(r0);
1511  break;
1512  }
1513 
1514  case Variable::PARAMETER:
1515  case Variable::LOCAL:
1516  case Variable::CONTEXT: {
1517  Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1518  : "[ Stack variable");
1519  if (var->binding_needs_init()) {
1520  // var->scope() may be NULL when the proxy is located in eval code and
1521  // refers to a potential outside binding. Currently those bindings are
1522  // always looked up dynamically, i.e. in that case
1523  // var->location() == LOOKUP.
1524  // always holds.
1525  ASSERT(var->scope() != NULL);
1526 
1527  // Check if the binding really needs an initialization check. The check
1528  // can be skipped in the following situation: we have a LET or CONST
1529  // binding in harmony mode, both the Variable and the VariableProxy have
1530  // the same declaration scope (i.e. they are both in global code, in the
1531  // same function or in the same eval code) and the VariableProxy is in
1532  // the source physically located after the initializer of the variable.
1533  //
1534  // We cannot skip any initialization checks for CONST in non-harmony
1535  // mode because const variables may be declared but never initialized:
1536  // if (false) { const x; }; var y = x;
1537  //
1538  // The condition on the declaration scopes is a conservative check for
1539  // nested functions that access a binding and are called before the
1540  // binding is initialized:
1541  // function() { f(); let x = 1; function f() { x = 2; } }
1542  //
1543  bool skip_init_check;
1544  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1545  skip_init_check = false;
1546  } else {
1547  // Check that we always have valid source position.
1548  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1549  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1550  skip_init_check = var->mode() != CONST_LEGACY &&
1551  var->initializer_position() < proxy->position();
1552  }
1553 
1554  if (!skip_init_check) {
1555  // Let and const need a read barrier.
1556  GetVar(r0, var);
1557  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1558  if (var->mode() == LET || var->mode() == CONST) {
1559  // Throw a reference error when using an uninitialized let/const
1560  // binding in harmony mode.
1561  Label done;
1562  __ b(ne, &done);
1563  __ mov(r0, Operand(var->name()));
1564  __ push(r0);
1565  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1566  __ bind(&done);
1567  } else {
1568  // Uninitialized const bindings outside of harmony mode are unholed.
1569  ASSERT(var->mode() == CONST_LEGACY);
1570  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1571  }
1572  context()->Plug(r0);
1573  break;
1574  }
1575  }
1576  context()->Plug(var);
1577  break;
1578  }
1579 
1580  case Variable::LOOKUP: {
1581  Comment cmnt(masm_, "[ Lookup variable");
1582  Label done, slow;
1583  // Generate code for loading from variables potentially shadowed
1584  // by eval-introduced variables.
1585  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1586  __ bind(&slow);
1587  __ mov(r1, Operand(var->name()));
1588  __ Push(cp, r1); // Context and name.
1589  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1590  __ bind(&done);
1591  context()->Plug(r0);
1592  }
1593  }
1594 }
1595 
1596 
1597 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1598  Comment cmnt(masm_, "[ RegExpLiteral");
1599  Label materialized;
1600  // Registers will be used as follows:
1601  // r5 = materialized value (RegExp literal)
1602  // r4 = JS function, literals array
1603  // r3 = literal index
1604  // r2 = RegExp pattern
1605  // r1 = RegExp flags
1606  // r0 = RegExp literal clone
1607  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1608  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1609  int literal_offset =
1610  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1611  __ ldr(r5, FieldMemOperand(r4, literal_offset));
1612  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1613  __ cmp(r5, ip);
1614  __ b(ne, &materialized);
1615 
1616  // Create regexp literal using runtime function.
1617  // Result will be in r0.
1618  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1619  __ mov(r2, Operand(expr->pattern()));
1620  __ mov(r1, Operand(expr->flags()));
1621  __ Push(r4, r3, r2, r1);
1622  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1623  __ mov(r5, r0);
1624 
1625  __ bind(&materialized);
1626  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1627  Label allocated, runtime_allocate;
1628  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1629  __ jmp(&allocated);
1630 
1631  __ bind(&runtime_allocate);
1632  __ mov(r0, Operand(Smi::FromInt(size)));
1633  __ Push(r5, r0);
1634  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1635  __ pop(r5);
1636 
1637  __ bind(&allocated);
1638  // After this, registers are used as follows:
1639  // r0: Newly allocated regexp.
1640  // r5: Materialized regexp.
1641  // r2: temp.
1642  __ CopyFields(r0, r5, d0, size / kPointerSize);
1643  context()->Plug(r0);
1644 }
1645 
1646 
1647 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1648  if (expression == NULL) {
1649  __ LoadRoot(r1, Heap::kNullValueRootIndex);
1650  __ push(r1);
1651  } else {
1652  VisitForStackValue(expression);
1653  }
1654 }
1655 
1656 
1657 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1658  Comment cmnt(masm_, "[ ObjectLiteral");
1659 
1660  expr->BuildConstantProperties(isolate());
1661  Handle<FixedArray> constant_properties = expr->constant_properties();
1662  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1663  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1664  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1665  __ mov(r1, Operand(constant_properties));
1666  int flags = expr->fast_elements()
1667  ? ObjectLiteral::kFastElements
1668  : ObjectLiteral::kNoFlags;
1669  flags |= expr->has_function()
1670  ? ObjectLiteral::kHasFunction
1671  : ObjectLiteral::kNoFlags;
1672  __ mov(r0, Operand(Smi::FromInt(flags)));
1673  int properties_count = constant_properties->length() / 2;
1674  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1675  flags != ObjectLiteral::kFastElements ||
1677  __ Push(r3, r2, r1, r0);
1678  __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1679  } else {
1680  FastCloneShallowObjectStub stub(properties_count);
1681  __ CallStub(&stub);
1682  }
1683 
1684  // If result_saved is true the result is on top of the stack. If
1685  // result_saved is false the result is in r0.
1686  bool result_saved = false;
1687 
1688  // Mark all computed expressions that are bound to a key that
1689  // is shadowed by a later occurrence of the same key. For the
1690  // marked expressions, no store code is emitted.
1691  expr->CalculateEmitStore(zone());
1692 
1693  AccessorTable accessor_table(zone());
1694  for (int i = 0; i < expr->properties()->length(); i++) {
1695  ObjectLiteral::Property* property = expr->properties()->at(i);
1696  if (property->IsCompileTimeValue()) continue;
1697 
1698  Literal* key = property->key();
1699  Expression* value = property->value();
1700  if (!result_saved) {
1701  __ push(r0); // Save result on stack
1702  result_saved = true;
1703  }
1704  switch (property->kind()) {
1706  UNREACHABLE();
1707  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1708  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1709  // Fall through.
1710  case ObjectLiteral::Property::COMPUTED:
1711  if (key->value()->IsInternalizedString()) {
1712  if (property->emit_store()) {
1713  VisitForAccumulatorValue(value);
1714  __ mov(r2, Operand(key->value()));
1715  __ ldr(r1, MemOperand(sp));
1716  CallStoreIC(key->LiteralFeedbackId());
1717  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1718  } else {
1719  VisitForEffect(value);
1720  }
1721  break;
1722  }
1723  // Duplicate receiver on stack.
1724  __ ldr(r0, MemOperand(sp));
1725  __ push(r0);
1726  VisitForStackValue(key);
1727  VisitForStackValue(value);
1728  if (property->emit_store()) {
1729  __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1730  __ push(r0);
1731  __ CallRuntime(Runtime::kSetProperty, 4);
1732  } else {
1733  __ Drop(3);
1734  }
1735  break;
1736  case ObjectLiteral::Property::PROTOTYPE:
1737  // Duplicate receiver on stack.
1738  __ ldr(r0, MemOperand(sp));
1739  __ push(r0);
1740  VisitForStackValue(value);
1741  if (property->emit_store()) {
1742  __ CallRuntime(Runtime::kSetPrototype, 2);
1743  } else {
1744  __ Drop(2);
1745  }
1746  break;
1747 
1748  case ObjectLiteral::Property::GETTER:
1749  accessor_table.lookup(key)->second->getter = value;
1750  break;
1751  case ObjectLiteral::Property::SETTER:
1752  accessor_table.lookup(key)->second->setter = value;
1753  break;
1754  }
1755  }
1756 
1757  // Emit code to define accessors, using only a single call to the runtime for
1758  // each pair of corresponding getters and setters.
1759  for (AccessorTable::Iterator it = accessor_table.begin();
1760  it != accessor_table.end();
1761  ++it) {
1762  __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1763  __ push(r0);
1764  VisitForStackValue(it->first);
1765  EmitAccessor(it->second->getter);
1766  EmitAccessor(it->second->setter);
1767  __ mov(r0, Operand(Smi::FromInt(NONE)));
1768  __ push(r0);
1769  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1770  }
1771 
1772  if (expr->has_function()) {
1773  ASSERT(result_saved);
1774  __ ldr(r0, MemOperand(sp));
1775  __ push(r0);
1776  __ CallRuntime(Runtime::kToFastProperties, 1);
1777  }
1778 
1779  if (result_saved) {
1780  context()->PlugTOS();
1781  } else {
1782  context()->Plug(r0);
1783  }
1784 }
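
The accessor_table loop above exists so that a getter and a setter for the same key are installed with a single runtime call. A small sketch of that pairing step, with hypothetical stand-in types (Expr, AccessorPair are not V8 identifiers):

#include <map>
#include <string>

struct Expr;  // stand-in for the AST value expression
struct AccessorPair { const Expr* getter = nullptr; const Expr* setter = nullptr; };

// Group getters and setters by property name so each name is defined once,
// mirroring accessor_table.lookup(key)->second->getter / ->setter above.
void CollectAccessor(const std::string& name, const Expr* fn, bool is_getter,
                     std::map<std::string, AccessorPair>& table) {
  AccessorPair& pair = table[name];  // lookup-or-insert, like lookup(key)
  if (is_getter) pair.getter = fn; else pair.setter = fn;
}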
1785 
1786 
1787 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1788  Comment cmnt(masm_, "[ ArrayLiteral");
1789 
1790  expr->BuildConstantElements(isolate());
1791  int flags = expr->depth() == 1
1792  ? ArrayLiteral::kShallowElements
1793  : ArrayLiteral::kNoFlags;
1794 
1795  ZoneList<Expression*>* subexprs = expr->values();
1796  int length = subexprs->length();
1797  Handle<FixedArray> constant_elements = expr->constant_elements();
1798  ASSERT_EQ(2, constant_elements->length());
1799  ElementsKind constant_elements_kind =
1800  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1801  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1802  Handle<FixedArrayBase> constant_elements_values(
1803  FixedArrayBase::cast(constant_elements->get(1)));
1804 
1805  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1806  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1807  // If allocation sites are only used to track element-kind transitions,
1808  // tracking can be turned off when there is no better kind to transition to.
1809  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1810  }
1811 
1814  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1815  __ mov(r1, Operand(constant_elements));
1816  if (has_fast_elements && constant_elements_values->map() ==
1817  isolate()->heap()->fixed_cow_array_map()) {
1818  FastCloneShallowArrayStub stub(
1820  allocation_site_mode,
1821  length);
1822  __ CallStub(&stub);
1823  __ IncrementCounter(
1824  isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1825  } else if (expr->depth() > 1 || Serializer::enabled() ||
1827  __ mov(r0, Operand(Smi::FromInt(flags)));
1828  __ Push(r3, r2, r1, r0);
1829  __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1830  } else {
1831  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1832  FLAG_smi_only_arrays);
1835 
1836  if (has_fast_elements) {
1838  }
1839 
1840  FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1841  __ CallStub(&stub);
1842  }
1843 
1844  bool result_saved = false; // Is the result saved to the stack?
1845 
1846  // Emit code to evaluate all the non-constant subexpressions and to store
1847  // them into the newly cloned array.
1848  for (int i = 0; i < length; i++) {
1849  Expression* subexpr = subexprs->at(i);
1850  // If the subexpression is a literal or a simple materialized literal it
1851  // is already set in the cloned array.
1852  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1853 
1854  if (!result_saved) {
1855  __ push(r0);
1856  __ Push(Smi::FromInt(expr->literal_index()));
1857  result_saved = true;
1858  }
1859  VisitForAccumulatorValue(subexpr);
1860 
1861  if (IsFastObjectElementsKind(constant_elements_kind)) {
1862  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1863  __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1865  __ str(result_register(), FieldMemOperand(r1, offset));
1866  // Update the write barrier for the array store.
1867  __ RecordWriteField(r1, offset, result_register(), r2,
1870  } else {
1871  __ mov(r3, Operand(Smi::FromInt(i)));
1872  StoreArrayLiteralElementStub stub;
1873  __ CallStub(&stub);
1874  }
1875 
1876  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1877  }
1878 
1879  if (result_saved) {
1880  __ pop(); // literal index
1881  context()->PlugTOS();
1882  } else {
1883  context()->Plug(r0);
1884  }
1885 }
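
The fast-elements store above writes each evaluated element at FixedArray::kHeaderSize + i * kPointerSize. A small sketch of that offset computation, assuming the 32-bit layout targeted here (4-byte pointers, a FixedArray header of map plus length):

#include <cstdint>

constexpr int kPointerSizeBytes = 4;                       // 32-bit ARM target
constexpr int kFixedArrayHeaderBytes = 2 * kPointerSizeBytes;  // map + length

// Untagged field offset of element i, matching the offset used in the str above.
constexpr int ElementOffset(int index) {
  return kFixedArrayHeaderBytes + index * kPointerSizeBytes;
}
static_assert(ElementOffset(0) == 8, "first element follows map and length");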
1886 
1887 
1888 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1889  ASSERT(expr->target()->IsValidLeftHandSide());
1890 
1891  Comment cmnt(masm_, "[ Assignment");
1892 
1893  // Left-hand side can only be a property, a global or a (parameter or local)
1894  // slot.
1895  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1896  LhsKind assign_type = VARIABLE;
1897  Property* property = expr->target()->AsProperty();
1898  if (property != NULL) {
1899  assign_type = (property->key()->IsPropertyName())
1900  ? NAMED_PROPERTY
1901  : KEYED_PROPERTY;
1902  }
1903 
1904  // Evaluate LHS expression.
1905  switch (assign_type) {
1906  case VARIABLE:
1907  // Nothing to do here.
1908  break;
1909  case NAMED_PROPERTY:
1910  if (expr->is_compound()) {
1911  // We need the receiver both on the stack and in the accumulator.
1912  VisitForAccumulatorValue(property->obj());
1913  __ push(result_register());
1914  } else {
1915  VisitForStackValue(property->obj());
1916  }
1917  break;
1918  case KEYED_PROPERTY:
1919  if (expr->is_compound()) {
1920  VisitForStackValue(property->obj());
1921  VisitForAccumulatorValue(property->key());
1922  __ ldr(r1, MemOperand(sp, 0));
1923  __ push(r0);
1924  } else {
1925  VisitForStackValue(property->obj());
1926  VisitForStackValue(property->key());
1927  }
1928  break;
1929  }
1930 
1931  // For compound assignments we need another deoptimization point after the
1932  // variable/property load.
1933  if (expr->is_compound()) {
1934  { AccumulatorValueContext context(this);
1935  switch (assign_type) {
1936  case VARIABLE:
1937  EmitVariableLoad(expr->target()->AsVariableProxy());
1938  PrepareForBailout(expr->target(), TOS_REG);
1939  break;
1940  case NAMED_PROPERTY:
1941  EmitNamedPropertyLoad(property);
1942  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1943  break;
1944  case KEYED_PROPERTY:
1945  EmitKeyedPropertyLoad(property);
1946  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1947  break;
1948  }
1949  }
1950 
1951  Token::Value op = expr->binary_op();
1952  __ push(r0); // Left operand goes on the stack.
1953  VisitForAccumulatorValue(expr->value());
1954 
1955  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1956  ? OVERWRITE_RIGHT
1957  : NO_OVERWRITE;
1958  SetSourcePosition(expr->position() + 1);
1959  AccumulatorValueContext context(this);
1960  if (ShouldInlineSmiCase(op)) {
1961  EmitInlineSmiBinaryOp(expr->binary_operation(),
1962  op,
1963  mode,
1964  expr->target(),
1965  expr->value());
1966  } else {
1967  EmitBinaryOp(expr->binary_operation(), op, mode);
1968  }
1969 
1970  // Deoptimization point in case the binary operation may have side effects.
1971  PrepareForBailout(expr->binary_operation(), TOS_REG);
1972  } else {
1973  VisitForAccumulatorValue(expr->value());
1974  }
1975 
1976  // Record source position before possible IC call.
1977  SetSourcePosition(expr->position());
1978 
1979  // Store the value.
1980  switch (assign_type) {
1981  case VARIABLE:
1982  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1983  expr->op());
1984  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1985  context()->Plug(r0);
1986  break;
1987  case NAMED_PROPERTY:
1988  EmitNamedPropertyAssignment(expr);
1989  break;
1990  case KEYED_PROPERTY:
1991  EmitKeyedPropertyAssignment(expr);
1992  break;
1993  }
1994 }
1995 
1996 
1997 void FullCodeGenerator::VisitYield(Yield* expr) {
1998  Comment cmnt(masm_, "[ Yield");
1999  // Evaluate yielded value first; the initial iterator definition depends on
2000  // this. It stays on the stack while we update the iterator.
2001  VisitForStackValue(expr->expression());
2002 
2003  switch (expr->yield_kind()) {
2004  case Yield::SUSPEND:
2005  // Pop value from top-of-stack slot; box result into result register.
2006  EmitCreateIteratorResult(false);
2007  __ push(result_register());
2008  // Fall through.
2009  case Yield::INITIAL: {
2010  Label suspend, continuation, post_runtime, resume;
2011 
2012  __ jmp(&suspend);
2013 
2014  __ bind(&continuation);
2015  __ jmp(&resume);
2016 
2017  __ bind(&suspend);
2018  VisitForAccumulatorValue(expr->generator_object());
2019  ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2020  __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2023  __ mov(r1, cp);
2024  __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2027  __ cmp(sp, r1);
2028  __ b(eq, &post_runtime);
2029  __ push(r0); // generator object
2030  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2032  __ bind(&post_runtime);
2033  __ pop(result_register());
2034  EmitReturnSequence();
2035 
2036  __ bind(&resume);
2037  context()->Plug(result_register());
2038  break;
2039  }
2040 
2041  case Yield::FINAL: {
2042  VisitForAccumulatorValue(expr->generator_object());
2044  __ str(r1, FieldMemOperand(result_register(),
2046  // Pop value from top-of-stack slot, box result into result register.
2047  EmitCreateIteratorResult(true);
2048  EmitUnwindBeforeReturn();
2049  EmitReturnSequence();
2050  break;
2051  }
2052 
2053  case Yield::DELEGATING: {
2054  VisitForStackValue(expr->generator_object());
2055 
2056  // Initial stack layout is as follows:
2057  // [sp + 1 * kPointerSize] iter
2058  // [sp + 0 * kPointerSize] g
2059 
2060  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2061  Label l_next, l_call, l_loop;
2062  // Initial send value is undefined.
2063  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2064  __ b(&l_next);
2065 
2066  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2067  __ bind(&l_catch);
2068  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2069  __ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
2070  __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2071  __ Push(r2, r3, r0); // "throw", iter, except
2072  __ jmp(&l_call);
2073 
2074  // try { received = %yield result }
2075  // Shuffle the received result above a try handler and yield it without
2076  // re-boxing.
2077  __ bind(&l_try);
2078  __ pop(r0); // result
2079  __ PushTryHandler(StackHandler::CATCH, expr->index());
2080  const int handler_size = StackHandlerConstants::kSize;
2081  __ push(r0); // result
2082  __ jmp(&l_suspend);
2083  __ bind(&l_continuation);
2084  __ jmp(&l_resume);
2085  __ bind(&l_suspend);
2086  const int generator_object_depth = kPointerSize + handler_size;
2087  __ ldr(r0, MemOperand(sp, generator_object_depth));
2088  __ push(r0); // g
2089  ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2090  __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2093  __ mov(r1, cp);
2094  __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2096  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2098  __ pop(r0); // result
2099  EmitReturnSequence();
2100  __ bind(&l_resume); // received in r0
2101  __ PopTryHandler();
2102 
2103  // receiver = iter; f = 'next'; arg = received;
2104  __ bind(&l_next);
2105  __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2106  __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2107  __ Push(r2, r3, r0); // "next", iter, received
2108 
2109  // result = receiver[f](arg);
2110  __ bind(&l_call);
2111  __ ldr(r1, MemOperand(sp, kPointerSize));
2112  __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2113  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2114  CallIC(ic, TypeFeedbackId::None());
2115  __ mov(r1, r0);
2116  __ str(r1, MemOperand(sp, 2 * kPointerSize));
2117  CallFunctionStub stub(1, CALL_AS_METHOD);
2118  __ CallStub(&stub);
2119 
2121  __ Drop(1); // The function is still on the stack; drop it.
2122 
2123  // if (!result.done) goto l_try;
2124  __ bind(&l_loop);
2125  __ push(r0); // save result
2126  __ LoadRoot(r2, Heap::kdone_stringRootIndex); // "done"
2127  CallLoadIC(NOT_CONTEXTUAL); // result.done in r0
2128  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2129  CallIC(bool_ic);
2130  __ cmp(r0, Operand(0));
2131  __ b(eq, &l_try);
2132 
2133  // result.value
2134  __ pop(r0); // result
2135  __ LoadRoot(r2, Heap::kvalue_stringRootIndex); // "value"
2136  CallLoadIC(NOT_CONTEXTUAL); // result.value in r0
2137  context()->DropAndPlug(2, r0); // drop iter and g
2138  break;
2139  }
2140  }
2141 }
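
Stripped of the try-handler bookkeeping, the delegating case above is a next/throw loop over the inner iterator. A simplified sketch of that control flow (plain C++ types instead of V8 objects, and ignoring the throw path that l_catch handles):

struct StepResult { int value; bool done; };

struct Iterator {
  virtual StepResult next(int received) = 0;
  virtual ~Iterator() = default;
};

// received starts out undefined (0 here); every non-final result is re-yielded
// to the outer caller via on_yield, and the final result.value becomes the
// value of the whole delegating yield.
int DelegateYield(Iterator& iter, int (*on_yield)(int value)) {
  int received = 0;
  for (;;) {
    StepResult result = iter.next(received);   // result = receiver[f](arg)
    if (result.done) return result.value;      // result.value
    received = on_yield(result.value);         // re-yield without re-boxing
  }
}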
2142 
2143 
2144 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2145  Expression *value,
2146  JSGeneratorObject::ResumeMode resume_mode) {
2147  // The value stays in r0, and is ultimately read by the resumed generator, as
2148  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2149  // is read to throw the value when the resumed generator is already closed.
2150  // r1 will hold the generator object until the activation has been resumed.
2151  VisitForStackValue(generator);
2152  VisitForAccumulatorValue(value);
2153  __ pop(r1);
2154 
2155  // Check generator state.
2156  Label wrong_state, closed_state, done;
2160  __ cmp(r3, Operand(Smi::FromInt(0)));
2161  __ b(eq, &closed_state);
2162  __ b(lt, &wrong_state);
2163 
2164  // Load suspended function and context.
2167 
2168  // Load receiver and store as the first argument.
2170  __ push(r2);
2171 
2172  // Push holes for the rest of the arguments to the generator function.
2174  __ ldr(r3,
2176  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2177  Label push_argument_holes, push_frame;
2178  __ bind(&push_argument_holes);
2179  __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2180  __ b(mi, &push_frame);
2181  __ push(r2);
2182  __ jmp(&push_argument_holes);
2183 
2184  // Enter a new JavaScript frame, and initialize its slots as they were when
2185  // the generator was suspended.
2186  Label resume_frame;
2187  __ bind(&push_frame);
2188  __ bl(&resume_frame);
2189  __ jmp(&done);
2190  __ bind(&resume_frame);
2191  // lr = return address.
2192  // fp = caller's frame pointer.
2193  // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2194  // cp = callee's context,
2195  // r4 = callee's JS function.
2196  __ PushFixedFrame(r4);
2197  // Adjust FP to point to saved FP.
2199 
2200  // Load the operand stack size.
2203  __ SmiUntag(r3);
2204 
2205  // If we are sending a value and there is no operand stack, we can jump back
2206  // in directly.
2207  if (resume_mode == JSGeneratorObject::NEXT) {
2208  Label slow_resume;
2209  __ cmp(r3, Operand(0));
2210  __ b(ne, &slow_resume);
2212 
2213  { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2214  if (FLAG_enable_ool_constant_pool) {
2215  // Load the new code object's constant pool pointer.
2216  __ ldr(pp,
2218  }
2219 
2221  __ SmiUntag(r2);
2222  __ add(r3, r3, r2);
2225  __ Jump(r3);
2226  }
2227  __ bind(&slow_resume);
2228  }
2229 
2230  // Otherwise, we push holes for the operand stack and call the runtime to fix
2231  // up the stack and the handlers.
2232  Label push_operand_holes, call_resume;
2233  __ bind(&push_operand_holes);
2234  __ sub(r3, r3, Operand(1), SetCC);
2235  __ b(mi, &call_resume);
2236  __ push(r2);
2237  __ b(&push_operand_holes);
2238  __ bind(&call_resume);
2239  ASSERT(!result_register().is(r1));
2240  __ Push(r1, result_register());
2241  __ Push(Smi::FromInt(resume_mode));
2242  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2243  // Not reached: the runtime call returns elsewhere.
2244  __ stop("not-reached");
2245 
2246  // Reach here when generator is closed.
2247  __ bind(&closed_state);
2248  if (resume_mode == JSGeneratorObject::NEXT) {
2249  // Return completed iterator result when generator is closed.
2250  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2251  __ push(r2);
2252  // Pop value from top-of-stack slot; box result into result register.
2253  EmitCreateIteratorResult(true);
2254  } else {
2255  // Throw the provided value.
2256  __ push(r0);
2257  __ CallRuntime(Runtime::kHiddenThrow, 1);
2258  }
2259  __ jmp(&done);
2260 
2261  // Throw error if we attempt to operate on a running generator.
2262  __ bind(&wrong_state);
2263  __ push(r1);
2264  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2265 
2266  __ bind(&done);
2267  context()->Plug(result_register());
2268 }
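
The state check at the top of this function compares the generator's continuation field against zero and branches on eq and lt. A sketch of the encoding those branches imply (inferred from the comparisons, not from named V8 constants):

enum class GeneratorState { kSuspended, kClosed, kRunning };

GeneratorState ClassifyContinuation(int continuation) {
  if (continuation == 0) return GeneratorState::kClosed;   // b(eq, &closed_state)
  if (continuation < 0) return GeneratorState::kRunning;   // b(lt, &wrong_state)
  return GeneratorState::kSuspended;  // positive: offset to resume at
}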
2269 
2270 
2271 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2272  Label gc_required;
2273  Label allocated;
2274 
2275  Handle<Map> map(isolate()->native_context()->generator_result_map());
2276 
2277  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2278  __ jmp(&allocated);
2279 
2280  __ bind(&gc_required);
2281  __ Push(Smi::FromInt(map->instance_size()));
2282  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2283  __ ldr(context_register(),
2285 
2286  __ bind(&allocated);
2287  __ mov(r1, Operand(map));
2288  __ pop(r2);
2289  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2290  __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2291  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2295  __ str(r2,
2297  __ str(r3,
2299 
2300  // Only the value field needs a write barrier, as the other values are in the
2301  // root set.
2303  r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2304 }
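
The five pointer-size words asserted on above are filled in with the map, two empty fixed arrays, the popped value, and the done flag. A layout sketch in store order; the field names are descriptive only, not V8 identifiers:

struct IteratorResultLayout {
  void* map;         // r1: the native context's generator_result_map
  void* properties;  // r4: empty_fixed_array
  void* elements;    // r4: empty_fixed_array
  void* value;       // r2: the popped value (the one field needing a write barrier)
  void* done;        // r3: true/false boolean, already in the root set
};
static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(void*),
              "matches the instance-size assert above");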
2305 
2306 
2307 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2308  SetSourcePosition(prop->position());
2309  Literal* key = prop->key()->AsLiteral();
2310  __ mov(r2, Operand(key->value()));
2311  // Call the load IC. It takes its arguments, the receiver and the property name, in r0 and r2.
2312  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2313 }
2314 
2315 
2316 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2317  SetSourcePosition(prop->position());
2318  // Call the keyed load IC. It takes the key and the receiver in r0 and r1.
2319  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2320  CallIC(ic, prop->PropertyFeedbackId());
2321 }
2322 
2323 
2324 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2325  Token::Value op,
2326  OverwriteMode mode,
2327  Expression* left_expr,
2328  Expression* right_expr) {
2329  Label done, smi_case, stub_call;
2330 
2331  Register scratch1 = r2;
2332  Register scratch2 = r3;
2333 
2334  // Get the arguments.
2335  Register left = r1;
2336  Register right = r0;
2337  __ pop(left);
2338 
2339  // Perform combined smi check on both operands.
2340  __ orr(scratch1, left, Operand(right));
2341  STATIC_ASSERT(kSmiTag == 0);
2342  JumpPatchSite patch_site(masm_);
2343  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2344 
2345  __ bind(&stub_call);
2346  BinaryOpICStub stub(op, mode);
2347  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2348  patch_site.EmitPatchInfo();
2349  __ jmp(&done);
2350 
2351  __ bind(&smi_case);
2352  // Smi case. This code works the same way as the smi-smi case in the
2353  // type-recording binary operation stub.
2354  switch (op) {
2355  case Token::SAR:
2356  __ GetLeastBitsFromSmi(scratch1, right, 5);
2357  __ mov(right, Operand(left, ASR, scratch1));
2358  __ bic(right, right, Operand(kSmiTagMask));
2359  break;
2360  case Token::SHL: {
2361  __ SmiUntag(scratch1, left);
2362  __ GetLeastBitsFromSmi(scratch2, right, 5);
2363  __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2364  __ TrySmiTag(right, scratch1, &stub_call);
2365  break;
2366  }
2367  case Token::SHR: {
2368  __ SmiUntag(scratch1, left);
2369  __ GetLeastBitsFromSmi(scratch2, right, 5);
2370  __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2371  __ tst(scratch1, Operand(0xc0000000));
2372  __ b(ne, &stub_call);
2373  __ SmiTag(right, scratch1);
2374  break;
2375  }
2376  case Token::ADD:
2377  __ add(scratch1, left, Operand(right), SetCC);
2378  __ b(vs, &stub_call);
2379  __ mov(right, scratch1);
2380  break;
2381  case Token::SUB:
2382  __ sub(scratch1, left, Operand(right), SetCC);
2383  __ b(vs, &stub_call);
2384  __ mov(right, scratch1);
2385  break;
2386  case Token::MUL: {
2387  __ SmiUntag(ip, right);
2388  __ smull(scratch1, scratch2, left, ip);
2389  __ mov(ip, Operand(scratch1, ASR, 31));
2390  __ cmp(ip, Operand(scratch2));
2391  __ b(ne, &stub_call);
2392  __ cmp(scratch1, Operand::Zero());
2393  __ mov(right, Operand(scratch1), LeaveCC, ne);
2394  __ b(ne, &done);
2395  __ add(scratch2, right, Operand(left), SetCC);
2396  __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2397  __ b(mi, &stub_call);
2398  break;
2399  }
2400  case Token::BIT_OR:
2401  __ orr(right, left, Operand(right));
2402  break;
2403  case Token::BIT_AND:
2404  __ and_(right, left, Operand(right));
2405  break;
2406  case Token::BIT_XOR:
2407  __ eor(right, left, Operand(right));
2408  break;
2409  default:
2410  UNREACHABLE();
2411  }
2412 
2413  __ bind(&done);
2414  context()->Plug(r0);
2415 }
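
The combined smi check and the Token::ADD fast path above rely on the 32-bit smi encoding: the value is stored shifted left by one with a zero tag bit. A standalone sketch of the same two checks (helper names are illustrative, not V8 functions; requires C++17 for std::optional):

#include <cstdint>
#include <optional>

// Both operands are smis exactly when the OR of the tagged words has a clear
// low bit, which is what the patch site tests after the orr above.
bool BothSmis(int32_t left_tagged, int32_t right_tagged) {
  return ((left_tagged | right_tagged) & 1) == 0;
}

// Tagged addition with the same bail-out condition as the SetCC / b(vs) pair:
// fall back to the stub (nullopt here) when the 32-bit add overflows.
std::optional<int32_t> SmiAdd(int32_t left_tagged, int32_t right_tagged) {
  int64_t sum = static_cast<int64_t>(left_tagged) + right_tagged;
  if (sum < INT32_MIN || sum > INT32_MAX) return std::nullopt;
  return static_cast<int32_t>(sum);
}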
2416 
2417 
2418 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2419  Token::Value op,
2420  OverwriteMode mode) {
2421  __ pop(r1);
2422  BinaryOpICStub stub(op, mode);
2423  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2424  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2425  patch_site.EmitPatchInfo();
2426  context()->Plug(r0);
2427 }
2428 
2429 
2430 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2431  ASSERT(expr->IsValidLeftHandSide());
2432 
2433  // Left-hand side can only be a property, a global or a (parameter or local)
2434  // slot.
2435  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2436  LhsKind assign_type = VARIABLE;
2437  Property* prop = expr->AsProperty();
2438  if (prop != NULL) {
2439  assign_type = (prop->key()->IsPropertyName())
2440  ? NAMED_PROPERTY
2441  : KEYED_PROPERTY;
2442  }
2443 
2444  switch (assign_type) {
2445  case VARIABLE: {
2446  Variable* var = expr->AsVariableProxy()->var();
2447  EffectContext context(this);
2448  EmitVariableAssignment(var, Token::ASSIGN);
2449  break;
2450  }
2451  case NAMED_PROPERTY: {
2452  __ push(r0); // Preserve value.
2453  VisitForAccumulatorValue(prop->obj());
2454  __ mov(r1, r0);
2455  __ pop(r0); // Restore value.
2456  __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2457  CallStoreIC();
2458  break;
2459  }
2460  case KEYED_PROPERTY: {
2461  __ push(r0); // Preserve value.
2462  VisitForStackValue(prop->obj());
2463  VisitForAccumulatorValue(prop->key());
2464  __ mov(r1, r0);
2465  __ Pop(r0, r2); // r0 = restored value.
2466  Handle<Code> ic = strict_mode() == SLOPPY
2467  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2468  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2469  CallIC(ic);
2470  break;
2471  }
2472  }
2473  context()->Plug(r0);
2474 }
2475 
2476 
2477 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2478  Variable* var, MemOperand location) {
2479  __ str(result_register(), location);
2480  if (var->IsContextSlot()) {
2481  // RecordWrite may destroy all its register arguments.
2482  __ mov(r3, result_register());
2483  int offset = Context::SlotOffset(var->index());
2484  __ RecordWriteContextSlot(
2485  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2486  }
2487 }
2488 
2489 
2490 void FullCodeGenerator::EmitCallStoreContextSlot(
2491  Handle<String> name, StrictMode strict_mode) {
2492  __ push(r0); // Value.
2493  __ mov(r1, Operand(name));
2494  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
2495  __ Push(cp, r1, r0); // Context, name, strict mode.
2496  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2497 }
2498 
2499 
2500 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2501  if (var->IsUnallocated()) {
2502  // Global var, const, or let.
2503  __ mov(r2, Operand(var->name()));
2504  __ ldr(r1, GlobalObjectOperand());
2505  CallStoreIC();
2506 
2507  } else if (op == Token::INIT_CONST_LEGACY) {
2508  // Const initializers need a write barrier.
2509  ASSERT(!var->IsParameter()); // No const parameters.
2510  if (var->IsLookupSlot()) {
2511  __ push(r0);
2512  __ mov(r0, Operand(var->name()));
2513  __ Push(cp, r0); // Context and name.
2514  __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2515  } else {
2516  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2517  Label skip;
2518  MemOperand location = VarOperand(var, r1);
2519  __ ldr(r2, location);
2520  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2521  __ b(ne, &skip);
2522  EmitStoreToStackLocalOrContextSlot(var, location);
2523  __ bind(&skip);
2524  }
2525 
2526  } else if (var->mode() == LET && op != Token::INIT_LET) {
2527  // Non-initializing assignment to let variable needs a write barrier.
2528  if (var->IsLookupSlot()) {
2529  EmitCallStoreContextSlot(var->name(), strict_mode());
2530  } else {
2531  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2532  Label assign;
2533  MemOperand location = VarOperand(var, r1);
2534  __ ldr(r3, location);
2535  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2536  __ b(ne, &assign);
2537  __ mov(r3, Operand(var->name()));
2538  __ push(r3);
2539  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2540  // Perform the assignment.
2541  __ bind(&assign);
2542  EmitStoreToStackLocalOrContextSlot(var, location);
2543  }
2544 
2545  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2546  // Assignment to var or initializing assignment to let/const
2547  // in harmony mode.
2548  if (var->IsLookupSlot()) {
2549  EmitCallStoreContextSlot(var->name(), strict_mode());
2550  } else {
2551  ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2552  MemOperand location = VarOperand(var, r1);
2553  if (generate_debug_code_ && op == Token::INIT_LET) {
2554  // Check for an uninitialized let binding.
2555  __ ldr(r2, location);
2556  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2557  __ Check(eq, kLetBindingReInitialization);
2558  }
2559  EmitStoreToStackLocalOrContextSlot(var, location);
2560  }
2561  }
2562  // Non-initializing assignments to consts are ignored.
2563 }
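
For legacy const, the hole check above means only the first initializer ever writes the slot; later assignments fall through without effect. A minimal sketch of that rule, using an empty std::optional as a stand-in for the hole value tested with CompareRoot:

#include <optional>

using Slot = std::optional<int>;  // empty == the-hole (assumed representation)

// Store only while the slot is still the hole: the first initialization wins,
// and non-initializing const assignments are ignored, as in the code above.
void InitConstLegacy(Slot& slot, int value) {
  if (!slot.has_value()) {
    slot = value;
  }
}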
2564 
2565 
2566 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2567  // Assignment to a property, using a named store IC.
2568  Property* prop = expr->target()->AsProperty();
2569  ASSERT(prop != NULL);
2570  ASSERT(prop->key()->AsLiteral() != NULL);
2571 
2572  // Record source code position before IC call.
2573  SetSourcePosition(expr->position());
2574  __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2575  __ pop(r1);
2576 
2577  CallStoreIC(expr->AssignmentFeedbackId());
2578 
2579  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580  context()->Plug(r0);
2581 }
2582 
2583 
2584 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2585  // Assignment to a property, using a keyed store IC.
2586 
2587  // Record source code position before IC call.
2588  SetSourcePosition(expr->position());
2589  __ Pop(r2, r1); // r1 = key.
2590 
2591  Handle<Code> ic = strict_mode() == SLOPPY
2592  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2593  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2594  CallIC(ic, expr->AssignmentFeedbackId());
2595 
2596  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2597  context()->Plug(r0);
2598 }
2599 
2600 
2601 void FullCodeGenerator::VisitProperty(Property* expr) {
2602  Comment cmnt(masm_, "[ Property");
2603  Expression* key = expr->key();
2604 
2605  if (key->IsPropertyName()) {
2606  VisitForAccumulatorValue(expr->obj());
2607  EmitNamedPropertyLoad(expr);
2608  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2609  context()->Plug(r0);
2610  } else {
2611  VisitForStackValue(expr->obj());
2612  VisitForAccumulatorValue(expr->key());
2613  __ pop(r1);
2614  EmitKeyedPropertyLoad(expr);
2615  context()->Plug(r0);
2616  }
2617 }
2618 
2619 
2620 void FullCodeGenerator::CallIC(Handle<Code> code,
2621  TypeFeedbackId ast_id) {
2622  ic_total_count_++;
2623  // All calls must have a predictable size in full-codegen code to ensure that
2624  // the debugger can patch them correctly.
2625  __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2627 }
2628 
2629 
2630 // Code common for calls using the IC.
2631 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2632  Expression* callee = expr->expression();
2633  ZoneList<Expression*>* args = expr->arguments();
2634  int arg_count = args->length();
2635 
2637  // Get the target function.
2638  if (callee->IsVariableProxy()) {
2639  { StackValueContext context(this);
2640  EmitVariableLoad(callee->AsVariableProxy());
2641  PrepareForBailout(callee, NO_REGISTERS);
2642  }
2643  // Push undefined as receiver. This is patched in the method prologue if it
2644  // is a sloppy mode method.
2645  __ Push(isolate()->factory()->undefined_value());
2646  flags = NO_CALL_FUNCTION_FLAGS;
2647  } else {
2648  // Load the function from the receiver.
2649  ASSERT(callee->IsProperty());
2650  __ ldr(r0, MemOperand(sp, 0));
2651  EmitNamedPropertyLoad(callee->AsProperty());
2652  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2653  // Push the target function under the receiver.
2654  __ ldr(ip, MemOperand(sp, 0));
2655  __ push(ip);
2656  __ str(r0, MemOperand(sp, kPointerSize));
2657  flags = CALL_AS_METHOD;
2658  }
2659 
2660  // Load the arguments.
2661  { PreservePositionScope scope(masm()->positions_recorder());
2662  for (int i = 0; i < arg_count; i++) {
2663  VisitForStackValue(args->at(i));
2664  }
2665  }
2666 
2667  // Record source position for debugger.
2668  SetSourcePosition(expr->position());
2669  CallFunctionStub stub(arg_count, flags);
2670  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2671  __ CallStub(&stub);
2672 
2673  RecordJSReturnSite(expr);
2674 
2675  // Restore context register.
2677 
2678  context()->DropAndPlug(1, r0);
2679 }
2680 
2681 
2682 // Code common for calls using the IC.
2683 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2684  Expression* key) {
2685  // Load the key.
2686  VisitForAccumulatorValue(key);
2687 
2688  Expression* callee = expr->expression();
2689  ZoneList<Expression*>* args = expr->arguments();
2690  int arg_count = args->length();
2691 
2692  // Load the function from the receiver.
2693  ASSERT(callee->IsProperty());
2694  __ ldr(r1, MemOperand(sp, 0));
2695  EmitKeyedPropertyLoad(callee->AsProperty());
2696  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2697 
2698  // Push the target function under the receiver.
2699  __ ldr(ip, MemOperand(sp, 0));
2700  __ push(ip);
2701  __ str(r0, MemOperand(sp, kPointerSize));
2702 
2703  { PreservePositionScope scope(masm()->positions_recorder());
2704  for (int i = 0; i < arg_count; i++) {
2705  VisitForStackValue(args->at(i));
2706  }
2707  }
2708 
2709  // Record source position for debugger.
2710  SetSourcePosition(expr->position());
2711  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2712  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2713  __ CallStub(&stub);
2714 
2715  RecordJSReturnSite(expr);
2716  // Restore context register.
2718 
2719  context()->DropAndPlug(1, r0);
2720 }
2721 
2722 
2723 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2724  // Code common for calls using the call stub.
2725  ZoneList<Expression*>* args = expr->arguments();
2726  int arg_count = args->length();
2727  { PreservePositionScope scope(masm()->positions_recorder());
2728  for (int i = 0; i < arg_count; i++) {
2729  VisitForStackValue(args->at(i));
2730  }
2731  }
2732  // Record source position for debugger.
2733  SetSourcePosition(expr->position());
2734 
2735  Handle<Object> uninitialized =
2737  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2738  __ Move(r2, FeedbackVector());
2739  __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2740 
2741  // Record call targets in unoptimized code.
2742  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2743  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2744  __ CallStub(&stub);
2745  RecordJSReturnSite(expr);
2746  // Restore context register.
2748  context()->DropAndPlug(1, r0);
2749 }
2750 
2751 
2752 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2753  // r4: copy of the first argument or undefined if it doesn't exist.
2754  if (arg_count > 0) {
2755  __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2756  } else {
2757  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2758  }
2759 
2760  // r3: the receiver of the enclosing function.
2761  int receiver_offset = 2 + info_->scope()->num_parameters();
2762  __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2763 
2764  // r2: strict mode.
2765  __ mov(r2, Operand(Smi::FromInt(strict_mode())));
2766 
2767  // r1: the start position of the scope the call resides in.
2768  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2769 
2770  // Do the runtime call.
2771  __ Push(r4, r3, r2, r1);
2772  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2773 }
2774 
2775 
2776 void FullCodeGenerator::VisitCall(Call* expr) {
2777 #ifdef DEBUG
2778  // We want to verify that RecordJSReturnSite gets called on all paths
2779  // through this function. Avoid early returns.
2780  expr->return_is_recorded_ = false;
2781 #endif
2782 
2783  Comment cmnt(masm_, "[ Call");
2784  Expression* callee = expr->expression();
2785  Call::CallType call_type = expr->GetCallType(isolate());
2786 
2787  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2788  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2789  // to resolve the function we need to call and the receiver of the
2790  // call. Then we call the resolved function using the given
2791  // arguments.
2792  ZoneList<Expression*>* args = expr->arguments();
2793  int arg_count = args->length();
2794 
2795  { PreservePositionScope pos_scope(masm()->positions_recorder());
2796  VisitForStackValue(callee);
2797  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2798  __ push(r2); // Reserved receiver slot.
2799 
2800  // Push the arguments.
2801  for (int i = 0; i < arg_count; i++) {
2802  VisitForStackValue(args->at(i));
2803  }
2804 
2805  // Push a copy of the function (found below the arguments) and
2806  // resolve eval.
2807  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2808  __ push(r1);
2809  EmitResolvePossiblyDirectEval(arg_count);
2810 
2811  // The runtime call returns a pair of values in r0 (function) and
2812  // r1 (receiver). Touch up the stack with the right values.
2813  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2814  __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2815  }
2816 
2817  // Record source position for debugger.
2818  SetSourcePosition(expr->position());
2819  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2820  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2821  __ CallStub(&stub);
2822  RecordJSReturnSite(expr);
2823  // Restore context register.
2825  context()->DropAndPlug(1, r0);
2826  } else if (call_type == Call::GLOBAL_CALL) {
2827  EmitCallWithIC(expr);
2828 
2829  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2830  // Call to a lookup slot (dynamically introduced variable).
2831  VariableProxy* proxy = callee->AsVariableProxy();
2832  Label slow, done;
2833 
2834  { PreservePositionScope scope(masm()->positions_recorder());
2835  // Generate code for loading from variables potentially shadowed
2836  // by eval-introduced variables.
2837  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2838  }
2839 
2840  __ bind(&slow);
2841  // Call the runtime to find the function to call (returned in r0)
2842  // and the object holding it (returned in r1).
2843  ASSERT(!context_register().is(r2));
2844  __ mov(r2, Operand(proxy->name()));
2845  __ Push(context_register(), r2);
2846  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2847  __ Push(r0, r1); // Function, receiver.
2848 
2849  // If fast case code has been generated, emit code to push the
2850  // function and receiver and have the slow path jump around this
2851  // code.
2852  if (done.is_linked()) {
2853  Label call;
2854  __ b(&call);
2855  __ bind(&done);
2856  // Push function.
2857  __ push(r0);
2858  // The receiver is implicitly the global receiver. Indicate this
2859  // by passing undefined to the call function stub.
2860  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2861  __ push(r1);
2862  __ bind(&call);
2863  }
2864 
2865  // The receiver is either the global receiver or an object found
2866  // by LoadContextSlot.
2867  EmitCallWithStub(expr);
2868  } else if (call_type == Call::PROPERTY_CALL) {
2869  Property* property = callee->AsProperty();
2870  { PreservePositionScope scope(masm()->positions_recorder());
2871  VisitForStackValue(property->obj());
2872  }
2873  if (property->key()->IsPropertyName()) {
2874  EmitCallWithIC(expr);
2875  } else {
2876  EmitKeyedCallWithIC(expr, property->key());
2877  }
2878  } else {
2879  ASSERT(call_type == Call::OTHER_CALL);
2880  // Call to an arbitrary expression not handled specially above.
2881  { PreservePositionScope scope(masm()->positions_recorder());
2882  VisitForStackValue(callee);
2883  }
2884  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2885  __ push(r1);
2886  // Emit function call.
2887  EmitCallWithStub(expr);
2888  }
2889 
2890 #ifdef DEBUG
2891  // RecordJSReturnSite should have been called.
2892  ASSERT(expr->return_is_recorded_);
2893 #endif
2894 }
2895 
2896 
2897 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2898  Comment cmnt(masm_, "[ CallNew");
2899  // According to ECMA-262, section 11.2.2, page 44, the function
2900  // expression in new calls must be evaluated before the
2901  // arguments.
2902 
2903  // Push the constructor on the stack. If it's not a function, it's used as
2904  // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
2905  // ignored.
2906  VisitForStackValue(expr->expression());
2907 
2908  // Push the arguments ("left-to-right") on the stack.
2909  ZoneList<Expression*>* args = expr->arguments();
2910  int arg_count = args->length();
2911  for (int i = 0; i < arg_count; i++) {
2912  VisitForStackValue(args->at(i));
2913  }
2914 
2915  // Call the construct call builtin that handles allocation and
2916  // constructor invocation.
2917  SetSourcePosition(expr->position());
2918 
2919  // Load function and argument count into r1 and r0.
2920  __ mov(r0, Operand(arg_count));
2921  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2922 
2923  // Record call targets in unoptimized code.
2924  Handle<Object> uninitialized =
2926  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2927  if (FLAG_pretenuring_call_new) {
2928  StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2929  isolate()->factory()->NewAllocationSite());
2930  ASSERT(expr->AllocationSiteFeedbackSlot() ==
2931  expr->CallNewFeedbackSlot() + 1);
2932  }
2933 
2934  __ Move(r2, FeedbackVector());
2935  __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2936 
2937  CallConstructStub stub(RECORD_CALL_TARGET);
2938  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2939  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2940  context()->Plug(r0);
2941 }
2942 
2943 
2944 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2945  ZoneList<Expression*>* args = expr->arguments();
2946  ASSERT(args->length() == 1);
2947 
2948  VisitForAccumulatorValue(args->at(0));
2949 
2950  Label materialize_true, materialize_false;
2951  Label* if_true = NULL;
2952  Label* if_false = NULL;
2953  Label* fall_through = NULL;
2954  context()->PrepareTest(&materialize_true, &materialize_false,
2955  &if_true, &if_false, &fall_through);
2956 
2957  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2958  __ SmiTst(r0);
2959  Split(eq, if_true, if_false, fall_through);
2960 
2961  context()->Plug(if_true, if_false);
2962 }
2963 
2964 
2965 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2966  ZoneList<Expression*>* args = expr->arguments();
2967  ASSERT(args->length() == 1);
2968 
2969  VisitForAccumulatorValue(args->at(0));
2970 
2971  Label materialize_true, materialize_false;
2972  Label* if_true = NULL;
2973  Label* if_false = NULL;
2974  Label* fall_through = NULL;
2975  context()->PrepareTest(&materialize_true, &materialize_false,
2976  &if_true, &if_false, &fall_through);
2977 
2978  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2979  __ NonNegativeSmiTst(r0);
2980  Split(eq, if_true, if_false, fall_through);
2981 
2982  context()->Plug(if_true, if_false);
2983 }
2984 
2985 
2986 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2987  ZoneList<Expression*>* args = expr->arguments();
2988  ASSERT(args->length() == 1);
2989 
2990  VisitForAccumulatorValue(args->at(0));
2991 
2992  Label materialize_true, materialize_false;
2993  Label* if_true = NULL;
2994  Label* if_false = NULL;
2995  Label* fall_through = NULL;
2996  context()->PrepareTest(&materialize_true, &materialize_false,
2997  &if_true, &if_false, &fall_through);
2998 
2999  __ JumpIfSmi(r0, if_false);
3000  __ LoadRoot(ip, Heap::kNullValueRootIndex);
3001  __ cmp(r0, ip);
3002  __ b(eq, if_true);
3004  // Undetectable objects behave like undefined when tested with typeof.
3006  __ tst(r1, Operand(1 << Map::kIsUndetectable));
3007  __ b(ne, if_false);
3009  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3010  __ b(lt, if_false);
3011  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3012  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013  Split(le, if_true, if_false, fall_through);
3014 
3015  context()->Plug(if_true, if_false);
3016 }
3017 
3018 
3019 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3020  ZoneList<Expression*>* args = expr->arguments();
3021  ASSERT(args->length() == 1);
3022 
3023  VisitForAccumulatorValue(args->at(0));
3024 
3025  Label materialize_true, materialize_false;
3026  Label* if_true = NULL;
3027  Label* if_false = NULL;
3028  Label* fall_through = NULL;
3029  context()->PrepareTest(&materialize_true, &materialize_false,
3030  &if_true, &if_false, &fall_through);
3031 
3032  __ JumpIfSmi(r0, if_false);
3033  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3034  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035  Split(ge, if_true, if_false, fall_through);
3036 
3037  context()->Plug(if_true, if_false);
3038 }
3039 
3040 
3041 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3042  ZoneList<Expression*>* args = expr->arguments();
3043  ASSERT(args->length() == 1);
3044 
3045  VisitForAccumulatorValue(args->at(0));
3046 
3047  Label materialize_true, materialize_false;
3048  Label* if_true = NULL;
3049  Label* if_false = NULL;
3050  Label* fall_through = NULL;
3051  context()->PrepareTest(&materialize_true, &materialize_false,
3052  &if_true, &if_false, &fall_through);
3053 
3054  __ JumpIfSmi(r0, if_false);
3057  __ tst(r1, Operand(1 << Map::kIsUndetectable));
3058  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3059  Split(ne, if_true, if_false, fall_through);
3060 
3061  context()->Plug(if_true, if_false);
3062 }
3063 
3064 
3065 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3066  CallRuntime* expr) {
3067  ZoneList<Expression*>* args = expr->arguments();
3068  ASSERT(args->length() == 1);
3069 
3070  VisitForAccumulatorValue(args->at(0));
3071 
3072  Label materialize_true, materialize_false, skip_lookup;
3073  Label* if_true = NULL;
3074  Label* if_false = NULL;
3075  Label* fall_through = NULL;
3076  context()->PrepareTest(&materialize_true, &materialize_false,
3077  &if_true, &if_false, &fall_through);
3078 
3079  __ AssertNotSmi(r0);
3080 
3083  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3084  __ b(ne, &skip_lookup);
3085 
3086  // Check for fast case object. Generate false result for slow case object.
3089  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3090  __ cmp(r2, ip);
3091  __ b(eq, if_false);
3092 
3093  // Look for the valueOf name in the descriptor array and indicate false if
3094  // it is found. Since we omit an enumeration index check, a valueOf added
3095  // via a transition that shares this descriptor array is a false positive.
3096  Label entry, loop, done;
3097 
3098  // Skip loop if no descriptors are valid.
3099  __ NumberOfOwnDescriptors(r3, r1);
3100  __ cmp(r3, Operand::Zero());
3101  __ b(eq, &done);
3102 
3103  __ LoadInstanceDescriptors(r1, r4);
3104  // r4: descriptor array.
3105  // r3: valid entries in the descriptor array.
3106  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3107  __ mul(r3, r3, ip);
3108  // Calculate location of the first key name.
3110  // Calculate the end of the descriptor array.
3111  __ mov(r2, r4);
3112  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
3113 
3114  // Loop through all the keys in the descriptor array. If one of these is the
3115  // string "valueOf" the result is false.
3116  // The use of ip to store the valueOf string assumes that it is not otherwise
3117  // used in the loop below.
3118  __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3119  __ jmp(&entry);
3120  __ bind(&loop);
3121  __ ldr(r3, MemOperand(r4, 0));
3122  __ cmp(r3, ip);
3123  __ b(eq, if_false);
3124  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3125  __ bind(&entry);
3126  __ cmp(r4, Operand(r2));
3127  __ b(ne, &loop);
3128 
3129  __ bind(&done);
3130 
3131  // Set the bit in the map to indicate that there is no local valueOf field.
3133  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3135 
3136  __ bind(&skip_lookup);
3137 
3138  // If a valueOf property is not found on the object, check that its
3139  // prototype is the unmodified String prototype. If not, the result is false.
3141  __ JumpIfSmi(r2, if_false);
3146  __ cmp(r2, r3);
3147  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3148  Split(eq, if_true, if_false, fall_through);
3149 
3150  context()->Plug(if_true, if_false);
3151 }
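
The descriptor scan above walks the first N valid descriptor keys with a fixed stride and bails out as soon as it sees "valueOf". A simplified sketch of that scan with hypothetical types (stride stands in for DescriptorArray::kDescriptorSize):

#include <string>
#include <vector>

bool DescriptorsContainValueOf(const std::vector<std::string>& keys,
                               int valid, int stride) {
  // Visit one key per descriptor, up to `valid` descriptors.
  for (int i = 0; i < valid * stride && i < static_cast<int>(keys.size());
       i += stride) {
    if (keys[i] == "valueOf") return true;   // corresponds to b(eq, if_false)
  }
  return false;
}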
3152 
3153 
3154 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3155  ZoneList<Expression*>* args = expr->arguments();
3156  ASSERT(args->length() == 1);
3157 
3158  VisitForAccumulatorValue(args->at(0));
3159 
3160  Label materialize_true, materialize_false;
3161  Label* if_true = NULL;
3162  Label* if_false = NULL;
3163  Label* fall_through = NULL;
3164  context()->PrepareTest(&materialize_true, &materialize_false,
3165  &if_true, &if_false, &fall_through);
3166 
3167  __ JumpIfSmi(r0, if_false);
3168  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3169  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3170  Split(eq, if_true, if_false, fall_through);
3171 
3172  context()->Plug(if_true, if_false);
3173 }
3174 
3175 
3176 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3177  ZoneList<Expression*>* args = expr->arguments();
3178  ASSERT(args->length() == 1);
3179 
3180  VisitForAccumulatorValue(args->at(0));
3181 
3182  Label materialize_true, materialize_false;
3183  Label* if_true = NULL;
3184  Label* if_false = NULL;
3185  Label* fall_through = NULL;
3186  context()->PrepareTest(&materialize_true, &materialize_false,
3187  &if_true, &if_false, &fall_through);
3188 
3189  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3192  __ cmp(r2, Operand(0x80000000));
3193  __ cmp(r1, Operand(0x00000000), eq);
3194 
3195  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3196  Split(eq, if_true, if_false, fall_through);
3197 
3198  context()->Plug(if_true, if_false);
3199 }
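
The heap-number check above compares the upper word against 0x80000000 and the lower word against zero; in IEEE-754 terms that bit pattern (sign bit set, all other bits clear) is exactly -0.0. A standalone sketch of the same test:

#include <cstdint>
#include <cstring>

bool IsMinusZero(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  return bits == (static_cast<uint64_t>(1) << 63);  // only the sign bit set
}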
3200 
3201 
3202 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3203  ZoneList<Expression*>* args = expr->arguments();
3204  ASSERT(args->length() == 1);
3205 
3206  VisitForAccumulatorValue(args->at(0));
3207 
3208  Label materialize_true, materialize_false;
3209  Label* if_true = NULL;
3210  Label* if_false = NULL;
3211  Label* fall_through = NULL;
3212  context()->PrepareTest(&materialize_true, &materialize_false,
3213  &if_true, &if_false, &fall_through);
3214 
3215  __ JumpIfSmi(r0, if_false);
3216  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3217  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3218  Split(eq, if_true, if_false, fall_through);
3219 
3220  context()->Plug(if_true, if_false);
3221 }
3222 
3223 
3224 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3225  ZoneList<Expression*>* args = expr->arguments();
3226  ASSERT(args->length() == 1);
3227 
3228  VisitForAccumulatorValue(args->at(0));
3229 
3230  Label materialize_true, materialize_false;
3231  Label* if_true = NULL;
3232  Label* if_false = NULL;
3233  Label* fall_through = NULL;
3234  context()->PrepareTest(&materialize_true, &materialize_false,
3235  &if_true, &if_false, &fall_through);
3236 
3237  __ JumpIfSmi(r0, if_false);
3238  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3239  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3240  Split(eq, if_true, if_false, fall_through);
3241 
3242  context()->Plug(if_true, if_false);
3243 }
3244 
3245 
3246 
3247 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3248  ASSERT(expr->arguments()->length() == 0);
3249 
3250  Label materialize_true, materialize_false;
3251  Label* if_true = NULL;
3252  Label* if_false = NULL;
3253  Label* fall_through = NULL;
3254  context()->PrepareTest(&materialize_true, &materialize_false,
3255  &if_true, &if_false, &fall_through);
3256 
3257  // Get the frame pointer for the calling frame.
3259 
3260  // Skip the arguments adaptor frame if it exists.
3264 
3265  // Check the marker in the calling frame.
3267  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3268  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269  Split(eq, if_true, if_false, fall_through);
3270 
3271  context()->Plug(if_true, if_false);
3272 }
3273 
3274 
3275 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3276  ZoneList<Expression*>* args = expr->arguments();
3277  ASSERT(args->length() == 2);
3278 
3279  // Load the two objects into registers and perform the comparison.
3280  VisitForStackValue(args->at(0));
3281  VisitForAccumulatorValue(args->at(1));
3282 
3283  Label materialize_true, materialize_false;
3284  Label* if_true = NULL;
3285  Label* if_false = NULL;
3286  Label* fall_through = NULL;
3287  context()->PrepareTest(&materialize_true, &materialize_false,
3288  &if_true, &if_false, &fall_through);
3289 
3290  __ pop(r1);
3291  __ cmp(r0, r1);
3292  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3293  Split(eq, if_true, if_false, fall_through);
3294 
3295  context()->Plug(if_true, if_false);
3296 }
3297 
3298 
3299 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3300  ZoneList<Expression*>* args = expr->arguments();
3301  ASSERT(args->length() == 1);
3302 
3303  // ArgumentsAccessStub expects the key in r1 and the formal
3304  // parameter count in r0.
3305  VisitForAccumulatorValue(args->at(0));
3306  __ mov(r1, r0);
3307  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3308  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3309  __ CallStub(&stub);
3310  context()->Plug(r0);
3311 }
3312 
3313 
3314 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3315  ASSERT(expr->arguments()->length() == 0);
3316 
3317  // Get the number of formal parameters.
3318  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3319 
3320  // Check if the calling frame is an arguments adaptor frame.
3323  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3324 
3325  // Arguments adaptor case: Read the arguments length from the
3326  // adaptor frame.
3328 
3329  context()->Plug(r0);
3330 }
3331 
3332 
3333 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3334  ZoneList<Expression*>* args = expr->arguments();
3335  ASSERT(args->length() == 1);
3336  Label done, null, function, non_function_constructor;
3337 
3338  VisitForAccumulatorValue(args->at(0));
3339 
3340  // If the object is a smi, we return null.
3341  __ JumpIfSmi(r0, &null);
3342 
3343  // Check that the object is a JS object but take special care of JS
3344  // functions to make sure they have 'Function' as their class.
3345  // Assume that there are only two callable types, and one of them is at
3346  // either end of the type range for JS object types. Saves extra comparisons.
3348  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3349  // Map is now in r0.
3350  __ b(lt, &null);
3353  __ b(eq, &function);
3354 
3355  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3357  LAST_SPEC_OBJECT_TYPE - 1);
3358  __ b(eq, &function);
3359  // Assume that there is no larger type.
3361 
3362  // Check if the constructor in the map is a JS function.
3364  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3365  __ b(ne, &non_function_constructor);
3366 
3367  // r0 now contains the constructor function. Grab the
3368  // instance class name from there.
3371  __ b(&done);
3372 
3373  // Functions have class 'Function'.
3374  __ bind(&function);
3375  __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
3376  __ jmp(&done);
3377 
3378  // Objects with a non-function constructor have class 'Object'.
3379  __ bind(&non_function_constructor);
3380  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3381  __ jmp(&done);
3382 
3383  // Non-JS objects have class null.
3384  __ bind(&null);
3385  __ LoadRoot(r0, Heap::kNullValueRootIndex);
3386 
3387  // All done.
3388  __ bind(&done);
3389 
3390  context()->Plug(r0);
3391 }
3392 
3393 
3394 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3395  // Conditionally generate a log call.
3396  // Args:
3397  // 0 (literal string): The type of logging (corresponds to the flags).
3398  // This is used to determine whether or not to generate the log call.
3399  // 1 (string): Format string. Access the string at argument index 2
3400  // with '%2s' (see Logger::LogRuntime for all the formats).
3401  // 2 (array): Arguments to the format string.
3402  ZoneList<Expression*>* args = expr->arguments();
3403  ASSERT_EQ(args->length(), 3);
3404  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3405  VisitForStackValue(args->at(1));
3406  VisitForStackValue(args->at(2));
3407  __ CallRuntime(Runtime::kHiddenLog, 2);
3408  }
3409 
3410  // Finally, we're expected to leave a value on the top of the stack.
3411  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3412  context()->Plug(r0);
3413 }
3414 
3415 
3416 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3417  // Load the arguments on the stack and call the stub.
3418  SubStringStub stub;
3419  ZoneList<Expression*>* args = expr->arguments();
3420  ASSERT(args->length() == 3);
3421  VisitForStackValue(args->at(0));
3422  VisitForStackValue(args->at(1));
3423  VisitForStackValue(args->at(2));
3424  __ CallStub(&stub);
3425  context()->Plug(r0);
3426 }
3427 
3428 
3429 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3430  // Load the arguments on the stack and call the stub.
3431  RegExpExecStub stub;
3432  ZoneList<Expression*>* args = expr->arguments();
3433  ASSERT(args->length() == 4);
3434  VisitForStackValue(args->at(0));
3435  VisitForStackValue(args->at(1));
3436  VisitForStackValue(args->at(2));
3437  VisitForStackValue(args->at(3));
3438  __ CallStub(&stub);
3439  context()->Plug(r0);
3440 }
3441 
3442 
3443 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3444  ZoneList<Expression*>* args = expr->arguments();
3445  ASSERT(args->length() == 1);
3446  VisitForAccumulatorValue(args->at(0)); // Load the object.
3447 
3448  Label done;
3449  // If the object is a smi return the object.
3450  __ JumpIfSmi(r0, &done);
3451  // If the object is not a value type, return the object.
3452  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3454 
3455  __ bind(&done);
3456  context()->Plug(r0);
3457 }
3458 
3459 
3460 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3461  ZoneList<Expression*>* args = expr->arguments();
3462  ASSERT(args->length() == 2);
3463  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3464  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3465 
3466  VisitForAccumulatorValue(args->at(0)); // Load the object.
3467 
3468  Label runtime, done, not_date_object;
3469  Register object = r0;
3470  Register result = r0;
3471  Register scratch0 = r9;
3472  Register scratch1 = r1;
3473 
3474  __ JumpIfSmi(object, &not_date_object);
3475  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3476  __ b(ne, &not_date_object);
3477 
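  // Field 0 is the raw date value. The other cached fields are only valid
  // while the object's cache stamp matches the isolate-wide date cache stamp;
  // otherwise we fall back to the C++ date field helper below.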
3478  if (index->value() == 0) {
3479  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3480  __ jmp(&done);
3481  } else {
3482  if (index->value() < JSDate::kFirstUncachedField) {
3483  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3484  __ mov(scratch1, Operand(stamp));
3485  __ ldr(scratch1, MemOperand(scratch1));
3486  __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3487  __ cmp(scratch1, scratch0);
3488  __ b(ne, &runtime);
3489  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3490  kPointerSize * index->value()));
3491  __ jmp(&done);
3492  }
3493  __ bind(&runtime);
3494  __ PrepareCallCFunction(2, scratch1);
3495  __ mov(r1, Operand(index));
3496  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3497  __ jmp(&done);
3498  }
3499 
3500  __ bind(&not_date_object);
3501  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3502  __ bind(&done);
3503  context()->Plug(r0);
3504 }
3505 
3506 
3507 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3508  ZoneList<Expression*>* args = expr->arguments();
3509  ASSERT_EQ(3, args->length());
3510 
3511  Register string = r0;
3512  Register index = r1;
3513  Register value = r2;
3514 
3515  VisitForStackValue(args->at(1)); // index
3516  VisitForStackValue(args->at(2)); // value
3517  VisitForAccumulatorValue(args->at(0)); // string
3518  __ Pop(index, value);
3519 
3520  if (FLAG_debug_code) {
3521  __ SmiTst(value);
3522  __ Check(eq, kNonSmiValue);
3523  __ SmiTst(index);
3524  __ Check(eq, kNonSmiIndex);
3525  __ SmiUntag(index, index);
3526  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3527  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3528  __ SmiTag(index, index);
3529  }
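  // The index stays smi-tagged; for a one-byte payload the byte offset is
  // index >> kSmiTagSize, which the strb addressing mode below applies directly.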
3530 
3531  __ SmiUntag(value, value);
3532  __ add(ip,
3533  string,
3534  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3535  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3536  context()->Plug(string);
3537 }
3538 
3539 
3540 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3541  ZoneList<Expression*>* args = expr->arguments();
3542  ASSERT_EQ(3, args->length());
3543 
3544  Register string = r0;
3545  Register index = r1;
3546  Register value = r2;
3547 
3548  VisitForStackValue(args->at(1)); // index
3549  VisitForStackValue(args->at(2)); // value
3550  VisitForAccumulatorValue(args->at(0)); // string
3551  __ Pop(index, value);
3552 
3553  if (FLAG_debug_code) {
3554  __ SmiTst(value);
3555  __ Check(eq, kNonSmiValue);
3556  __ SmiTst(index);
3557  __ Check(eq, kNonSmiIndex);
3558  __ SmiUntag(index, index);
3559  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3560  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3561  __ SmiTag(index, index);
3562  }
3563 
3564  __ SmiUntag(value, value);
3565  __ add(ip,
3566  string,
3567  Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3568  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3569  __ strh(value, MemOperand(ip, index));
3570  context()->Plug(string);
3571 }
3572 
3573 
3574 
3575 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3576  // Load the arguments on the stack and call the runtime function.
3577  ZoneList<Expression*>* args = expr->arguments();
3578  ASSERT(args->length() == 2);
3579  VisitForStackValue(args->at(0));
3580  VisitForStackValue(args->at(1));
3581  MathPowStub stub(MathPowStub::ON_STACK);
3582  __ CallStub(&stub);
3583  context()->Plug(r0);
3584 }
3585 
3586 
3587 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3588  ZoneList<Expression*>* args = expr->arguments();
3589  ASSERT(args->length() == 2);
3590  VisitForStackValue(args->at(0)); // Load the object.
3591  VisitForAccumulatorValue(args->at(1)); // Load the value.
3592  __ pop(r1); // r0 = value. r1 = object.
3593 
3594  Label done;
3595  // If the object is a smi, return the value.
3596  __ JumpIfSmi(r1, &done);
3597 
3598  // If the object is not a value type, return the value.
3599  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3600  __ b(ne, &done);
3601 
3602  // Store the value.
3604  // Update the write barrier. Save the value as it will be
3605  // overwritten by the write barrier code and is needed afterward.
3606  __ mov(r2, r0);
3607  __ RecordWriteField(
3609 
3610  __ bind(&done);
3611  context()->Plug(r0);
3612 }
3613 
3614 
3615 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3616  ZoneList<Expression*>* args = expr->arguments();
3617  ASSERT_EQ(args->length(), 1);
3618  // Load the argument into r0 and call the stub.
3619  VisitForAccumulatorValue(args->at(0));
3620 
3621  NumberToStringStub stub;
3622  __ CallStub(&stub);
3623  context()->Plug(r0);
3624 }
3625 
3626 
3627 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3628  ZoneList<Expression*>* args = expr->arguments();
3629  ASSERT(args->length() == 1);
3630  VisitForAccumulatorValue(args->at(0));
3631 
3632  Label done;
3633  StringCharFromCodeGenerator generator(r0, r1);
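  // The char code is taken from r0 and the resulting one-character string is
  // produced in r1, which is what gets plugged into the context below.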
3634  generator.GenerateFast(masm_);
3635  __ jmp(&done);
3636 
3637  NopRuntimeCallHelper call_helper;
3638  generator.GenerateSlow(masm_, call_helper);
3639 
3640  __ bind(&done);
3641  context()->Plug(r1);
3642 }
3643 
3644 
3645 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3646  ZoneList<Expression*>* args = expr->arguments();
3647  ASSERT(args->length() == 2);
3648  VisitForStackValue(args->at(0));
3649  VisitForAccumulatorValue(args->at(1));
3650 
3651  Register object = r1;
3652  Register index = r0;
3653  Register result = r3;
3654 
3655  __ pop(object);
3656 
3657  Label need_conversion;
3658  Label index_out_of_range;
3659  Label done;
3660  StringCharCodeAtGenerator generator(object,
3661  index,
3662  result,
3663  &need_conversion,
3664  &need_conversion,
3665  &index_out_of_range,
3667  generator.GenerateFast(masm_);
3668  __ jmp(&done);
3669 
3670  __ bind(&index_out_of_range);
3671  // When the index is out of range, the spec requires us to return
3672  // NaN.
3673  __ LoadRoot(result, Heap::kNanValueRootIndex);
3674  __ jmp(&done);
3675 
3676  __ bind(&need_conversion);
3677  // Load the undefined value into the result register, which will
3678  // trigger conversion.
3679  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3680  __ jmp(&done);
3681 
3682  NopRuntimeCallHelper call_helper;
3683  generator.GenerateSlow(masm_, call_helper);
3684 
3685  __ bind(&done);
3686  context()->Plug(result);
3687 }
3688 
3689 
3690 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3691  ZoneList<Expression*>* args = expr->arguments();
3692  ASSERT(args->length() == 2);
3693  VisitForStackValue(args->at(0));
3694  VisitForAccumulatorValue(args->at(1));
3695 
3696  Register object = r1;
3697  Register index = r0;
3698  Register scratch = r3;
3699  Register result = r0;
3700 
3701  __ pop(object);
3702 
3703  Label need_conversion;
3704  Label index_out_of_range;
3705  Label done;
3706  StringCharAtGenerator generator(object,
3707  index,
3708  scratch,
3709  result,
3710  &need_conversion,
3711  &need_conversion,
3712  &index_out_of_range,
3714  generator.GenerateFast(masm_);
3715  __ jmp(&done);
3716 
3717  __ bind(&index_out_of_range);
3718  // When the index is out of range, the spec requires us to return
3719  // the empty string.
3720  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3721  __ jmp(&done);
3722 
3723  __ bind(&need_conversion);
3724  // Move smi zero into the result register, which will trigger
3725  // conversion.
3726  __ mov(result, Operand(Smi::FromInt(0)));
3727  __ jmp(&done);
3728 
3729  NopRuntimeCallHelper call_helper;
3730  generator.GenerateSlow(masm_, call_helper);
3731 
3732  __ bind(&done);
3733  context()->Plug(result);
3734 }
3735 
3736 
3737 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3738  ZoneList<Expression*>* args = expr->arguments();
3739  ASSERT_EQ(2, args->length());
3740  VisitForStackValue(args->at(0));
3741  VisitForAccumulatorValue(args->at(1));
3742 
3743  __ pop(r1);
3744  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3745  __ CallStub(&stub);
3746  context()->Plug(r0);
3747 }
3748 
3749 
3750 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3751  ZoneList<Expression*>* args = expr->arguments();
3752  ASSERT_EQ(2, args->length());
3753  VisitForStackValue(args->at(0));
3754  VisitForStackValue(args->at(1));
3755 
3756  StringCompareStub stub;
3757  __ CallStub(&stub);
3758  context()->Plug(r0);
3759 }
3760 
3761 
3762 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3763  // Load the argument on the stack and call the runtime function.
3764  ZoneList<Expression*>* args = expr->arguments();
3765  ASSERT(args->length() == 1);
3766  VisitForStackValue(args->at(0));
3767  __ CallRuntime(Runtime::kMath_log, 1);
3768  context()->Plug(r0);
3769 }
3770 
3771 
3772 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3773  // Load the argument on the stack and call the runtime function.
3774  ZoneList<Expression*>* args = expr->arguments();
3775  ASSERT(args->length() == 1);
3776  VisitForStackValue(args->at(0));
3777  __ CallRuntime(Runtime::kMath_sqrt, 1);
3778  context()->Plug(r0);
3779 }
3780 
3781 
3782 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3783  ZoneList<Expression*>* args = expr->arguments();
3784  ASSERT(args->length() >= 2);
3785 
3786  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3787  for (int i = 0; i < arg_count + 1; i++) {
3788  VisitForStackValue(args->at(i));
3789  }
3790  VisitForAccumulatorValue(args->last()); // Function.
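  // At this point the receiver and the arguments sit on the stack and the
  // callee is in r0, ready for the smi/type check below.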
3791 
3792  Label runtime, done;
3793  // Check for non-function argument (including proxy).
3794  __ JumpIfSmi(r0, &runtime);
3795  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3796  __ b(ne, &runtime);
3797 
3798  // InvokeFunction requires the function in r1. Move it in there.
3799  __ mov(r1, result_register());
3800  ParameterCount count(arg_count);
3801  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
3803  __ jmp(&done);
3804 
3805  __ bind(&runtime);
3806  __ push(r0);
3807  __ CallRuntime(Runtime::kCall, args->length());
3808  __ bind(&done);
3809 
3810  context()->Plug(r0);
3811 }
3812 
3813 
3814 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3815  RegExpConstructResultStub stub;
3816  ZoneList<Expression*>* args = expr->arguments();
3817  ASSERT(args->length() == 3);
3818  VisitForStackValue(args->at(0));
3819  VisitForStackValue(args->at(1));
3820  VisitForAccumulatorValue(args->at(2));
3821  __ pop(r1);
3822  __ pop(r2);
3823  __ CallStub(&stub);
3824  context()->Plug(r0);
3825 }
3826 
3827 
3828 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3829  ZoneList<Expression*>* args = expr->arguments();
3830  ASSERT_EQ(2, args->length());
3831  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3832  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3833 
3834  Handle<FixedArray> jsfunction_result_caches(
3835  isolate()->native_context()->jsfunction_result_caches());
3836  if (jsfunction_result_caches->length() <= cache_id) {
3837  __ Abort(kAttemptToUseUndefinedCache);
3838  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3839  context()->Plug(r0);
3840  return;
3841  }
3842 
3843  VisitForAccumulatorValue(args->at(1));
3844 
3845  Register key = r0;
3846  Register cache = r1;
3850  __ ldr(cache,
3852 
3853 
3854  Label done, not_found;
3856  // r2 now holds finger offset as a smi.
3857  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3858  // r3 now points to the start of fixed array elements.
3859  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3860  // Note side effect of PreIndex: r3 now points to the key of the pair.
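  // Cache entries are (key, value) pairs laid out back to back, and the finger
  // remembers the offset of the most recently hit key, so a single compare
  // covers the common repeated-lookup case before going to the runtime.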
3861  __ cmp(key, r2);
3862  __ b(ne, &not_found);
3863 
3864  __ ldr(r0, MemOperand(r3, kPointerSize));
3865  __ b(&done);
3866 
3867  __ bind(&not_found);
3868  // Call runtime to perform the lookup.
3869  __ Push(cache, key);
3870  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3871 
3872  __ bind(&done);
3873  context()->Plug(r0);
3874 }
3875 
3876 
3877 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3878  ZoneList<Expression*>* args = expr->arguments();
3879  VisitForAccumulatorValue(args->at(0));
3880 
3881  Label materialize_true, materialize_false;
3882  Label* if_true = NULL;
3883  Label* if_false = NULL;
3884  Label* fall_through = NULL;
3885  context()->PrepareTest(&materialize_true, &materialize_false,
3886  &if_true, &if_false, &fall_through);
3887 
3890  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3891  Split(eq, if_true, if_false, fall_through);
3892 
3893  context()->Plug(if_true, if_false);
3894 }
3895 
3896 
3897 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3898  ZoneList<Expression*>* args = expr->arguments();
3899  ASSERT(args->length() == 1);
3900  VisitForAccumulatorValue(args->at(0));
3901 
3902  __ AssertString(r0);
3903 
3905  __ IndexFromHash(r0, r0);
3906 
3907  context()->Plug(r0);
3908 }
3909 
3910 
3911 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3912  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3913  not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3914  one_char_separator_loop_entry, long_separator_loop;
3915  ZoneList<Expression*>* args = expr->arguments();
3916  ASSERT(args->length() == 2);
3917  VisitForStackValue(args->at(1));
3918  VisitForAccumulatorValue(args->at(0));
3919 
3920  // All aliases of the same register have disjoint lifetimes.
3921  Register array = r0;
3922  Register elements = no_reg; // Will be r0.
3923  Register result = no_reg; // Will be r0.
3924  Register separator = r1;
3925  Register array_length = r2;
3926  Register result_pos = no_reg; // Will be r2
3927  Register string_length = r3;
3928  Register string = r4;
3929  Register element = r5;
3930  Register elements_end = r6;
3931  Register scratch = r9;
3932 
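  // Fast path for joining an array of flat ASCII strings: sum the lengths,
  // allocate one sequential ASCII result and copy elements (plus separators)
  // with CopyBytes. Anything unexpected bails out and returns undefined, which
  // the JS caller is expected to treat as "use the generic join".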
3933  // Separator operand is on the stack.
3934  __ pop(separator);
3935 
3936  // Check that the array is a JSArray.
3937  __ JumpIfSmi(array, &bailout);
3938  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
3939  __ b(ne, &bailout);
3940 
3941  // Check that the array has fast elements.
3942  __ CheckFastElements(scratch, array_length, &bailout);
3943 
3944  // If the array has length zero, return the empty string.
3945  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3946  __ SmiUntag(array_length, SetCC);
3947  __ b(ne, &non_trivial_array);
3948  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
3949  __ b(&done);
3950 
3951  __ bind(&non_trivial_array);
3952 
3953  // Get the FixedArray containing array's elements.
3954  elements = array;
3955  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3956  array = no_reg; // End of array's live range.
3957 
3958  // Check that all array elements are sequential ASCII strings, and
3959  // accumulate the sum of their lengths, as a smi-encoded value.
3960  __ mov(string_length, Operand::Zero());
3961  __ add(element,
3962  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3963  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3964  // Loop condition: while (element < elements_end).
3965  // Live values in registers:
3966  // elements: Fixed array of strings.
3967  // array_length: Length of the fixed array of strings (not smi)
3968  // separator: Separator string
3969  // string_length: Accumulated sum of string lengths (smi).
3970  // element: Current array element.
3971  // elements_end: Array end.
3972  if (generate_debug_code_) {
3973  __ cmp(array_length, Operand::Zero());
3974  __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3975  }
3976  __ bind(&loop);
3977  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3978  __ JumpIfSmi(string, &bailout);
3979  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
3980  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3981  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
3982  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3983  __ add(string_length, string_length, Operand(scratch), SetCC);
3984  __ b(vs, &bailout);
3985  __ cmp(element, elements_end);
3986  __ b(lt, &loop);
3987 
3988  // If array_length is 1, return elements[0], a string.
3989  __ cmp(array_length, Operand(1));
3990  __ b(ne, &not_size_one_array);
3991  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3992  __ b(&done);
3993 
3994  __ bind(&not_size_one_array);
3995 
3996  // Live values in registers:
3997  // separator: Separator string
3998  // array_length: Length of the array.
3999  // string_length: Sum of string lengths (smi).
4000  // elements: FixedArray of strings.
4001 
4002  // Check that the separator is a flat ASCII string.
4003  __ JumpIfSmi(separator, &bailout);
4004  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4005  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4006  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
4007 
4008  // Add (separator length times array_length) - separator length to the
4009  // string_length to get the length of the result string. array_length is not
4010  // a smi but the other values are, so the result is a smi.
4011  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4012  __ sub(string_length, string_length, Operand(scratch));
4013  __ smull(scratch, ip, array_length, scratch);
4014  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4015  // zero.
4016  __ cmp(ip, Operand::Zero());
4017  __ b(ne, &bailout);
4018  __ tst(scratch, Operand(0x80000000));
4019  __ b(ne, &bailout);
4020  __ add(string_length, string_length, Operand(scratch), SetCC);
4021  __ b(vs, &bailout);
4022  __ SmiUntag(string_length);
4023 
4024  // Get first element in the array to free up the elements register to be used
4025  // for the result.
4026  __ add(element,
4027  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4028  result = elements; // End of live range for elements.
4029  elements = no_reg;
4030  // Live values in registers:
4031  // element: First array element
4032  // separator: Separator string
4033  // string_length: Length of result string (not smi)
4034  // array_length: Length of the array.
4035  __ AllocateAsciiString(result,
4036  string_length,
4037  scratch,
4038  string, // used as scratch
4039  elements_end, // used as scratch
4040  &bailout);
4041  // Prepare for looping. Set up elements_end to end of the array. Set
4042  // result_pos to the position in the result where the first
4043  // character will be written.
4044  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4045  result_pos = array_length; // End of live range for array_length.
4046  array_length = no_reg;
4047  __ add(result_pos,
4048  result,
4049  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4050 
4051  // Check the length of the separator.
4052  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4053  __ cmp(scratch, Operand(Smi::FromInt(1)));
4054  __ b(eq, &one_char_separator);
4055  __ b(gt, &long_separator);
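  // Three specialized copy loops follow, selected on separator length:
  // empty (plain concatenation), single character, and general separator.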
4056 
4057  // Empty separator case
4058  __ bind(&empty_separator_loop);
4059  // Live values in registers:
4060  // result_pos: the position to which we are currently copying characters.
4061  // element: Current array element.
4062  // elements_end: Array end.
4063 
4064  // Copy next array element to the result.
4065  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4066  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4067  __ SmiUntag(string_length);
4068  __ add(string,
4069  string,
4070  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4071  __ CopyBytes(string, result_pos, string_length, scratch);
4072  __ cmp(element, elements_end);
4073  __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4074  ASSERT(result.is(r0));
4075  __ b(&done);
4076 
4077  // One-character separator case
4078  __ bind(&one_char_separator);
4079  // Replace separator with its ASCII character value.
4080  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4081  // Jump into the loop after the code that copies the separator, so the first
4082  // element is not preceded by a separator
4083  __ jmp(&one_char_separator_loop_entry);
4084 
4085  __ bind(&one_char_separator_loop);
4086  // Live values in registers:
4087  // result_pos: the position to which we are currently copying characters.
4088  // element: Current array element.
4089  // elements_end: Array end.
4090  // separator: Single separator ASCII char (in lower byte).
4091 
4092  // Copy the separator character to the result.
4093  __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4094 
4095  // Copy next array element to the result.
4096  __ bind(&one_char_separator_loop_entry);
4097  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4098  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4099  __ SmiUntag(string_length);
4100  __ add(string,
4101  string,
4102  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4103  __ CopyBytes(string, result_pos, string_length, scratch);
4104  __ cmp(element, elements_end);
4105  __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4106  ASSERT(result.is(r0));
4107  __ b(&done);
4108 
4109  // Long separator case (separator is more than one character). Entry is at the
4110  // label long_separator below.
4111  __ bind(&long_separator_loop);
4112  // Live values in registers:
4113  // result_pos: the position to which we are currently copying characters.
4114  // element: Current array element.
4115  // elements_end: Array end.
4116  // separator: Separator string.
4117 
4118  // Copy the separator to the result.
4119  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4120  __ SmiUntag(string_length);
4121  __ add(string,
4122  separator,
4123  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4124  __ CopyBytes(string, result_pos, string_length, scratch);
4125 
4126  __ bind(&long_separator);
4127  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4128  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4129  __ SmiUntag(string_length);
4130  __ add(string,
4131  string,
4132  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4133  __ CopyBytes(string, result_pos, string_length, scratch);
4134  __ cmp(element, elements_end);
4135  __ b(lt, &long_separator_loop); // End while (element < elements_end).
4136  ASSERT(result.is(r0));
4137  __ b(&done);
4138 
4139  __ bind(&bailout);
4140  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4141  __ bind(&done);
4142  context()->Plug(r0);
4143 }
4144 
4145 
4146 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4147  if (expr->function() != NULL &&
4148  expr->function()->intrinsic_type == Runtime::INLINE) {
4149  Comment cmnt(masm_, "[ InlineRuntimeCall");
4150  EmitInlineRuntimeCall(expr);
4151  return;
4152  }
4153 
4154  Comment cmnt(masm_, "[ CallRuntime");
4155  ZoneList<Expression*>* args = expr->arguments();
4156  int arg_count = args->length();
4157 
4158  if (expr->is_jsruntime()) {
4159  // Push the builtins object as the receiver.
4160  __ ldr(r0, GlobalObjectOperand());
4162  __ push(r0);
4163 
4164  // Load the function from the receiver.
4165  __ mov(r2, Operand(expr->name()));
4166  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4167 
4168  // Push the target function under the receiver.
4169  __ ldr(ip, MemOperand(sp, 0));
4170  __ push(ip);
4171  __ str(r0, MemOperand(sp, kPointerSize));
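  // sp[0] now holds the receiver and sp[4] the resolved runtime function; once
  // the arguments are pushed on top, the ldr below can pick the callee out at
  // (arg_count + 1) slots from the top of the stack.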
4172 
4173  // Push the arguments ("left-to-right").
4174  int arg_count = args->length();
4175  for (int i = 0; i < arg_count; i++) {
4176  VisitForStackValue(args->at(i));
4177  }
4178 
4179  // Record source position of the IC call.
4180  SetSourcePosition(expr->position());
4181  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
4182  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4183  __ CallStub(&stub);
4184 
4185  // Restore context register.
4187 
4188  context()->DropAndPlug(1, r0);
4189  } else {
4190  // Push the arguments ("left-to-right").
4191  for (int i = 0; i < arg_count; i++) {
4192  VisitForStackValue(args->at(i));
4193  }
4194 
4195  // Call the C runtime function.
4196  __ CallRuntime(expr->function(), arg_count);
4197  context()->Plug(r0);
4198  }
4199 }
4200 
4201 
4202 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4203  switch (expr->op()) {
4204  case Token::DELETE: {
4205  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4206  Property* property = expr->expression()->AsProperty();
4207  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4208 
4209  if (property != NULL) {
4210  VisitForStackValue(property->obj());
4211  VisitForStackValue(property->key());
4212  __ mov(r1, Operand(Smi::FromInt(strict_mode())));
4213  __ push(r1);
4214  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4215  context()->Plug(r0);
4216  } else if (proxy != NULL) {
4217  Variable* var = proxy->var();
4218  // Delete of an unqualified identifier is disallowed in strict mode
4219  // but "delete this" is allowed.
4220  ASSERT(strict_mode() == SLOPPY || var->is_this());
4221  if (var->IsUnallocated()) {
4222  __ ldr(r2, GlobalObjectOperand());
4223  __ mov(r1, Operand(var->name()));
4224  __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4225  __ Push(r2, r1, r0);
4226  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4227  context()->Plug(r0);
4228  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4229  // Result of deleting non-global, non-dynamic variables is false.
4230  // The subexpression does not have side effects.
4231  context()->Plug(var->is_this());
4232  } else {
4233  // Non-global variable. Call the runtime to try to delete from the
4234  // context where the variable was introduced.
4235  ASSERT(!context_register().is(r2));
4236  __ mov(r2, Operand(var->name()));
4237  __ Push(context_register(), r2);
4238  __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4239  context()->Plug(r0);
4240  }
4241  } else {
4242  // Result of deleting non-property, non-variable reference is true.
4243  // The subexpression may have side effects.
4244  VisitForEffect(expr->expression());
4245  context()->Plug(true);
4246  }
4247  break;
4248  }
4249 
4250  case Token::VOID: {
4251  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4252  VisitForEffect(expr->expression());
4253  context()->Plug(Heap::kUndefinedValueRootIndex);
4254  break;
4255  }
4256 
4257  case Token::NOT: {
4258  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4259  if (context()->IsEffect()) {
4260  // Unary NOT has no side effects so it's only necessary to visit the
4261  // subexpression. Match the optimizing compiler by not branching.
4262  VisitForEffect(expr->expression());
4263  } else if (context()->IsTest()) {
4264  const TestContext* test = TestContext::cast(context());
4265  // The labels are swapped for the recursive call.
4266  VisitForControl(expr->expression(),
4267  test->false_label(),
4268  test->true_label(),
4269  test->fall_through());
4270  context()->Plug(test->true_label(), test->false_label());
4271  } else {
4272  // We handle value contexts explicitly rather than simply visiting
4273  // for control and plugging the control flow into the context,
4274  // because we need to prepare a pair of extra administrative AST ids
4275  // for the optimizing compiler.
4276  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4277  Label materialize_true, materialize_false, done;
4278  VisitForControl(expr->expression(),
4279  &materialize_false,
4280  &materialize_true,
4281  &materialize_true);
4282  __ bind(&materialize_true);
4283  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4284  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4285  if (context()->IsStackValue()) __ push(r0);
4286  __ jmp(&done);
4287  __ bind(&materialize_false);
4288  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4289  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4290  if (context()->IsStackValue()) __ push(r0);
4291  __ bind(&done);
4292  }
4293  break;
4294  }
4295 
4296  case Token::TYPEOF: {
4297  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4298  { StackValueContext context(this);
4299  VisitForTypeofValue(expr->expression());
4300  }
4301  __ CallRuntime(Runtime::kTypeof, 1);
4302  context()->Plug(r0);
4303  break;
4304  }
4305 
4306  default:
4307  UNREACHABLE();
4308  }
4309 }
4310 
4311 
4312 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4313  ASSERT(expr->expression()->IsValidLeftHandSide());
4314 
4315  Comment cmnt(masm_, "[ CountOperation");
4316  SetSourcePosition(expr->position());
4317 
4318  // Expression can only be a property, a global or a (parameter or local)
4319  // slot.
4320  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4321  LhsKind assign_type = VARIABLE;
4322  Property* prop = expr->expression()->AsProperty();
4323  // In case of a property we use the uninitialized expression context
4324  // of the key to detect a named property.
4325  if (prop != NULL) {
4326  assign_type =
4327  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4328  }
4329 
4330  // Evaluate expression and get value.
4331  if (assign_type == VARIABLE) {
4332  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4333  AccumulatorValueContext context(this);
4334  EmitVariableLoad(expr->expression()->AsVariableProxy());
4335  } else {
4336  // Reserve space for result of postfix operation.
4337  if (expr->is_postfix() && !context()->IsEffect()) {
4338  __ mov(ip, Operand(Smi::FromInt(0)));
4339  __ push(ip);
4340  }
4341  if (assign_type == NAMED_PROPERTY) {
4342  // Put the object both on the stack and in the accumulator.
4343  VisitForAccumulatorValue(prop->obj());
4344  __ push(r0);
4345  EmitNamedPropertyLoad(prop);
4346  } else {
4347  VisitForStackValue(prop->obj());
4348  VisitForAccumulatorValue(prop->key());
4349  __ ldr(r1, MemOperand(sp, 0));
4350  __ push(r0);
4351  EmitKeyedPropertyLoad(prop);
4352  }
4353  }
4354 
4355  // We need a second deoptimization point after loading the value
4356  // in case evaluating the property load may have a side effect.
4357  if (assign_type == VARIABLE) {
4358  PrepareForBailout(expr->expression(), TOS_REG);
4359  } else {
4360  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4361  }
4362 
4363  // Inline smi case if we are in a loop.
4364  Label stub_call, done;
4365  JumpPatchSite patch_site(masm_);
4366 
4367  int count_value = expr->op() == Token::INC ? 1 : -1;
4368  if (ShouldInlineSmiCase(expr->op())) {
4369  Label slow;
4370  patch_site.EmitJumpIfNotSmi(r0, &slow);
4371 
4372  // Save result for postfix expressions.
4373  if (expr->is_postfix()) {
4374  if (!context()->IsEffect()) {
4375  // Save the result on the stack. If we have a named or keyed property
4376  // we store the result under the receiver that is currently on top
4377  // of the stack.
4378  switch (assign_type) {
4379  case VARIABLE:
4380  __ push(r0);
4381  break;
4382  case NAMED_PROPERTY:
4383  __ str(r0, MemOperand(sp, kPointerSize));
4384  break;
4385  case KEYED_PROPERTY:
4386  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4387  break;
4388  }
4389  }
4390  }
4391 
4392  __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4393  __ b(vc, &done);
4394  // Call stub. Undo operation first.
4395  __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4396  __ jmp(&stub_call);
4397  __ bind(&slow);
4398  }
4399  ToNumberStub convert_stub;
4400  __ CallStub(&convert_stub);
4401 
4402  // Save result for postfix expressions.
4403  if (expr->is_postfix()) {
4404  if (!context()->IsEffect()) {
4405  // Save the result on the stack. If we have a named or keyed property
4406  // we store the result under the receiver that is currently on top
4407  // of the stack.
4408  switch (assign_type) {
4409  case VARIABLE:
4410  __ push(r0);
4411  break;
4412  case NAMED_PROPERTY:
4413  __ str(r0, MemOperand(sp, kPointerSize));
4414  break;
4415  case KEYED_PROPERTY:
4416  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4417  break;
4418  }
4419  }
4420  }
4421 
4422 
4423  __ bind(&stub_call);
4424  __ mov(r1, r0);
4425  __ mov(r0, Operand(Smi::FromInt(count_value)));
4426 
4427  // Record position before stub call.
4428  SetSourcePosition(expr->position());
4429 
4430  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4431  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4432  patch_site.EmitPatchInfo();
4433  __ bind(&done);
4434 
4435  // Store the value returned in r0.
4436  switch (assign_type) {
4437  case VARIABLE:
4438  if (expr->is_postfix()) {
4439  { EffectContext context(this);
4440  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4441  Token::ASSIGN);
4442  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4443  context.Plug(r0);
4444  }
4445  // For all contexts except EffectContext we have the result on
4446  // top of the stack.
4447  if (!context()->IsEffect()) {
4448  context()->PlugTOS();
4449  }
4450  } else {
4451  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4452  Token::ASSIGN);
4453  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4454  context()->Plug(r0);
4455  }
4456  break;
4457  case NAMED_PROPERTY: {
4458  __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
4459  __ pop(r1);
4460  CallStoreIC(expr->CountStoreFeedbackId());
4461  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4462  if (expr->is_postfix()) {
4463  if (!context()->IsEffect()) {
4464  context()->PlugTOS();
4465  }
4466  } else {
4467  context()->Plug(r0);
4468  }
4469  break;
4470  }
4471  case KEYED_PROPERTY: {
4472  __ Pop(r2, r1); // r1 = key. r2 = receiver.
4473  Handle<Code> ic = strict_mode() == SLOPPY
4474  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4475  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4476  CallIC(ic, expr->CountStoreFeedbackId());
4477  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4478  if (expr->is_postfix()) {
4479  if (!context()->IsEffect()) {
4480  context()->PlugTOS();
4481  }
4482  } else {
4483  context()->Plug(r0);
4484  }
4485  break;
4486  }
4487  }
4488 }
4489 
4490 
4491 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4492  ASSERT(!context()->IsEffect());
4493  ASSERT(!context()->IsTest());
4494  VariableProxy* proxy = expr->AsVariableProxy();
4495  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4496  Comment cmnt(masm_, "[ Global variable");
4497  __ ldr(r0, GlobalObjectOperand());
4498  __ mov(r2, Operand(proxy->name()));
4499  // Use a regular load, not a contextual load, to avoid a reference
4500  // error.
4501  CallLoadIC(NOT_CONTEXTUAL);
4502  PrepareForBailout(expr, TOS_REG);
4503  context()->Plug(r0);
4504  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4505  Comment cmnt(masm_, "[ Lookup slot");
4506  Label done, slow;
4507 
4508  // Generate code for loading from variables potentially shadowed
4509  // by eval-introduced variables.
4510  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4511 
4512  __ bind(&slow);
4513  __ mov(r0, Operand(proxy->name()));
4514  __ Push(cp, r0);
4515  __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4516  PrepareForBailout(expr, TOS_REG);
4517  __ bind(&done);
4518 
4519  context()->Plug(r0);
4520  } else {
4521  // This expression cannot throw a reference error at the top level.
4522  VisitInDuplicateContext(expr);
4523  }
4524 }
4525 
4526 
4527 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4528  Expression* sub_expr,
4529  Handle<String> check) {
4530  Label materialize_true, materialize_false;
4531  Label* if_true = NULL;
4532  Label* if_false = NULL;
4533  Label* fall_through = NULL;
4534  context()->PrepareTest(&materialize_true, &materialize_false,
4535  &if_true, &if_false, &fall_through);
4536 
4537  { AccumulatorValueContext context(this);
4538  VisitForTypeofValue(sub_expr);
4539  }
4540  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
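  // Each branch below handles one possible typeof string literal by testing
  // the value's tag, map or instance type directly and feeding the result to
  // Split, so no typeof result string is ever materialized.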
4541 
4542  if (check->Equals(isolate()->heap()->number_string())) {
4543  __ JumpIfSmi(r0, if_true);
4545  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4546  __ cmp(r0, ip);
4547  Split(eq, if_true, if_false, fall_through);
4548  } else if (check->Equals(isolate()->heap()->string_string())) {
4549  __ JumpIfSmi(r0, if_false);
4550  // Check for undetectable objects => false.
4551  __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4552  __ b(ge, if_false);
4554  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4555  Split(eq, if_true, if_false, fall_through);
4556  } else if (check->Equals(isolate()->heap()->symbol_string())) {
4557  __ JumpIfSmi(r0, if_false);
4558  __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4559  Split(eq, if_true, if_false, fall_through);
4560  } else if (check->Equals(isolate()->heap()->boolean_string())) {
4561  __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4562  __ b(eq, if_true);
4563  __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4564  Split(eq, if_true, if_false, fall_through);
4565  } else if (FLAG_harmony_typeof &&
4566  check->Equals(isolate()->heap()->null_string())) {
4567  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4568  Split(eq, if_true, if_false, fall_through);
4569  } else if (check->Equals(isolate()->heap()->undefined_string())) {
4570  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4571  __ b(eq, if_true);
4572  __ JumpIfSmi(r0, if_false);
4573  // Check for undetectable objects => true.
4576  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4577  Split(ne, if_true, if_false, fall_through);
4578 
4579  } else if (check->Equals(isolate()->heap()->function_string())) {
4580  __ JumpIfSmi(r0, if_false);
4582  __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4583  __ b(eq, if_true);
4584  __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4585  Split(eq, if_true, if_false, fall_through);
4586  } else if (check->Equals(isolate()->heap()->object_string())) {
4587  __ JumpIfSmi(r0, if_false);
4588  if (!FLAG_harmony_typeof) {
4589  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4590  __ b(eq, if_true);
4591  }
4592  // Check for JS objects => true.
4593  __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4594  __ b(lt, if_false);
4595  __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4596  __ b(gt, if_false);
4597  // Check for undetectable objects => false.
4599  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4600  Split(eq, if_true, if_false, fall_through);
4601  } else {
4602  if (if_false != fall_through) __ jmp(if_false);
4603  }
4604  context()->Plug(if_true, if_false);
4605 }
4606 
4607 
4608 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4609  Comment cmnt(masm_, "[ CompareOperation");
4610  SetSourcePosition(expr->position());
4611 
4612  // First we try a fast inlined version of the compare when one of
4613  // the operands is a literal.
4614  if (TryLiteralCompare(expr)) return;
4615 
4616  // Always perform the comparison for its control flow. Pack the result
4617  // into the expression's context after the comparison is performed.
4618  Label materialize_true, materialize_false;
4619  Label* if_true = NULL;
4620  Label* if_false = NULL;
4621  Label* fall_through = NULL;
4622  context()->PrepareTest(&materialize_true, &materialize_false,
4623  &if_true, &if_false, &fall_through);
4624 
4625  Token::Value op = expr->op();
4626  VisitForStackValue(expr->left());
4627  switch (op) {
4628  case Token::IN:
4629  VisitForStackValue(expr->right());
4630  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4631  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4632  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4633  __ cmp(r0, ip);
4634  Split(eq, if_true, if_false, fall_through);
4635  break;
4636 
4637  case Token::INSTANCEOF: {
4638  VisitForStackValue(expr->right());
4639  InstanceofStub stub(InstanceofStub::kNoFlags);
4640  __ CallStub(&stub);
4641  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4642  // The stub returns 0 for true.
4643  __ tst(r0, r0);
4644  Split(eq, if_true, if_false, fall_through);
4645  break;
4646  }
4647 
4648  default: {
4649  VisitForAccumulatorValue(expr->right());
4650  Condition cond = CompareIC::ComputeCondition(op);
4651  __ pop(r1);
4652 
4653  bool inline_smi_code = ShouldInlineSmiCase(op);
4654  JumpPatchSite patch_site(masm_);
4655  if (inline_smi_code) {
4656  Label slow_case;
4657  __ orr(r2, r0, Operand(r1));
4658  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4659  __ cmp(r1, r0);
4660  Split(cond, if_true, if_false, NULL);
4661  __ bind(&slow_case);
4662  }
4663 
4664  // Record position and call the compare IC.
4665  SetSourcePosition(expr->position());
4666  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4667  CallIC(ic, expr->CompareOperationFeedbackId());
4668  patch_site.EmitPatchInfo();
4669  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4670  __ cmp(r0, Operand::Zero());
4671  Split(cond, if_true, if_false, fall_through);
4672  }
4673  }
4674 
4675  // Convert the result of the comparison into one expected for this
4676  // expression's context.
4677  context()->Plug(if_true, if_false);
4678 }
4679 
4680 
4681 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4682  Expression* sub_expr,
4683  NilValue nil) {
4684  Label materialize_true, materialize_false;
4685  Label* if_true = NULL;
4686  Label* if_false = NULL;
4687  Label* fall_through = NULL;
4688  context()->PrepareTest(&materialize_true, &materialize_false,
4689  &if_true, &if_false, &fall_through);
4690 
4691  VisitForAccumulatorValue(sub_expr);
4692  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4693  if (expr->op() == Token::EQ_STRICT) {
4694  Heap::RootListIndex nil_value = nil == kNullValue ?
4695  Heap::kNullValueRootIndex :
4696  Heap::kUndefinedValueRootIndex;
4697  __ LoadRoot(r1, nil_value);
4698  __ cmp(r0, r1);
4699  Split(eq, if_true, if_false, fall_through);
4700  } else {
4701  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4702  CallIC(ic, expr->CompareOperationFeedbackId());
4703  __ cmp(r0, Operand(0));
4704  Split(ne, if_true, if_false, fall_through);
4705  }
4706  context()->Plug(if_true, if_false);
4707 }
4708 
4709 
4710 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4712  context()->Plug(r0);
4713 }
4714 
4715 
4716 Register FullCodeGenerator::result_register() {
4717  return r0;
4718 }
4719 
4720 
4721 Register FullCodeGenerator::context_register() {
4722  return cp;
4723 }
4724 
4725 
4726 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4727  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4728  __ str(value, MemOperand(fp, frame_offset));
4729 }
4730 
4731 
4732 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4733  __ ldr(dst, ContextOperand(cp, context_index));
4734 }
4735 
4736 
4737 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4738  Scope* declaration_scope = scope()->DeclarationScope();
4739  if (declaration_scope->is_global_scope() ||
4740  declaration_scope->is_module_scope()) {
4741  // Contexts nested in the native context have a canonical empty function
4742  // as their closure, not the anonymous closure containing the global
4743  // code. Pass a smi sentinel and let the runtime look up the empty
4744  // function.
4745  __ mov(ip, Operand(Smi::FromInt(0)));
4746  } else if (declaration_scope->is_eval_scope()) {
4747  // Contexts created by a call to eval have the same closure as the
4748  // context calling eval, not the anonymous closure containing the eval
4749  // code. Fetch it from the context.
4751  } else {
4752  ASSERT(declaration_scope->is_function_scope());
4754  }
4755  __ push(ip);
4756 }
4757 
4758 
4759 // ----------------------------------------------------------------------------
4760 // Non-local control flow support.
4761 
4762 void FullCodeGenerator::EnterFinallyBlock() {
4763  ASSERT(!result_register().is(r1));
4764  // Store result register while executing finally block.
4765  __ push(result_register());
4766  // Cook return address in link register to stack (smi encoded Code* delta)
4767  __ sub(r1, lr, Operand(masm_->CodeObject()));
4768  __ SmiTag(r1);
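  // "Cooking" stores the return address as a smi-tagged offset from the code
  // object rather than a raw pc, so the stack slot stays valid (and GC-safe)
  // even if the code object moves while the finally block runs.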
4769 
4770  // Store cooked return address while executing finally block.
4771  __ push(r1);
4772 
4773  // Store pending message while executing finally block.
4774  ExternalReference pending_message_obj =
4775  ExternalReference::address_of_pending_message_obj(isolate());
4776  __ mov(ip, Operand(pending_message_obj));
4777  __ ldr(r1, MemOperand(ip));
4778  __ push(r1);
4779 
4780  ExternalReference has_pending_message =
4781  ExternalReference::address_of_has_pending_message(isolate());
4782  __ mov(ip, Operand(has_pending_message));
4783  __ ldr(r1, MemOperand(ip));
4784  __ SmiTag(r1);
4785  __ push(r1);
4786 
4787  ExternalReference pending_message_script =
4788  ExternalReference::address_of_pending_message_script(isolate());
4789  __ mov(ip, Operand(pending_message_script));
4790  __ ldr(r1, MemOperand(ip));
4791  __ push(r1);
4792 }
4793 
4794 
4795 void FullCodeGenerator::ExitFinallyBlock() {
4796  ASSERT(!result_register().is(r1));
4797  // Restore pending message from stack.
4798  __ pop(r1);
4799  ExternalReference pending_message_script =
4800  ExternalReference::address_of_pending_message_script(isolate());
4801  __ mov(ip, Operand(pending_message_script));
4802  __ str(r1, MemOperand(ip));
4803 
4804  __ pop(r1);
4805  __ SmiUntag(r1);
4806  ExternalReference has_pending_message =
4807  ExternalReference::address_of_has_pending_message(isolate());
4808  __ mov(ip, Operand(has_pending_message));
4809  __ str(r1, MemOperand(ip));
4810 
4811  __ pop(r1);
4812  ExternalReference pending_message_obj =
4813  ExternalReference::address_of_pending_message_obj(isolate());
4814  __ mov(ip, Operand(pending_message_obj));
4815  __ str(r1, MemOperand(ip));
4816 
4817  // Restore cooked return address from stack.
4818  __ pop(r1);
4819 
4820  // Restore result register, then uncook the return address and return.
4821  __ pop(result_register());
4822  __ SmiUntag(r1);
4823  __ add(pc, r1, Operand(masm_->CodeObject()));
4824 }
4825 
4826 
4827 #undef __
4828 
4829 #define __ ACCESS_MASM(masm())
4830 
4831 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4832  int* stack_depth,
4833  int* context_length) {
4834  // The macros used here must preserve the result register.
4835 
4836  // Because the handler block contains the context of the finally
4837  // code, we can restore it directly from there for the finally code
4838  // rather than iteratively unwinding contexts via their previous
4839  // links.
4840  __ Drop(*stack_depth); // Down to the handler block.
4841  if (*context_length > 0) {
4842  // Restore the context to its dedicated register and the stack.
4845  }
4846  __ PopTryHandler();
4847  __ bl(finally_entry_);
4848 
4849  *stack_depth = 0;
4850  *context_length = 0;
4851  return previous_;
4852 }
4853 
4854 
4855 #undef __
4856 
4857 
4858 static Address GetInterruptImmediateLoadAddress(Address pc) {
4859  Address load_address = pc - 2 * Assembler::kInstrSize;
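  // The back edge ends with <load ip, target> / blx ip. The load is either a
  // single ldr (constant pool) or a movw/movt pair, so back up one more
  // instruction when a movt is found.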
4860  if (!FLAG_enable_ool_constant_pool) {
4862  } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
4863  load_address -= Assembler::kInstrSize;
4864  ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
4865  } else {
4867  }
4868  return load_address;
4869 }
4870 
4871 
4872 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4873  Address pc,
4874  BackEdgeState target_state,
4875  Code* replacement_code) {
4876  static const int kInstrSize = Assembler::kInstrSize;
4877  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4878  Address branch_address = pc_immediate_load_address - kInstrSize;
4879  CodePatcher patcher(branch_address, 1);
4880  switch (target_state) {
4881  case INTERRUPT:
4882  {
4883  // <decrement profiling counter>
4884  // bpl ok
4885  // ; load interrupt stub address into ip - either of:
4886  // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low>
4887  // | movt ip, <immed high>
4888  // blx ip
4889  // ok-label
4890 
4891  // Calculate the branch offset to the ok-label - this is the difference between
4892  // the branch address and |pc| (which points at <blx ip>) plus one instr.
4893  int branch_offset = pc + kInstrSize - branch_address;
4894  patcher.masm()->b(branch_offset, pl);
4895  break;
4896  }
4897  case ON_STACK_REPLACEMENT:
4898  case OSR_AFTER_STACK_CHECK:
4899  // <decrement profiling counter>
4900  // mov r0, r0 (NOP)
4901  // ; load on-stack replacement address into ip - either of:
4902  // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low>
4903  // | movt ip, <immed high>
4904  // blx ip
4905  // ok-label
4906  patcher.masm()->nop();
4907  break;
4908  }
4909 
4910  // Replace the call address.
4911  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
4912  replacement_code->entry());
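  // Only the branch/nop and the load's target are rewritten; the surrounding
  // sequence is untouched, which is what GetBackEdgeState below relies on to
  // classify the patch state.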
4913 
4914  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4915  unoptimized_code, pc_immediate_load_address, replacement_code);
4916 }
4917 
4918 
4919 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4920  Isolate* isolate,
4921  Code* unoptimized_code,
4922  Address pc) {
4923  static const int kInstrSize = Assembler::kInstrSize;
4924  ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);
4925 
4926  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4927  Address branch_address = pc_immediate_load_address - kInstrSize;
4928  Address interrupt_address = Assembler::target_address_at(
4929  pc_immediate_load_address, unoptimized_code);
4930 
4931  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4932  ASSERT(interrupt_address ==
4933  isolate->builtins()->InterruptCheck()->entry());
4934  return INTERRUPT;
4935  }
4936 
4937  ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));
4938 
4939  if (interrupt_address ==
4940  isolate->builtins()->OnStackReplacement()->entry()) {
4941  return ON_STACK_REPLACEMENT;
4942  }
4943 
4944  ASSERT(interrupt_address ==
4945  isolate->builtins()->OsrAfterStackCheck()->entry());
4946  return OSR_AFTER_STACK_CHECK;
4947 }
4948 
4949 
4950 } } // namespace v8::internal
4951 
4952 #endif // V8_TARGET_ARCH_ARM
static const int kEnumCacheOffset
Definition: objects.h:3499
static String * cast(Object *obj)
const uint32_t kTwoByteStringTag
Definition: objects.h:610
const Register cp
const LowDwVfpRegister d0
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
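The smi-related entries in this index (Smi::FromInt, kSmiTag, kSmiTagSize, kSmiTagMask) all describe one tagging scheme. The following standalone sketch illustrates that scheme for a 32-bit target; the constant values are assumptions matching the usual 32-bit configuration, not code taken from this file.

// Illustrative sketch of 32-bit smi tagging (assumed constants; not V8 source).
#include <cassert>
#include <cstdint>

int main() {
  const intptr_t kSmiTag = 0;       // assumed: tag value of a small integer
  const intptr_t kSmiTagSize = 1;   // assumed: one tag bit
  const intptr_t kSmiTagMask = 1;   // assumed: mask selecting the tag bit

  int value = 42;
  // Roughly what Smi::FromInt(value) produces: shift the payload past the tag bit.
  intptr_t tagged = (static_cast<intptr_t>(value) << kSmiTagSize) | kSmiTag;

  assert((tagged & kSmiTagMask) == kSmiTag);  // the "is this a smi?" test
  assert((tagged >> kSmiTagSize) == value);   // recovering the payload
  return 0;
}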
static const int kResultValuePropertyOffset
Definition: objects.h:7342
bool IsFastObjectElementsKind(ElementsKind kind)
static TypeFeedbackId None()
Definition: utils.h:1149
static bool IsMovW(Instr instr)
static Handle< Code > GetUninitialized(Isolate *isolate, Token::Value op)
Definition: ic.cc:2489
static const int kGlobalReceiverOffset
Definition: objects.h:7613
const Register r6
T Max(T a, T b)
Definition: utils.h:227
Scope * outer_scope() const
Definition: scopes.h:350
static const int kGeneratorClosed
Definition: objects.h:7321
int int32_t
Definition: unicode.cc:47
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8673
static bool IsLdrPpImmediateOffset(Instr instr)
static const int kForInSlowCaseMarker
Definition: objects.h:8231
static bool enabled()
Definition: serialize.h:485
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static const int kSize
Definition: objects.h:7922
static const int kResultDonePropertyOffset
Definition: objects.h:7343
#define ASSERT(condition)
Definition: checks.h:329
friend class BlockConstPoolScope
static const int kContextOffset
Definition: frames.h:185
const int kPointerSizeLog2
Definition: globals.h:281
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:121
static const int kInObjectFieldCount
Definition: objects.h:7976
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3090
#define POINTER_SIZE_ALIGN(value)
Definition: v8globals.h:390
static const int kReceiverOffset
Definition: objects.h:7326
MemOperand GlobalObjectOperand()
const Register r2
static const int kCallerFPOffset
Definition: frames.h:188
static const int kInstanceClassNameOffset
Definition: objects.h:7107
bool IsOptimizable() const
Definition: compiler.h:232
Variable * parameter(int index) const
Definition: scopes.h:333
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
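ContextOperand above and Context::SlotOffset listed earlier express the same address arithmetic. A minimal sketch of that arithmetic follows, assuming a two-word array-style header and a one-byte heap-object tag; these assumed values are for illustration only.

// Illustrative sketch of context-slot offset arithmetic (assumed layout; not V8 source).
#include <cstdio>

int main() {
  const int kPointerSize = 4;                // 32-bit ARM
  const int kHeapObjectTag = 1;              // tagged heap pointers have the low bit set
  const int kHeaderSize = 2 * kPointerSize;  // assumed: map word + length word

  // Roughly what Context::SlotOffset(index) computes; ContextOperand(cp, index)
  // then wraps the result as MemOperand(cp, SlotOffset(index)).
  auto SlotOffset = [&](int index) {
    return kHeaderSize + index * kPointerSize - kHeapObjectTag;
  };

  std::printf("slot 0 -> %d, slot 3 -> %d\n", SlotOffset(0), SlotOffset(3));
  return 0;
}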
const Register pp
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
Definition: scopes.cc:721
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
#define IN
const Register sp
static const int kLiteralsOffset
Definition: objects.h:7524
#define UNREACHABLE()
Definition: checks.h:52
DEFINE_bool(enable_unaligned_accesses, ...): V8 runtime flag declaration (Doxygen-concatenated flag help text elided)
DEFINE_string(expose_natives_as, ...): V8 runtime flag declaration (Doxygen-concatenated flag help text elided)
static Condition ComputeCondition(Token::Value op)
V8 runtime flag declaration from flags.cc (DEFINE_* macro; Doxygen-concatenated flag help text elided)
Definition: flags.cc:211
static const int kLengthOffset
Definition: objects.h:8905
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
V8 runtime flag declaration from flags.cc (DEFINE_* macro; Doxygen-concatenated flag help text elided)
Definition: flags.cc:665
Variable * arguments() const
Definition: scopes.h:341
static const int kFirstOffset
Definition: objects.h:3500
NilValue
Definition: v8.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1278
const Register ip
static BailoutId Declarations()
Definition: utils.h:1166
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
const Register r9
const int kPointerSize
Definition: globals.h:268
void check(i::Vector< const uint8_t > string)
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6478
static void MaybeCallEntryHook(MacroAssembler *masm)
const int kHeapObjectTag
Definition: v8.h:5473
static int32_t & int32_at(Address addr)
Definition: v8memory.h:51
const Instr kBlxIp
DEFINE_bool(code_comments, ...): V8 runtime flag declaration (Doxygen-concatenated flag help text elided)
#define __
static const int kCallerSPOffset
Definition: frames.h:190
static const int kCacheStampOffset
Definition: objects.h:7787
static bool IsMovT(Instr instr)
const Register pc
static const int kDescriptorSize
Definition: objects.h:3509
static const int kPropertiesOffset
Definition: objects.h:2755
int num_parameters() const
Definition: scopes.h:338
static const int kMarkerOffset
Definition: frames.h:184
static const int kExpressionsOffset
Definition: frames.h:183
static const int kHeaderSize
Definition: objects.h:9042
void CheckConstPool(bool force_emit, bool require_jump)
const Register r0
static const int kElementsOffset
Definition: objects.h:2756
static BailoutId FunctionEntry()
Definition: utils.h:1165
#define BASE_EMBEDDED
Definition: allocation.h:68
OverwriteMode
Definition: ic.h:690
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:503
Vector< const char > CStrVector(const char *data)
Definition: utils.h:574
static int OffsetOfElementAt(int index)
Definition: objects.h:3070
static const int kLengthOffset
Definition: objects.h:10076
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:3016
Scope * GlobalScope()
Definition: scopes.cc:734
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1406
static const int kContextOffset
Definition: objects.h:7325
const Register lr
static const int kMapOffset
Definition: objects.h:1890
static const int kValueOffset
Definition: objects.h:7779
static const int kFixedFrameSizeFromFp
Definition: frames.h:180
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3503
const Register r1
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:545
static const int kLengthOffset
Definition: objects.h:3015
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:6675
V8 runtime flag declaration from flags.cc (DEFINE_* macro; Doxygen-concatenated flag help text elided)
Definition: flags.cc:317
MemOperand FieldMemOperand(Register object, int offset)
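FieldMemOperand above and the kHeapObjectTag constant listed earlier belong together: object fields are addressed through a tagged pointer, so the tag is folded into the displacement. A minimal sketch of that adjustment (assumed behaviour; the field offset name and values below are illustrative, not this file's code):

// Illustrative sketch: field access through a tagged heap pointer (assumed; not V8 source).
#include <cassert>
#include <cstdint>

int main() {
  const int kHeapObjectTag = 1;  // heap pointers carry a 1 in the low bit
  const int kMapOffset = 0;      // assumed: the map is the first field of a heap object

  uintptr_t raw_address = 0x1000;                            // where the object really lives
  uintptr_t tagged_pointer = raw_address + kHeapObjectTag;   // what a register such as r0 holds

  // FieldMemOperand(object, offset) behaves roughly like
  // MemOperand(object, offset - kHeapObjectTag): the tag is absorbed by the displacement.
  uintptr_t effective = tagged_pointer - kHeapObjectTag + kMapOffset;
  assert(effective == raw_address + kMapOffset);
  return 0;
}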
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:206
static const int kContextOffset
Definition: frames.h:97
static const int kFormalParameterCountOffset
Definition: objects.h:7156
const int kSmiTagSize
Definition: v8.h:5479
void CopyBytes(uint8_t *target, uint8_t *source)
Definition: runtime.cc:1309
static const int kHeaderSize
Definition: objects.h:5604
static const int kGeneratorExecuting
Definition: objects.h:7320
Condition NegateCondition(Condition cond)
static bool ShouldGenerateLog(Isolate *isolate, Expression *type)
Definition: codegen.cc:191
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
static const int kContinuationOffset
Definition: objects.h:7327
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:996
static const int kJSReturnSequenceInstructions
static const int kConstructorOffset
Definition: objects.h:6428
const uint32_t kOneByteStringTag
Definition: objects.h:611
const int kSmiTag
Definition: v8.h:5478
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
static const int kIsUndetectable
Definition: objects.h:6472
static const int kInstrSize
static const int kPrototypeOffset
Definition: objects.h:6427
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target)
const Register no_reg
static const int kValueOffset
Definition: objects.h:7701
bool IsImmutableVariableMode(VariableMode mode)
Definition: v8globals.h:513
const Register fp
static const int kNativeContextOffset
Definition: objects.h:7611
void AddNoFrameRange(int from, int to)
Definition: compiler.h:296
T Min(T a, T b)
Definition: utils.h:234
static const int kSharedFunctionInfoOffset
Definition: objects.h:7521
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:2121
static const int kBitField2Offset
Definition: objects.h:6462
static const int kConstantPoolOffset
Definition: objects.h:5598
V8 runtime flag declaration from flags.cc (DEFINE_* macro; Doxygen-concatenated flag help text elided)
Definition: flags.cc:505
#define VOID
static const int kExponentOffset
Definition: objects.h:1977
const Register r5
static bool IsLdrPcImmediateOffset(Instr instr)
static const int kInstanceTypeOffset
Definition: objects.h:6459
static const int kOperandStackOffset
Definition: objects.h:7328
static const int kMantissaOffset
Definition: objects.h:1976
TypeofState
Definition: codegen.h:69
Scope * scope() const
Definition: compiler.h:78
const Register r4