V8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
full-codegen-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_ARM)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 #include "arm/code-stubs-arm.h"
43 #include "arm/macro-assembler-arm.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 #define __ ACCESS_MASM(masm_)
49 
50 
51 // A patch site is a location in the code which it is possible to patch. This
52 // class has a number of methods to emit the code which is patchable and the
53 // method EmitPatchInfo to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
55 // immediate value is used) is the delta from the pc to the first instruction of
56 // the patchable code.
57 class JumpPatchSite BASE_EMBEDDED {
58  public:
59  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
60 #ifdef DEBUG
61  info_emitted_ = false;
62 #endif
63  }
64 
65  ~JumpPatchSite() {
66  ASSERT(patch_site_.is_bound() == info_emitted_);
67  }
68 
69  // When initially emitting this ensure that a jump is always generated to skip
70  // the inlined smi code.
71  void EmitJumpIfNotSmi(Register reg, Label* target) {
72  ASSERT(!patch_site_.is_bound() && !info_emitted_);
73  Assembler::BlockConstPoolScope block_const_pool(masm_);
74  __ bind(&patch_site_);
75  __ cmp(reg, Operand(reg));
76  __ b(eq, target); // Always taken before patched.
77  }
78 
79  // When initially emitting this ensure that a jump is never generated to skip
80  // the inlined smi code.
81  void EmitJumpIfSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockConstPoolScope block_const_pool(masm_);
84  __ bind(&patch_site_);
85  __ cmp(reg, Operand(reg));
86  __ b(ne, target); // Never taken before patched.
87  }
88 
89  void EmitPatchInfo() {
90  // Block literal pool emission whilst recording patch site information.
91  Assembler::BlockConstPoolScope block_const_pool(masm_);
92  if (patch_site_.is_bound()) {
93  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
94  Register reg;
95  reg.set_code(delta_to_patch_site / kOff12Mask);
96  __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
97 #ifdef DEBUG
98  info_emitted_ = true;
99 #endif
100  } else {
101  __ nop(); // Signals no inlined code.
102  }
103  }
104 
105  private:
106  MacroAssembler* masm_;
107  Label patch_site_;
108 #ifdef DEBUG
109  bool info_emitted_;
110 #endif
111 };
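// A minimal standalone sketch (not part of the original source) of the
// arithmetic the patch-site marker relies on, assuming kOff12Mask is the raw
// 12-bit immediate mask (0xfff): EmitPatchInfo splits the pc delta across the
// register field and the immediate of a "cmp rx, #yyy" instruction, and a
// patcher recovers it as rx * kOff12Mask + yyy.
#include <cstdint>

struct PatchSiteMarker {
  uint32_t reg_code;  // carried in the rx field of "cmp rx, #yyy"
  uint32_t imm12;     // carried in the raw 12-bit immediate #yyy
};

inline PatchSiteMarker EncodePatchDelta(uint32_t delta, uint32_t off12_mask = 0xfff) {
  return PatchSiteMarker{delta / off12_mask, delta % off12_mask};
}

inline uint32_t DecodePatchDelta(const PatchSiteMarker& m, uint32_t off12_mask = 0xfff) {
  // Distance (in instructions) from the marker back to the patchable cmp/branch pair.
  return m.reg_code * off12_mask + m.imm12;
}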
112 
113 
114 // Generate code for a JS function. On entry to the function the receiver
115 // and arguments have been pushed on the stack left to right. The actual
116 // argument count matches the formal parameter count expected by the
117 // function.
118 //
119 // The live registers are:
120 // o r1: the JS function object being called (i.e., ourselves)
121 // o cp: our context
122 // o fp: our caller's frame pointer
123 // o sp: stack pointer
124 // o lr: return address
125 //
126 // The function builds a JS frame. Please see JavaScriptFrameConstants in
127 // frames-arm.h for its layout.
128 void FullCodeGenerator::Generate() {
129  CompilationInfo* info = info_;
130  handler_table_ =
131  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
132  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
133  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
134  SetFunctionPosition(function());
135  Comment cmnt(masm_, "[ function compiled by full code generator");
136 
138 
139 #ifdef DEBUG
140  if (strlen(FLAG_stop_at) > 0 &&
141  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
142  __ stop("stop-at");
143  }
144 #endif
145 
146  // Strict mode functions and builtins need to replace the receiver
147  // with undefined when called as functions (without an explicit
148  // receiver object). r5 is zero for method calls and non-zero for
149  // function calls.
150  if (!info->is_classic_mode() || info->is_native()) {
151  Label ok;
152  __ cmp(r5, Operand(0));
153  __ b(eq, &ok);
154  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
155  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
156  __ str(r2, MemOperand(sp, receiver_offset));
157  __ bind(&ok);
158  }
159 
160  // Open a frame scope to indicate that there is a frame on the stack. The
161  // MANUAL indicates that the scope shouldn't actually generate code to set up
162  // the frame (that is done below).
163  FrameScope frame_scope(masm_, StackFrame::MANUAL);
164 
165  int locals_count = info->scope()->num_stack_slots();
166 
167  __ Push(lr, fp, cp, r1);
168  if (locals_count > 0) {
169  // Load undefined value here, so the value is ready for the loop
170  // below.
171  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
172  }
173  // Adjust fp to point to caller's fp.
174  __ add(fp, sp, Operand(2 * kPointerSize));
175 
176  { Comment cmnt(masm_, "[ Allocate locals");
177  for (int i = 0; i < locals_count; i++) {
178  __ push(ip);
179  }
180  }
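// After the prologue above (Push(lr, fp, cp, r1) followed by
// add(fp, sp, Operand(2 * kPointerSize)) and the locals loop) the frame looks
// roughly as sketched below; JavaScriptFrameConstants in frames-arm.h is the
// authoritative layout:
//
//   fp + 1 * kPointerSize : caller's return address (saved lr)
//   fp                    : caller's frame pointer (saved fp)
//   fp - 1 * kPointerSize : context (cp)
//   fp - 2 * kPointerSize : JS function (r1)
//   fp - 3 * kPointerSize : first stack local (initialized to undefined)
//   ...                   : remaining locals, then the expression stack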
181 
182  bool function_in_register = true;
183 
184  // Possibly allocate a local context.
185  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186  if (heap_slots > 0) {
187  // Argument to NewContext is the function, which is still in r1.
188  Comment cmnt(masm_, "[ Allocate context");
189  __ push(r1);
190  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
191  __ Push(info->scope()->GetScopeInfo());
192  __ CallRuntime(Runtime::kNewGlobalContext, 2);
193  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
194  FastNewContextStub stub(heap_slots);
195  __ CallStub(&stub);
196  } else {
197  __ CallRuntime(Runtime::kNewFunctionContext, 1);
198  }
199  function_in_register = false;
200  // Context is returned in both r0 and cp. It replaces the context
201  // passed to us. It's saved in the stack and kept live in cp.
202  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
203  // Copy any necessary parameters into the context.
204  int num_parameters = info->scope()->num_parameters();
205  for (int i = 0; i < num_parameters; i++) {
206  Variable* var = scope()->parameter(i);
207  if (var->IsContextSlot()) {
208  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
209  (num_parameters - 1 - i) * kPointerSize;
210  // Load parameter from stack.
211  __ ldr(r0, MemOperand(fp, parameter_offset));
212  // Store it in the context.
213  MemOperand target = ContextOperand(cp, var->index());
214  __ str(r0, target);
215 
216  // Update the write barrier.
217  __ RecordWriteContextSlot(
218  cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
219  }
220  }
221  }
222 
223  Variable* arguments = scope()->arguments();
224  if (arguments != NULL) {
225  // Function uses arguments object.
226  Comment cmnt(masm_, "[ Allocate arguments object");
227  if (!function_in_register) {
228  // Load this again, if it's used by the local context below.
229  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
230  } else {
231  __ mov(r3, r1);
232  }
233  // Receiver is just before the parameters on the caller's stack.
234  int num_parameters = info->scope()->num_parameters();
235  int offset = num_parameters * kPointerSize;
236  __ add(r2, fp,
237  Operand(StandardFrameConstants::kCallerSPOffset + offset));
238  __ mov(r1, Operand(Smi::FromInt(num_parameters)));
239  __ Push(r3, r2, r1);
240 
241  // Arguments to ArgumentsAccessStub:
242  // function, receiver address, parameter count.
243  // The stub will rewrite receiver and parameter count if the previous
244  // stack frame was an arguments adapter frame.
245  ArgumentsAccessStub::Type type;
246  if (!is_classic_mode()) {
247  type = ArgumentsAccessStub::NEW_STRICT;
248  } else if (function()->has_duplicate_parameters()) {
249  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
250  } else {
251  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
252  }
253  ArgumentsAccessStub stub(type);
254  __ CallStub(&stub);
255 
256  SetVar(arguments, r0, r1, r2);
257  }
258 
259  if (FLAG_trace) {
260  __ CallRuntime(Runtime::kTraceEnter, 0);
261  }
262 
263  // Visit the declarations and body unless there is an illegal
264  // redeclaration.
265  if (scope()->HasIllegalRedeclaration()) {
266  Comment cmnt(masm_, "[ Declarations");
267  scope()->VisitIllegalRedeclaration(this);
268 
269  } else {
270  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
271  { Comment cmnt(masm_, "[ Declarations");
272  // For named function expressions, declare the function name as a
273  // constant.
274  if (scope()->is_function_scope() && scope()->function() != NULL) {
275  VariableDeclaration* function = scope()->function();
276  ASSERT(function->proxy()->var()->mode() == CONST ||
277  function->proxy()->var()->mode() == CONST_HARMONY);
278  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
279  VisitVariableDeclaration(function);
280  }
281  VisitDeclarations(scope()->declarations());
282  }
283 
284  { Comment cmnt(masm_, "[ Stack check");
285  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
286  Label ok;
287  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
288  __ cmp(sp, Operand(ip));
289  __ b(hs, &ok);
290  PredictableCodeSizeScope predictable(masm_);
291  StackCheckStub stub;
292  __ CallStub(&stub);
293  __ bind(&ok);
294  }
295 
296  { Comment cmnt(masm_, "[ Body");
297  ASSERT(loop_depth() == 0);
298  VisitStatements(function()->body());
299  ASSERT(loop_depth() == 0);
300  }
301  }
302 
303  // Always emit a 'return undefined' in case control fell off the end of
304  // the body.
305  { Comment cmnt(masm_, "[ return <undefined>;");
306  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
307  }
308  EmitReturnSequence();
309 
310  // Force emit the constant pool, so it doesn't get emitted in the middle
311  // of the stack check table.
312  masm()->CheckConstPool(true, false);
313 }
314 
315 
316 void FullCodeGenerator::ClearAccumulator() {
317  __ mov(r0, Operand(Smi::FromInt(0)));
318 }
319 
320 
321 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
322  __ mov(r2, Operand(profiling_counter_));
323  __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
324  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
325  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
326 }
327 
328 
329 void FullCodeGenerator::EmitProfilingCounterReset() {
330  int reset_value = FLAG_interrupt_budget;
331  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
332  // Self-optimization is a one-off thing: if it fails, don't try again.
333  reset_value = Smi::kMaxValue;
334  }
335  if (isolate()->IsDebuggerActive()) {
336  // Detect debug break requests as soon as possible.
337  reset_value = FLAG_interrupt_budget >> 4;
338  }
339  __ mov(r2, Operand(profiling_counter_));
340  __ mov(r3, Operand(Smi::FromInt(reset_value)));
341  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
342 }
343 
344 
345 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
346  Label* back_edge_target) {
347  Comment cmnt(masm_, "[ Stack check");
348  // Block literal pools whilst emitting stack check code.
349  Assembler::BlockConstPoolScope block_const_pool(masm_);
350  Label ok;
351 
352  if (FLAG_count_based_interrupts) {
353  int weight = 1;
354  if (FLAG_weighted_back_edges) {
355  ASSERT(back_edge_target->is_bound());
356  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
357  weight = Min(kMaxBackEdgeWeight,
358  Max(1, distance / kBackEdgeDistanceUnit));
359  }
360  EmitProfilingCounterDecrement(weight);
361  __ b(pl, &ok);
362  InterruptStub stub;
363  __ CallStub(&stub);
364  } else {
365  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
366  __ cmp(sp, Operand(ip));
367  __ b(hs, &ok);
368  PredictableCodeSizeScope predictable(masm_);
369  StackCheckStub stub;
370  __ CallStub(&stub);
371  }
372 
373  // Record a mapping of this PC offset to the OSR id. This is used to find
374  // the AST id from the unoptimized code in order to use it as a key into
375  // the deoptimization input data found in the optimized code.
376  RecordStackCheck(stmt->OsrEntryId());
377 
378  if (FLAG_count_based_interrupts) {
379  EmitProfilingCounterReset();
380  }
381 
382  __ bind(&ok);
383  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
384  // Record a mapping of the OSR id to this PC. This is used if the OSR
385  // entry becomes the target of a bailout. We don't expect it to be, but
386  // we want it to work if it is.
387  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
388 }
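// A standalone sketch (not V8 code) of the back-edge weight computation above:
// larger distances between the back edge and its target decrement the
// profiling counter faster, so loops with big bodies still reach the
// InterruptStub after a reasonable number of iterations. kMaxBackEdgeWeight
// and kBackEdgeDistanceUnit are the constants used by the full code generator.
#include <algorithm>

inline int BackEdgeWeight(int distance_since_back_edge_target,
                          int max_back_edge_weight,
                          int back_edge_distance_unit) {
  return std::min(max_back_edge_weight,
                  std::max(1, distance_since_back_edge_target /
                                  back_edge_distance_unit));
}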
389 
390 
391 void FullCodeGenerator::EmitReturnSequence() {
392  Comment cmnt(masm_, "[ Return sequence");
393  if (return_label_.is_bound()) {
394  __ b(&return_label_);
395  } else {
396  __ bind(&return_label_);
397  if (FLAG_trace) {
398  // Push the return value on the stack as the parameter.
399  // Runtime::TraceExit returns its parameter in r0.
400  __ push(r0);
401  __ CallRuntime(Runtime::kTraceExit, 1);
402  }
403  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
404  // Pretend that the exit is a backwards jump to the entry.
405  int weight = 1;
406  if (info_->ShouldSelfOptimize()) {
407  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
408  } else if (FLAG_weighted_back_edges) {
409  int distance = masm_->pc_offset();
410  weight = Min(kMaxBackEdgeWeight,
411  Max(1, distance / kBackEdgeDistanceUnit));
412  }
413  EmitProfilingCounterDecrement(weight);
414  Label ok;
415  __ b(pl, &ok);
416  __ push(r0);
417  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
418  __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
419  __ push(r2);
420  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
421  } else {
422  InterruptStub stub;
423  __ CallStub(&stub);
424  }
425  __ pop(r0);
426  EmitProfilingCounterReset();
427  __ bind(&ok);
428  }
429 
430 #ifdef DEBUG
431  // Add a label for checking the size of the code used for returning.
432  Label check_exit_codesize;
433  masm_->bind(&check_exit_codesize);
434 #endif
435  // Make sure that the constant pool is not emitted inside of the return
436  // sequence.
437  { Assembler::BlockConstPoolScope block_const_pool(masm_);
438  // Here we use masm_-> instead of the __ macro to prevent the code coverage
439  // tool from instrumenting this sequence, since we rely on the exact code size.
440  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
441  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
442  PredictableCodeSizeScope predictable(masm_);
443  __ RecordJSReturn();
444  masm_->mov(sp, fp);
445  masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
446  masm_->add(sp, sp, Operand(sp_delta));
447  masm_->Jump(lr);
448  }
449 
450 #ifdef DEBUG
451  // Check that the size of the code used for returning is large enough
452  // for the debugger's requirements.
453  ASSERT(Assembler::kJSReturnSequenceInstructions <=
454  masm_->InstructionsGeneratedSince(&check_exit_codesize));
455 #endif
456  }
457 }
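// Note on the return sequence above: sp_delta is
// (number of formal parameters + 1) * kPointerSize, the extra slot being the
// receiver, so restoring sp from fp, popping the saved fp/lr pair and adding
// sp_delta drops the whole frame together with the caller-pushed arguments
// before jumping to lr.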
458 
459 
460 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
461  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
462 }
463 
464 
465 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
466  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
467  codegen()->GetVar(result_register(), var);
468 }
469 
470 
471 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
472  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
473  codegen()->GetVar(result_register(), var);
474  __ push(result_register());
475 }
476 
477 
478 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
479  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
480  // For simplicity we always test the accumulator register.
481  codegen()->GetVar(result_register(), var);
482  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
483  codegen()->DoTest(this);
484 }
485 
486 
487 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
488 }
489 
490 
491 void FullCodeGenerator::AccumulatorValueContext::Plug(
492  Heap::RootListIndex index) const {
493  __ LoadRoot(result_register(), index);
494 }
495 
496 
497 void FullCodeGenerator::StackValueContext::Plug(
498  Heap::RootListIndex index) const {
499  __ LoadRoot(result_register(), index);
500  __ push(result_register());
501 }
502 
503 
504 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
505  codegen()->PrepareForBailoutBeforeSplit(condition(),
506  true,
507  true_label_,
508  false_label_);
509  if (index == Heap::kUndefinedValueRootIndex ||
510  index == Heap::kNullValueRootIndex ||
511  index == Heap::kFalseValueRootIndex) {
512  if (false_label_ != fall_through_) __ b(false_label_);
513  } else if (index == Heap::kTrueValueRootIndex) {
514  if (true_label_ != fall_through_) __ b(true_label_);
515  } else {
516  __ LoadRoot(result_register(), index);
517  codegen()->DoTest(this);
518  }
519 }
520 
521 
522 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
523 }
524 
525 
526 void FullCodeGenerator::AccumulatorValueContext::Plug(
527  Handle<Object> lit) const {
528  __ mov(result_register(), Operand(lit));
529 }
530 
531 
532 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
533  // Immediates cannot be pushed directly.
534  __ mov(result_register(), Operand(lit));
535  __ push(result_register());
536 }
537 
538 
539 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
540  codegen()->PrepareForBailoutBeforeSplit(condition(),
541  true,
542  true_label_,
543  false_label_);
544  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
545  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
546  if (false_label_ != fall_through_) __ b(false_label_);
547  } else if (lit->IsTrue() || lit->IsJSObject()) {
548  if (true_label_ != fall_through_) __ b(true_label_);
549  } else if (lit->IsString()) {
550  if (String::cast(*lit)->length() == 0) {
551  if (false_label_ != fall_through_) __ b(false_label_);
552  } else {
553  if (true_label_ != fall_through_) __ b(true_label_);
554  }
555  } else if (lit->IsSmi()) {
556  if (Smi::cast(*lit)->value() == 0) {
557  if (false_label_ != fall_through_) __ b(false_label_);
558  } else {
559  if (true_label_ != fall_through_) __ b(true_label_);
560  }
561  } else {
562  // For simplicity we always test the accumulator register.
563  __ mov(result_register(), Operand(lit));
564  codegen()->DoTest(this);
565  }
566 }
567 
568 
569 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
570  Register reg) const {
571  ASSERT(count > 0);
572  __ Drop(count);
573 }
574 
575 
576 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
577  int count,
578  Register reg) const {
579  ASSERT(count > 0);
580  __ Drop(count);
581  __ Move(result_register(), reg);
582 }
583 
584 
585 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
586  Register reg) const {
587  ASSERT(count > 0);
588  if (count > 1) __ Drop(count - 1);
589  __ str(reg, MemOperand(sp, 0));
590 }
591 
592 
593 void FullCodeGenerator::TestContext::DropAndPlug(int count,
594  Register reg) const {
595  ASSERT(count > 0);
596  // For simplicity we always test the accumulator register.
597  __ Drop(count);
598  __ Move(result_register(), reg);
599  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
600  codegen()->DoTest(this);
601 }
602 
603 
604 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
605  Label* materialize_false) const {
606  ASSERT(materialize_true == materialize_false);
607  __ bind(materialize_true);
608 }
609 
610 
611 void FullCodeGenerator::AccumulatorValueContext::Plug(
612  Label* materialize_true,
613  Label* materialize_false) const {
614  Label done;
615  __ bind(materialize_true);
616  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
617  __ jmp(&done);
618  __ bind(materialize_false);
619  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
620  __ bind(&done);
621 }
622 
623 
624 void FullCodeGenerator::StackValueContext::Plug(
625  Label* materialize_true,
626  Label* materialize_false) const {
627  Label done;
628  __ bind(materialize_true);
629  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
630  __ push(ip);
631  __ jmp(&done);
632  __ bind(materialize_false);
633  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
634  __ push(ip);
635  __ bind(&done);
636 }
637 
638 
639 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
640  Label* materialize_false) const {
641  ASSERT(materialize_true == true_label_);
642  ASSERT(materialize_false == false_label_);
643 }
644 
645 
646 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
647 }
648 
649 
650 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
651  Heap::RootListIndex value_root_index =
652  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
653  __ LoadRoot(result_register(), value_root_index);
654 }
655 
656 
657 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
658  Heap::RootListIndex value_root_index =
659  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
660  __ LoadRoot(ip, value_root_index);
661  __ push(ip);
662 }
663 
664 
665 void FullCodeGenerator::TestContext::Plug(bool flag) const {
666  codegen()->PrepareForBailoutBeforeSplit(condition(),
667  true,
668  true_label_,
669  false_label_);
670  if (flag) {
671  if (true_label_ != fall_through_) __ b(true_label_);
672  } else {
673  if (false_label_ != fall_through_) __ b(false_label_);
674  }
675 }
676 
677 
678 void FullCodeGenerator::DoTest(Expression* condition,
679  Label* if_true,
680  Label* if_false,
681  Label* fall_through) {
682  ToBooleanStub stub(result_register());
683  __ CallStub(&stub);
684  __ tst(result_register(), result_register());
685  Split(ne, if_true, if_false, fall_through);
686 }
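// Inferred from the tst/Split pair above: the ToBooleanStub is expected to
// leave zero in the result register for a falsy value and non-zero for a
// truthy one, so a plain register self-test selects the branch.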
687 
688 
689 void FullCodeGenerator::Split(Condition cond,
690  Label* if_true,
691  Label* if_false,
692  Label* fall_through) {
693  if (if_false == fall_through) {
694  __ b(cond, if_true);
695  } else if (if_true == fall_through) {
696  __ b(NegateCondition(cond), if_false);
697  } else {
698  __ b(cond, if_true);
699  __ b(if_false);
700  }
701 }
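// Branch selection in Split, summarized (the fall-through target never gets an
// explicit jump):
//   if_false == fall_through : b<cond>         if_true
//   if_true  == fall_through : b<negated cond> if_false
//   neither                  : b<cond> if_true ; b if_false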
702 
703 
704 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
705  ASSERT(var->IsStackAllocated());
706  // Offset is negative because higher indexes are at lower addresses.
707  int offset = -var->index() * kPointerSize;
708  // Adjust by a (parameter or local) base offset.
709  if (var->IsParameter()) {
710  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
711  } else {
712  offset += JavaScriptFrameConstants::kLocal0Offset;
713  }
714  return MemOperand(fp, offset);
715 }
716 
717 
718 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
719  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
720  if (var->IsContextSlot()) {
721  int context_chain_length = scope()->ContextChainLength(var->scope());
722  __ LoadContext(scratch, context_chain_length);
723  return ContextOperand(scratch, var->index());
724  } else {
725  return StackOperand(var);
726  }
727 }
728 
729 
730 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
731  // Use destination as scratch.
732  MemOperand location = VarOperand(var, dest);
733  __ ldr(dest, location);
734 }
735 
736 
737 void FullCodeGenerator::SetVar(Variable* var,
738  Register src,
739  Register scratch0,
740  Register scratch1) {
741  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
742  ASSERT(!scratch0.is(src));
743  ASSERT(!scratch0.is(scratch1));
744  ASSERT(!scratch1.is(src));
745  MemOperand location = VarOperand(var, scratch0);
746  __ str(src, location);
747 
748  // Emit the write barrier code if the location is in the heap.
749  if (var->IsContextSlot()) {
750  __ RecordWriteContextSlot(scratch0,
751  location.offset(),
752  src,
753  scratch1,
754  kLRHasBeenSaved,
755  kDontSaveFPRegs);
756  }
757 }
758 
759 
760 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
761  bool should_normalize,
762  Label* if_true,
763  Label* if_false) {
764  // Only prepare for bailouts before splits if we're in a test
765  // context. Otherwise, we let the Visit function deal with the
766  // preparation to avoid preparing with the same AST id twice.
767  if (!context()->IsTest() || !info_->IsOptimizable()) return;
768 
769  Label skip;
770  if (should_normalize) __ b(&skip);
771  PrepareForBailout(expr, TOS_REG);
772  if (should_normalize) {
773  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
774  __ cmp(r0, ip);
775  Split(eq, if_true, if_false, NULL);
776  __ bind(&skip);
777  }
778 }
779 
780 
781 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
782  // The variable in the declaration always resides in the current function
783  // context.
784  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
785  if (generate_debug_code_) {
786  // Check that we're not inside a with or catch context.
787  __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
788  __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
789  __ Check(ne, "Declaration in with context.");
790  __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
791  __ Check(ne, "Declaration in catch context.");
792  }
793 }
794 
795 
796 void FullCodeGenerator::VisitVariableDeclaration(
797  VariableDeclaration* declaration) {
798  // If it was not possible to allocate the variable at compile time, we
799  // need to "declare" it at runtime to make sure it actually exists in the
800  // local context.
801  VariableProxy* proxy = declaration->proxy();
802  VariableMode mode = declaration->mode();
803  Variable* variable = proxy->var();
804  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
805  switch (variable->location()) {
806  case Variable::UNALLOCATED:
807  globals_->Add(variable->name(), zone());
808  globals_->Add(variable->binding_needs_init()
809  ? isolate()->factory()->the_hole_value()
810  : isolate()->factory()->undefined_value(),
811  zone());
812  break;
813 
814  case Variable::PARAMETER:
815  case Variable::LOCAL:
816  if (hole_init) {
817  Comment cmnt(masm_, "[ VariableDeclaration");
818  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
819  __ str(ip, StackOperand(variable));
820  }
821  break;
822 
823  case Variable::CONTEXT:
824  if (hole_init) {
825  Comment cmnt(masm_, "[ VariableDeclaration");
826  EmitDebugCheckDeclarationContext(variable);
827  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
828  __ str(ip, ContextOperand(cp, variable->index()));
829  // No write barrier since the_hole_value is in old space.
830  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
831  }
832  break;
833 
834  case Variable::LOOKUP: {
835  Comment cmnt(masm_, "[ VariableDeclaration");
836  __ mov(r2, Operand(variable->name()));
837  // Declaration nodes are always introduced in one of four modes.
839  PropertyAttributes attr =
840  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
841  __ mov(r1, Operand(Smi::FromInt(attr)));
842  // Push initial value, if any.
843  // Note: For variables we must not push an initial value (such as
844  // 'undefined') because we may have a (legal) redeclaration and we
845  // must not destroy the current value.
846  if (hole_init) {
847  __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
848  __ Push(cp, r2, r1, r0);
849  } else {
850  __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
851  __ Push(cp, r2, r1, r0);
852  }
853  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
854  break;
855  }
856  }
857 }
858 
859 
860 void FullCodeGenerator::VisitFunctionDeclaration(
861  FunctionDeclaration* declaration) {
862  VariableProxy* proxy = declaration->proxy();
863  Variable* variable = proxy->var();
864  switch (variable->location()) {
865  case Variable::UNALLOCATED: {
866  globals_->Add(variable->name(), zone());
867  Handle<SharedFunctionInfo> function =
868  Compiler::BuildFunctionInfo(declaration->fun(), script());
869  // Check for stack-overflow exception.
870  if (function.is_null()) return SetStackOverflow();
871  globals_->Add(function, zone());
872  break;
873  }
874 
875  case Variable::PARAMETER:
876  case Variable::LOCAL: {
877  Comment cmnt(masm_, "[ FunctionDeclaration");
878  VisitForAccumulatorValue(declaration->fun());
879  __ str(result_register(), StackOperand(variable));
880  break;
881  }
882 
883  case Variable::CONTEXT: {
884  Comment cmnt(masm_, "[ FunctionDeclaration");
885  EmitDebugCheckDeclarationContext(variable);
886  VisitForAccumulatorValue(declaration->fun());
887  __ str(result_register(), ContextOperand(cp, variable->index()));
888  int offset = Context::SlotOffset(variable->index());
889  // We know that we have written a function, which is not a smi.
890  __ RecordWriteContextSlot(cp,
891  offset,
892  result_register(),
893  r2,
894  kLRHasBeenSaved,
895  kDontSaveFPRegs,
896  EMIT_REMEMBERED_SET,
897  OMIT_SMI_CHECK);
898  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
899  break;
900  }
901 
902  case Variable::LOOKUP: {
903  Comment cmnt(masm_, "[ FunctionDeclaration");
904  __ mov(r2, Operand(variable->name()));
905  __ mov(r1, Operand(Smi::FromInt(NONE)));
906  __ Push(cp, r2, r1);
907  // Push initial value for function declaration.
908  VisitForStackValue(declaration->fun());
909  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
910  break;
911  }
912  }
913 }
914 
915 
916 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
917  VariableProxy* proxy = declaration->proxy();
918  Variable* variable = proxy->var();
919  Handle<JSModule> instance = declaration->module()->interface()->Instance();
920  ASSERT(!instance.is_null());
921 
922  switch (variable->location()) {
923  case Variable::UNALLOCATED: {
924  Comment cmnt(masm_, "[ ModuleDeclaration");
925  globals_->Add(variable->name(), zone());
926  globals_->Add(instance, zone());
927  Visit(declaration->module());
928  break;
929  }
930 
931  case Variable::CONTEXT: {
932  Comment cmnt(masm_, "[ ModuleDeclaration");
933  EmitDebugCheckDeclarationContext(variable);
934  __ mov(r1, Operand(instance));
935  __ str(r1, ContextOperand(cp, variable->index()));
936  Visit(declaration->module());
937  break;
938  }
939 
940  case Variable::PARAMETER:
941  case Variable::LOCAL:
942  case Variable::LOOKUP:
943  UNREACHABLE();
944  }
945 }
946 
947 
948 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
949  VariableProxy* proxy = declaration->proxy();
950  Variable* variable = proxy->var();
951  switch (variable->location()) {
952  case Variable::UNALLOCATED:
953  // TODO(rossberg)
954  break;
955 
956  case Variable::CONTEXT: {
957  Comment cmnt(masm_, "[ ImportDeclaration");
958  EmitDebugCheckDeclarationContext(variable);
959  // TODO(rossberg)
960  break;
961  }
962 
963  case Variable::PARAMETER:
964  case Variable::LOCAL:
965  case Variable::LOOKUP:
966  UNREACHABLE();
967  }
968 }
969 
970 
971 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
972  // TODO(rossberg)
973 }
974 
975 
976 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
977  // Call the runtime to declare the globals.
978  // The context is the first argument.
979  __ mov(r1, Operand(pairs));
980  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
981  __ Push(cp, r1, r0);
982  __ CallRuntime(Runtime::kDeclareGlobals, 3);
983  // Return value is ignored.
984 }
985 
986 
987 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
988  Comment cmnt(masm_, "[ SwitchStatement");
989  Breakable nested_statement(this, stmt);
990  SetStatementPosition(stmt);
991 
992  // Keep the switch value on the stack until a case matches.
993  VisitForStackValue(stmt->tag());
994  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
995 
996  ZoneList<CaseClause*>* clauses = stmt->cases();
997  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
998 
999  Label next_test; // Recycled for each test.
1000  // Compile all the tests with branches to their bodies.
1001  for (int i = 0; i < clauses->length(); i++) {
1002  CaseClause* clause = clauses->at(i);
1003  clause->body_target()->Unuse();
1004 
1005  // The default is not a test, but remember it as final fall through.
1006  if (clause->is_default()) {
1007  default_clause = clause;
1008  continue;
1009  }
1010 
1011  Comment cmnt(masm_, "[ Case comparison");
1012  __ bind(&next_test);
1013  next_test.Unuse();
1014 
1015  // Compile the label expression.
1016  VisitForAccumulatorValue(clause->label());
1017 
1018  // Perform the comparison as if via '==='.
1019  __ ldr(r1, MemOperand(sp, 0)); // Switch value.
1020  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1021  JumpPatchSite patch_site(masm_);
1022  if (inline_smi_code) {
1023  Label slow_case;
1024  __ orr(r2, r1, r0);
1025  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1026 
1027  __ cmp(r1, r0);
1028  __ b(ne, &next_test);
1029  __ Drop(1); // Switch value is no longer needed.
1030  __ b(clause->body_target());
1031  __ bind(&slow_case);
1032  }
1033 
1034  // Record position before stub call for type feedback.
1035  SetSourcePosition(clause->position());
1036  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1037  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1038  patch_site.EmitPatchInfo();
1039 
1040  __ cmp(r0, Operand(0));
1041  __ b(ne, &next_test);
1042  __ Drop(1); // Switch value is no longer needed.
1043  __ b(clause->body_target());
1044  }
1045 
1046  // Discard the test value and jump to the default if present, otherwise to
1047  // the end of the statement.
1048  __ bind(&next_test);
1049  __ Drop(1); // Switch value is no longer needed.
1050  if (default_clause == NULL) {
1051  __ b(nested_statement.break_label());
1052  } else {
1053  __ b(default_clause->body_target());
1054  }
1055 
1056  // Compile all the case bodies.
1057  for (int i = 0; i < clauses->length(); i++) {
1058  Comment cmnt(masm_, "[ Case body");
1059  CaseClause* clause = clauses->at(i);
1060  __ bind(clause->body_target());
1061  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1062  VisitStatements(clause->statements());
1063  }
1064 
1065  __ bind(nested_statement.break_label());
1066  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1067 }
1068 
1069 
1070 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1071  Comment cmnt(masm_, "[ ForInStatement");
1072  SetStatementPosition(stmt);
1073 
1074  Label loop, exit;
1075  ForIn loop_statement(this, stmt);
1076  increment_loop_depth();
1077 
1078  // Get the object to enumerate over. Both SpiderMonkey and JSC
1079  // ignore null and undefined in contrast to the specification; see
1080  // ECMA-262 section 12.6.4.
1081  VisitForAccumulatorValue(stmt->enumerable());
1082  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1083  __ cmp(r0, ip);
1084  __ b(eq, &exit);
1085  Register null_value = r5;
1086  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1087  __ cmp(r0, null_value);
1088  __ b(eq, &exit);
1089 
1090  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1091 
1092  // Convert the object to a JS object.
1093  Label convert, done_convert;
1094  __ JumpIfSmi(r0, &convert);
1095  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1096  __ b(ge, &done_convert);
1097  __ bind(&convert);
1098  __ push(r0);
1099  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1100  __ bind(&done_convert);
1101  __ push(r0);
1102 
1103  // Check for proxies.
1104  Label call_runtime;
1106  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1107  __ b(le, &call_runtime);
1108 
1109  // Check cache validity in generated code. This is a fast case for
1110  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1111  // guarantee cache validity, call the runtime system to check cache
1112  // validity or get the property names in a fixed array.
1113  __ CheckEnumCache(null_value, &call_runtime);
1114 
1115  // The enum cache is valid. Load the map of the object being
1116  // iterated over and use the cache for the iteration.
1117  Label use_cache;
1118  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1119  __ b(&use_cache);
1120 
1121  // Get the set of properties to enumerate.
1122  __ bind(&call_runtime);
1123  __ push(r0); // Duplicate the enumerable object on the stack.
1124  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1125 
1126  // If we got a map from the runtime call, we can do a fast
1127  // modification check. Otherwise, we got a fixed array, and we have
1128  // to do a slow check.
1129  Label fixed_array;
1130  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1131  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1132  __ cmp(r2, ip);
1133  __ b(ne, &fixed_array);
1134 
1135  // We got a map in register r0. Get the enumeration cache from it.
1136  Label no_descriptors;
1137  __ bind(&use_cache);
1138 
1139  __ EnumLength(r1, r0);
1140  __ cmp(r1, Operand(Smi::FromInt(0)));
1141  __ b(eq, &no_descriptors);
1142 
1143  __ LoadInstanceDescriptors(r0, r2);
1144  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1145  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1146 
1147  // Set up the four remaining stack slots.
1148  __ push(r0); // Map.
1149  __ mov(r0, Operand(Smi::FromInt(0)));
1150  // Push enumeration cache, enumeration cache length (as smi) and zero.
1151  __ Push(r2, r1, r0);
1152  __ jmp(&loop);
1153 
1154  __ bind(&no_descriptors);
1155  __ Drop(1);
1156  __ jmp(&exit);
1157 
1158  // We got a fixed array in register r0. Iterate through that.
1159  Label non_proxy;
1160  __ bind(&fixed_array);
1161 
1162  Handle<JSGlobalPropertyCell> cell =
1163  isolate()->factory()->NewJSGlobalPropertyCell(
1164  Handle<Object>(
1165  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1166  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1167  __ LoadHeapObject(r1, cell);
1170 
1171  __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1172  __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1174  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1175  __ b(gt, &non_proxy);
1176  __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1177  __ bind(&non_proxy);
1178  __ Push(r1, r0); // Smi and array
1179  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1180  __ mov(r0, Operand(Smi::FromInt(0)));
1181  __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1182 
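// At this point (summarizing the pushes above) the for-in state occupies the
// five stack slots that the Drop(5) at the end of the statement removes:
//   sp[0 * kPointerSize] : current index (smi)
//   sp[1 * kPointerSize] : length of the enum cache / fixed array (smi)
//   sp[2 * kPointerSize] : enum cache or fixed array of keys
//   sp[3 * kPointerSize] : map of the enumerable (fast case), or Smi 1 for the
//                          slow check and Smi 0 for a proxy
//   sp[4 * kPointerSize] : the enumerable object itself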
1183  // Generate code for doing the condition check.
1184  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1185  __ bind(&loop);
1186  // Load the current count to r0, load the length to r1.
1187  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1188  __ cmp(r0, r1); // Compare to the array length.
1189  __ b(hs, loop_statement.break_label());
1190 
1191  // Get the current entry of the array into register r3.
1192  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1193  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1194  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1195 
1196  // Get the expected map from the stack or a smi in the
1197  // permanent slow case into register r2.
1198  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1199 
1200  // Check if the expected map still matches that of the enumerable.
1201  // If not, we may have to filter the key.
1202  Label update_each;
1203  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1204  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1205  __ cmp(r4, Operand(r2));
1206  __ b(eq, &update_each);
1207 
1208  // For proxies, no filtering is done.
1209  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1210  __ cmp(r2, Operand(Smi::FromInt(0)));
1211  __ b(eq, &update_each);
1212 
1213  // Convert the entry to a string or (smi) 0 if it isn't a property
1214  // any more. If the property has been removed while iterating, we
1215  // just skip it.
1216  __ push(r1); // Enumerable.
1217  __ push(r3); // Current entry.
1218  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1219  __ mov(r3, Operand(r0), SetCC);
1220  __ b(eq, loop_statement.continue_label());
1221 
1222  // Update the 'each' property or variable from the possibly filtered
1223  // entry in register r3.
1224  __ bind(&update_each);
1225  __ mov(result_register(), r3);
1226  // Perform the assignment as if via '='.
1227  { EffectContext context(this);
1228  EmitAssignment(stmt->each());
1229  }
1230 
1231  // Generate code for the body of the loop.
1232  Visit(stmt->body());
1233 
1234  // Generate code for going to the next element by incrementing
1235  // the index (smi) stored on top of the stack.
1236  __ bind(loop_statement.continue_label());
1237  __ pop(r0);
1238  __ add(r0, r0, Operand(Smi::FromInt(1)));
1239  __ push(r0);
1240 
1241  EmitStackCheck(stmt, &loop);
1242  __ b(&loop);
1243 
1244  // Remove the pointers stored on the stack.
1245  __ bind(loop_statement.break_label());
1246  __ Drop(5);
1247 
1248  // Exit and decrement the loop depth.
1249  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1250  __ bind(&exit);
1251  decrement_loop_depth();
1252 }
1253 
1254 
1255 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1256  bool pretenure) {
1257  // Use the fast case closure allocation code that allocates in new
1258  // space for nested functions that don't need literals cloning. If
1259  // we're running with the --always-opt or the --prepare-always-opt
1260  // flag, we need to use the runtime function so that the new function
1261  // we are creating here gets a chance to have its code optimized and
1262  // doesn't just get a copy of the existing unoptimized code.
1263  if (!FLAG_always_opt &&
1264  !FLAG_prepare_always_opt &&
1265  !pretenure &&
1266  scope()->is_function_scope() &&
1267  info->num_literals() == 0) {
1268  FastNewClosureStub stub(info->language_mode());
1269  __ mov(r0, Operand(info));
1270  __ push(r0);
1271  __ CallStub(&stub);
1272  } else {
1273  __ mov(r0, Operand(info));
1274  __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1275  : Heap::kFalseValueRootIndex);
1276  __ Push(cp, r0, r1);
1277  __ CallRuntime(Runtime::kNewClosure, 3);
1278  }
1279  context()->Plug(r0);
1280 }
1281 
1282 
1283 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1284  Comment cmnt(masm_, "[ VariableProxy");
1285  EmitVariableLoad(expr);
1286 }
1287 
1288 
1289 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1290  TypeofState typeof_state,
1291  Label* slow) {
1292  Register current = cp;
1293  Register next = r1;
1294  Register temp = r2;
1295 
1296  Scope* s = scope();
1297  while (s != NULL) {
1298  if (s->num_heap_slots() > 0) {
1299  if (s->calls_non_strict_eval()) {
1300  // Check that extension is NULL.
1301  __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1302  __ tst(temp, temp);
1303  __ b(ne, slow);
1304  }
1305  // Load next context in chain.
1306  __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1307  // Walk the rest of the chain without clobbering cp.
1308  current = next;
1309  }
1310  // If no outer scope calls eval, we do not need to check more
1311  // context extensions.
1312  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1313  s = s->outer_scope();
1314  }
1315 
1316  if (s->is_eval_scope()) {
1317  Label loop, fast;
1318  if (!current.is(next)) {
1319  __ Move(next, current);
1320  }
1321  __ bind(&loop);
1322  // Terminate at native context.
1323  __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1324  __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1325  __ cmp(temp, ip);
1326  __ b(eq, &fast);
1327  // Check that extension is NULL.
1328  __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1329  __ tst(temp, temp);
1330  __ b(ne, slow);
1331  // Load next context in chain.
1332  __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1333  __ b(&loop);
1334  __ bind(&fast);
1335  }
1336 
1337  __ ldr(r0, GlobalObjectOperand());
1338  __ mov(r2, Operand(var->name()));
1339  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1340  ? RelocInfo::CODE_TARGET
1341  : RelocInfo::CODE_TARGET_CONTEXT;
1342  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1343  CallIC(ic, mode);
1344 }
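// The walk above, in outline: for every enclosing scope that both allocates a
// context and calls a non-strict eval, check that the context's extension slot
// is still NULL (a non-NULL extension means eval may have introduced a
// shadowing binding, so fall back to the slow path); if the chain ends in an
// eval scope, keep checking the remaining contexts up to the native context
// before doing the contextual global load.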
1345 
1346 
1347 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1348  Label* slow) {
1349  ASSERT(var->IsContextSlot());
1350  Register context = cp;
1351  Register next = r3;
1352  Register temp = r4;
1353 
1354  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1355  if (s->num_heap_slots() > 0) {
1356  if (s->calls_non_strict_eval()) {
1357  // Check that extension is NULL.
1358  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1359  __ tst(temp, temp);
1360  __ b(ne, slow);
1361  }
1362  __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1363  // Walk the rest of the chain without clobbering cp.
1364  context = next;
1365  }
1366  }
1367  // Check that last extension is NULL.
1368  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1369  __ tst(temp, temp);
1370  __ b(ne, slow);
1371 
1372  // This function is used only for loads, not stores, so it's safe to
1373  // return an cp-based operand (the write barrier cannot be allowed to
1374  // destroy the cp register).
1375  return ContextOperand(context, var->index());
1376 }
1377 
1378 
1379 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1380  TypeofState typeof_state,
1381  Label* slow,
1382  Label* done) {
1383  // Generate fast-case code for variables that might be shadowed by
1384  // eval-introduced variables. Eval is used a lot without
1385  // introducing variables. In those cases, we do not want to
1386  // perform a runtime call for all variables in the scope
1387  // containing the eval.
1388  if (var->mode() == DYNAMIC_GLOBAL) {
1389  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1390  __ jmp(done);
1391  } else if (var->mode() == DYNAMIC_LOCAL) {
1392  Variable* local = var->local_if_not_shadowed();
1393  __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1394  if (local->mode() == CONST ||
1395  local->mode() == CONST_HARMONY ||
1396  local->mode() == LET) {
1397  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1398  if (local->mode() == CONST) {
1399  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1400  } else { // LET || CONST_HARMONY
1401  __ b(ne, done);
1402  __ mov(r0, Operand(var->name()));
1403  __ push(r0);
1404  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1405  }
1406  }
1407  __ jmp(done);
1408  }
1409 }
1410 
1411 
1412 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1413  // Record position before possible IC call.
1414  SetSourcePosition(proxy->position());
1415  Variable* var = proxy->var();
1416 
1417  // Three cases: global variables, lookup variables, and all other types of
1418  // variables.
1419  switch (var->location()) {
1420  case Variable::UNALLOCATED: {
1421  Comment cmnt(masm_, "Global variable");
1422  // Use inline caching. Variable name is passed in r2 and the global
1423  // object (receiver) in r0.
1424  __ ldr(r0, GlobalObjectOperand());
1425  __ mov(r2, Operand(var->name()));
1426  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1427  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1428  context()->Plug(r0);
1429  break;
1430  }
1431 
1432  case Variable::PARAMETER:
1433  case Variable::LOCAL:
1434  case Variable::CONTEXT: {
1435  Comment cmnt(masm_, var->IsContextSlot()
1436  ? "Context variable"
1437  : "Stack variable");
1438  if (var->binding_needs_init()) {
1439  // var->scope() may be NULL when the proxy is located in eval code and
1440  // refers to a potential outside binding. Currently those bindings are
1441  // always looked up dynamically, i.e. in that case
1442  // var->location() == LOOKUP.
1443  // always holds.
1444  ASSERT(var->scope() != NULL);
1445 
1446  // Check if the binding really needs an initialization check. The check
1447  // can be skipped in the following situation: we have a LET or CONST
1448  // binding in harmony mode, both the Variable and the VariableProxy have
1449  // the same declaration scope (i.e. they are both in global code, in the
1450  // same function or in the same eval code) and the VariableProxy is in
1451  // the source physically located after the initializer of the variable.
1452  //
1453  // We cannot skip any initialization checks for CONST in non-harmony
1454  // mode because const variables may be declared but never initialized:
1455  // if (false) { const x; }; var y = x;
1456  //
1457  // The condition on the declaration scopes is a conservative check for
1458  // nested functions that access a binding and are called before the
1459  // binding is initialized:
1460  // function() { f(); let x = 1; function f() { x = 2; } }
1461  //
1462  bool skip_init_check;
1463  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1464  skip_init_check = false;
1465  } else {
1466  // Check that we always have valid source position.
1467  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1468  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1469  skip_init_check = var->mode() != CONST &&
1470  var->initializer_position() < proxy->position();
1471  }
1472 
1473  if (!skip_init_check) {
1474  // Let and const need a read barrier.
1475  GetVar(r0, var);
1476  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1477  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1478  // Throw a reference error when using an uninitialized let/const
1479  // binding in harmony mode.
1480  Label done;
1481  __ b(ne, &done);
1482  __ mov(r0, Operand(var->name()));
1483  __ push(r0);
1484  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1485  __ bind(&done);
1486  } else {
1487  // Uninitialized const bindings outside of harmony mode are unholed.
1488  ASSERT(var->mode() == CONST);
1489  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1490  }
1491  context()->Plug(r0);
1492  break;
1493  }
1494  }
1495  context()->Plug(var);
1496  break;
1497  }
1498 
1499  case Variable::LOOKUP: {
1500  Label done, slow;
1501  // Generate code for loading from variables potentially shadowed
1502  // by eval-introduced variables.
1503  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1504  __ bind(&slow);
1505  Comment cmnt(masm_, "Lookup variable");
1506  __ mov(r1, Operand(var->name()));
1507  __ Push(cp, r1); // Context and name.
1508  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1509  __ bind(&done);
1510  context()->Plug(r0);
1511  }
1512  }
1513 }
1514 
1515 
1516 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1517  Comment cmnt(masm_, "[ RegExpLiteral");
1518  Label materialized;
1519  // Registers will be used as follows:
1520  // r5 = materialized value (RegExp literal)
1521  // r4 = JS function, literals array
1522  // r3 = literal index
1523  // r2 = RegExp pattern
1524  // r1 = RegExp flags
1525  // r0 = RegExp literal clone
1526  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1527  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1528  int literal_offset =
1529  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1530  __ ldr(r5, FieldMemOperand(r4, literal_offset));
1531  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1532  __ cmp(r5, ip);
1533  __ b(ne, &materialized);
1534 
1535  // Create regexp literal using runtime function.
1536  // Result will be in r0.
1537  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1538  __ mov(r2, Operand(expr->pattern()));
1539  __ mov(r1, Operand(expr->flags()));
1540  __ Push(r4, r3, r2, r1);
1541  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1542  __ mov(r5, r0);
1543 
1544  __ bind(&materialized);
1545  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1546  Label allocated, runtime_allocate;
1547  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1548  __ jmp(&allocated);
1549 
1550  __ bind(&runtime_allocate);
1551  __ push(r5);
1552  __ mov(r0, Operand(Smi::FromInt(size)));
1553  __ push(r0);
1554  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1555  __ pop(r5);
1556 
1557  __ bind(&allocated);
1558  // After this, registers are used as follows:
1559  // r0: Newly allocated regexp.
1560  // r5: Materialized regexp.
1561  // r2: temp.
1562  __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
1563  context()->Plug(r0);
1564 }
1565 
1566 
1567 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1568  if (expression == NULL) {
1569  __ LoadRoot(r1, Heap::kNullValueRootIndex);
1570  __ push(r1);
1571  } else {
1572  VisitForStackValue(expression);
1573  }
1574 }
1575 
1576 
1577 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1578  Comment cmnt(masm_, "[ ObjectLiteral");
1579  Handle<FixedArray> constant_properties = expr->constant_properties();
1580  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1581  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1582  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1583  __ mov(r1, Operand(constant_properties));
1584  int flags = expr->fast_elements()
1585      ? ObjectLiteral::kFastElements
1586      : ObjectLiteral::kNoFlags;
1587  flags |= expr->has_function()
1588      ? ObjectLiteral::kHasFunction
1589      : ObjectLiteral::kNoFlags;
1590  __ mov(r0, Operand(Smi::FromInt(flags)));
1591  __ Push(r3, r2, r1, r0);
1592  int properties_count = constant_properties->length() / 2;
1593  if (expr->depth() > 1) {
1594  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1595  } else if (flags != ObjectLiteral::kFastElements ||
1596  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1597  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1598  } else {
1599  FastCloneShallowObjectStub stub(properties_count);
1600  __ CallStub(&stub);
1601  }
1602 
1603  // If result_saved is true the result is on top of the stack. If
1604  // result_saved is false the result is in r0.
1605  bool result_saved = false;
1606 
1607  // Mark all computed expressions that are bound to a key that
1608  // is shadowed by a later occurrence of the same key. For the
1609  // marked expressions, no store code is emitted.
1610  expr->CalculateEmitStore(zone());
1611 
1612  AccessorTable accessor_table(zone());
1613  for (int i = 0; i < expr->properties()->length(); i++) {
1614  ObjectLiteral::Property* property = expr->properties()->at(i);
1615  if (property->IsCompileTimeValue()) continue;
1616 
1617  Literal* key = property->key();
1618  Expression* value = property->value();
1619  if (!result_saved) {
1620  __ push(r0); // Save result on stack
1621  result_saved = true;
1622  }
1623  switch (property->kind()) {
1624  case ObjectLiteral::Property::CONSTANT:
1625  UNREACHABLE();
1626  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1627  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1628  // Fall through.
1629  case ObjectLiteral::Property::COMPUTED:
1630  if (key->handle()->IsSymbol()) {
1631  if (property->emit_store()) {
1632  VisitForAccumulatorValue(value);
1633  __ mov(r2, Operand(key->handle()));
1634  __ ldr(r1, MemOperand(sp));
1635  Handle<Code> ic = is_classic_mode()
1636  ? isolate()->builtins()->StoreIC_Initialize()
1637  : isolate()->builtins()->StoreIC_Initialize_Strict();
1638  CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1639  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1640  } else {
1641  VisitForEffect(value);
1642  }
1643  break;
1644  }
1645  // Fall through.
1646  case ObjectLiteral::Property::PROTOTYPE:
1647  // Duplicate receiver on stack.
1648  __ ldr(r0, MemOperand(sp));
1649  __ push(r0);
1650  VisitForStackValue(key);
1651  VisitForStackValue(value);
1652  if (property->emit_store()) {
1653  __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1654  __ push(r0);
1655  __ CallRuntime(Runtime::kSetProperty, 4);
1656  } else {
1657  __ Drop(3);
1658  }
1659  break;
1660  case ObjectLiteral::Property::GETTER:
1661  accessor_table.lookup(key)->second->getter = value;
1662  break;
1663  case ObjectLiteral::Property::SETTER:
1664  accessor_table.lookup(key)->second->setter = value;
1665  break;
1666  }
1667  }
1668 
1669  // Emit code to define accessors, using only a single call to the runtime for
1670  // each pair of corresponding getters and setters.
1671  for (AccessorTable::Iterator it = accessor_table.begin();
1672  it != accessor_table.end();
1673  ++it) {
1674  __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1675  __ push(r0);
1676  VisitForStackValue(it->first);
1677  EmitAccessor(it->second->getter);
1678  EmitAccessor(it->second->setter);
1679  __ mov(r0, Operand(Smi::FromInt(NONE)));
1680  __ push(r0);
1681  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1682  }
1683 
1684  if (expr->has_function()) {
1685  ASSERT(result_saved);
1686  __ ldr(r0, MemOperand(sp));
1687  __ push(r0);
1688  __ CallRuntime(Runtime::kToFastProperties, 1);
1689  }
1690 
1691  if (result_saved) {
1692  context()->PlugTOS();
1693  } else {
1694  context()->Plug(r0);
1695  }
1696 }
1697 
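The loop above only records getter and setter literals in the accessor table; the pairing is what lets Runtime::kDefineOrRedefineAccessorProperty be invoked once per property name instead of once per component. A minimal sketch of that grouping, using a plain std::map and string placeholders (hypothetical stand-ins, not V8's AccessorTable type):

#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for the getter/setter expressions; an empty string
// plays the role of the null value pushed by EmitAccessor for a missing half.
struct AccessorPair {
  std::string getter;
  std::string setter;
};

int main() {
  // { get x() {}, set x(v) {}, get y() {} }  -> two table entries, not three.
  std::map<std::string, AccessorPair> table;
  table["x"].getter = "get x";
  table["x"].setter = "set x";
  table["y"].getter = "get y";
  for (const auto& entry : table) {
    // One define-accessor operation per property name covers both halves.
    std::cout << entry.first << ": ["
              << (entry.second.getter.empty() ? "null" : entry.second.getter)
              << ", "
              << (entry.second.setter.empty() ? "null" : entry.second.setter)
              << "]\n";
  }
  return 0;
}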
1698 
1699 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1700  Comment cmnt(masm_, "[ ArrayLiteral");
1701 
1702  ZoneList<Expression*>* subexprs = expr->values();
1703  int length = subexprs->length();
1704  Handle<FixedArray> constant_elements = expr->constant_elements();
1705  ASSERT_EQ(2, constant_elements->length());
1706  ElementsKind constant_elements_kind =
1707  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1708  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1709  Handle<FixedArrayBase> constant_elements_values(
1710  FixedArrayBase::cast(constant_elements->get(1)));
1711 
1714  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1715  __ mov(r1, Operand(constant_elements));
1716  __ Push(r3, r2, r1);
1717  if (has_fast_elements && constant_elements_values->map() ==
1718  isolate()->heap()->fixed_cow_array_map()) {
1719  FastCloneShallowArrayStub stub(
1720      FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1721  __ CallStub(&stub);
1722  __ IncrementCounter(
1723  isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1724  } else if (expr->depth() > 1) {
1725  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1726  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1727  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1728  } else {
1729  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1730  FLAG_smi_only_arrays);
1731  FastCloneShallowArrayStub::Mode mode = has_fast_elements
1732      ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1733      : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1734  FastCloneShallowArrayStub stub(mode, length);
1735  __ CallStub(&stub);
1736  }
1737 
1738  bool result_saved = false; // Is the result saved to the stack?
1739 
1740  // Emit code to evaluate all the non-constant subexpressions and to store
1741  // them into the newly cloned array.
1742  for (int i = 0; i < length; i++) {
1743  Expression* subexpr = subexprs->at(i);
1744  // If the subexpression is a literal or a simple materialized literal it
1745  // is already set in the cloned array.
1746  if (subexpr->AsLiteral() != NULL ||
1747      CompileTimeValue::IsCompileTimeValue(subexpr)) {
1748  continue;
1749  }
1750 
1751  if (!result_saved) {
1752  __ push(r0);
1753  result_saved = true;
1754  }
1755  VisitForAccumulatorValue(subexpr);
1756 
1757  if (IsFastObjectElementsKind(constant_elements_kind)) {
1758  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1759  __ ldr(r6, MemOperand(sp)); // Copy of array literal.
1761  __ str(result_register(), FieldMemOperand(r1, offset));
1762  // Update the write barrier for the array store.
1763  __ RecordWriteField(r1, offset, result_register(), r2,
1764      kLRHasBeenSaved, kDontSaveFPRegs,
1765      EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1766  } else {
1767  __ ldr(r1, MemOperand(sp)); // Copy of array literal.
1769  __ mov(r3, Operand(Smi::FromInt(i)));
1770  __ mov(r4, Operand(Smi::FromInt(expr->literal_index())));
1771  StoreArrayLiteralElementStub stub;
1772  __ CallStub(&stub);
1773  }
1774 
1775  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1776  }
1777 
1778  if (result_saved) {
1779  context()->PlugTOS();
1780  } else {
1781  context()->Plug(r0);
1782  }
1783 }
1784 
1785 
1786 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1787  Comment cmnt(masm_, "[ Assignment");
1788  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1789  // on the left-hand side.
1790  if (!expr->target()->IsValidLeftHandSide()) {
1791  VisitForEffect(expr->target());
1792  return;
1793  }
1794 
1795  // Left-hand side can only be a property, a global or a (parameter or local)
1796  // slot.
1797  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1798  LhsKind assign_type = VARIABLE;
1799  Property* property = expr->target()->AsProperty();
1800  if (property != NULL) {
1801  assign_type = (property->key()->IsPropertyName())
1802  ? NAMED_PROPERTY
1803  : KEYED_PROPERTY;
1804  }
1805 
1806  // Evaluate LHS expression.
1807  switch (assign_type) {
1808  case VARIABLE:
1809  // Nothing to do here.
1810  break;
1811  case NAMED_PROPERTY:
1812  if (expr->is_compound()) {
1813  // We need the receiver both on the stack and in the accumulator.
1814  VisitForAccumulatorValue(property->obj());
1815  __ push(result_register());
1816  } else {
1817  VisitForStackValue(property->obj());
1818  }
1819  break;
1820  case KEYED_PROPERTY:
1821  if (expr->is_compound()) {
1822  VisitForStackValue(property->obj());
1823  VisitForAccumulatorValue(property->key());
1824  __ ldr(r1, MemOperand(sp, 0));
1825  __ push(r0);
1826  } else {
1827  VisitForStackValue(property->obj());
1828  VisitForStackValue(property->key());
1829  }
1830  break;
1831  }
1832 
1833  // For compound assignments we need another deoptimization point after the
1834  // variable/property load.
1835  if (expr->is_compound()) {
1836  { AccumulatorValueContext context(this);
1837  switch (assign_type) {
1838  case VARIABLE:
1839  EmitVariableLoad(expr->target()->AsVariableProxy());
1840  PrepareForBailout(expr->target(), TOS_REG);
1841  break;
1842  case NAMED_PROPERTY:
1843  EmitNamedPropertyLoad(property);
1844  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1845  break;
1846  case KEYED_PROPERTY:
1847  EmitKeyedPropertyLoad(property);
1848  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1849  break;
1850  }
1851  }
1852 
1853  Token::Value op = expr->binary_op();
1854  __ push(r0); // Left operand goes on the stack.
1855  VisitForAccumulatorValue(expr->value());
1856 
1857  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1858  ? OVERWRITE_RIGHT
1859  : NO_OVERWRITE;
1860  SetSourcePosition(expr->position() + 1);
1861  AccumulatorValueContext context(this);
1862  if (ShouldInlineSmiCase(op)) {
1863  EmitInlineSmiBinaryOp(expr->binary_operation(),
1864  op,
1865  mode,
1866  expr->target(),
1867  expr->value());
1868  } else {
1869  EmitBinaryOp(expr->binary_operation(), op, mode);
1870  }
1871 
1872  // Deoptimization point in case the binary operation may have side effects.
1873  PrepareForBailout(expr->binary_operation(), TOS_REG);
1874  } else {
1875  VisitForAccumulatorValue(expr->value());
1876  }
1877 
1878  // Record source position before possible IC call.
1879  SetSourcePosition(expr->position());
1880 
1881  // Store the value.
1882  switch (assign_type) {
1883  case VARIABLE:
1884  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1885  expr->op());
1886  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1887  context()->Plug(r0);
1888  break;
1889  case NAMED_PROPERTY:
1890  EmitNamedPropertyAssignment(expr);
1891  break;
1892  case KEYED_PROPERTY:
1893  EmitKeyedPropertyAssignment(expr);
1894  break;
1895  }
1896 }
1897 
1898 
1899 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1900  SetSourcePosition(prop->position());
1901  Literal* key = prop->key()->AsLiteral();
1902  __ mov(r2, Operand(key->handle()));
1903  // Call load IC. It has arguments receiver and property name in r0 and r2.
1904  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1905  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1906 }
1907 
1908 
1909 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1910  SetSourcePosition(prop->position());
1911  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1912  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1913  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1914 }
1915 
1916 
1917 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1918  Token::Value op,
1919  OverwriteMode mode,
1920  Expression* left_expr,
1921  Expression* right_expr) {
1922  Label done, smi_case, stub_call;
1923 
1924  Register scratch1 = r2;
1925  Register scratch2 = r3;
1926 
1927  // Get the arguments.
1928  Register left = r1;
1929  Register right = r0;
1930  __ pop(left);
1931 
1932  // Perform combined smi check on both operands.
1933  __ orr(scratch1, left, Operand(right));
1934  STATIC_ASSERT(kSmiTag == 0);
1935  JumpPatchSite patch_site(masm_);
1936  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1937 
1938  __ bind(&stub_call);
1939  BinaryOpStub stub(op, mode);
1940  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1941  expr->BinaryOperationFeedbackId());
1942  patch_site.EmitPatchInfo();
1943  __ jmp(&done);
1944 
1945  __ bind(&smi_case);
1946  // Smi case. This code works the same way as the smi-smi case in the type
1947  // recording binary operation stub, see
1948  // BinaryOpStub::GenerateSmiSmiOperation for comments.
1949  switch (op) {
1950  case Token::SAR:
1951  __ b(&stub_call);
1952  __ GetLeastBitsFromSmi(scratch1, right, 5);
1953  __ mov(right, Operand(left, ASR, scratch1));
1954  __ bic(right, right, Operand(kSmiTagMask));
1955  break;
1956  case Token::SHL: {
1957  __ b(&stub_call);
1958  __ SmiUntag(scratch1, left);
1959  __ GetLeastBitsFromSmi(scratch2, right, 5);
1960  __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1961  __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
1962  __ b(mi, &stub_call);
1963  __ SmiTag(right, scratch1);
1964  break;
1965  }
1966  case Token::SHR: {
1967  __ b(&stub_call);
1968  __ SmiUntag(scratch1, left);
1969  __ GetLeastBitsFromSmi(scratch2, right, 5);
1970  __ mov(scratch1, Operand(scratch1, LSR, scratch2));
1971  __ tst(scratch1, Operand(0xc0000000));
1972  __ b(ne, &stub_call);
1973  __ SmiTag(right, scratch1);
1974  break;
1975  }
1976  case Token::ADD:
1977  __ add(scratch1, left, Operand(right), SetCC);
1978  __ b(vs, &stub_call);
1979  __ mov(right, scratch1);
1980  break;
1981  case Token::SUB:
1982  __ sub(scratch1, left, Operand(right), SetCC);
1983  __ b(vs, &stub_call);
1984  __ mov(right, scratch1);
1985  break;
1986  case Token::MUL: {
1987  __ SmiUntag(ip, right);
1988  __ smull(scratch1, scratch2, left, ip);
1989  __ mov(ip, Operand(scratch1, ASR, 31));
1990  __ cmp(ip, Operand(scratch2));
1991  __ b(ne, &stub_call);
1992  __ cmp(scratch1, Operand(0));
1993  __ mov(right, Operand(scratch1), LeaveCC, ne);
1994  __ b(ne, &done);
1995  __ add(scratch2, right, Operand(left), SetCC);
1996  __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
1997  __ b(mi, &stub_call);
1998  break;
1999  }
2000  case Token::BIT_OR:
2001  __ orr(right, left, Operand(right));
2002  break;
2003  case Token::BIT_AND:
2004  __ and_(right, left, Operand(right));
2005  break;
2006  case Token::BIT_XOR:
2007  __ eor(right, left, Operand(right));
2008  break;
2009  default:
2010  UNREACHABLE();
2011  }
2012 
2013  __ bind(&done);
2014  context()->Plug(r0);
2015 }
2016 
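The smi fast path above operates directly on the tagged values. A small sketch of the same arithmetic, assuming 32-bit smis with a zero low tag bit and using the GCC/Clang __builtin_add_overflow intrinsic in place of the V-flag check (names here are illustrative, not V8's):

#include <cstdint>
#include <optional>

// 31-bit payload, low tag bit is 0 (kSmiTag == 0, kSmiTagSize == 1).
constexpr int kSmiTagSize = 1;

int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }

// Tagged addition: adding the tagged words adds the payloads, because both
// tag bits are zero. Overflow (the V flag in the generated code, modelled
// here with the intrinsic) falls back to the stub path, signalled by nullopt.
std::optional<int32_t> SmiAdd(int32_t left, int32_t right) {
  int32_t result;
  if (__builtin_add_overflow(left, right, &result)) return std::nullopt;
  return result;
}

// Tagged multiplication: one operand is untagged first so the product keeps a
// single tag shift; a zero product whose true sign is negative (-0) cannot be
// represented as a smi, matching the pl/mi check in the MUL case above.
std::optional<int32_t> SmiMul(int32_t left, int32_t right) {
  int64_t wide = static_cast<int64_t>(left) * SmiUntag(right);
  if (wide != static_cast<int32_t>(wide)) return std::nullopt;
  int32_t product = static_cast<int32_t>(wide);
  if (product == 0 && left + right < 0) return std::nullopt;
  return product;
}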
2017 
2018 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2019  Token::Value op,
2020  OverwriteMode mode) {
2021  __ pop(r1);
2022  BinaryOpStub stub(op, mode);
2023  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2024  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
2025  expr->BinaryOperationFeedbackId());
2026  patch_site.EmitPatchInfo();
2027  context()->Plug(r0);
2028 }
2029 
2030 
2031 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2032  // Invalid left-hand sides are rewritten to have a 'throw
2033  // ReferenceError' on the left-hand side.
2034  if (!expr->IsValidLeftHandSide()) {
2035  VisitForEffect(expr);
2036  return;
2037  }
2038 
2039  // Left-hand side can only be a property, a global or a (parameter or local)
2040  // slot.
2041  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2042  LhsKind assign_type = VARIABLE;
2043  Property* prop = expr->AsProperty();
2044  if (prop != NULL) {
2045  assign_type = (prop->key()->IsPropertyName())
2046  ? NAMED_PROPERTY
2047  : KEYED_PROPERTY;
2048  }
2049 
2050  switch (assign_type) {
2051  case VARIABLE: {
2052  Variable* var = expr->AsVariableProxy()->var();
2053  EffectContext context(this);
2054  EmitVariableAssignment(var, Token::ASSIGN);
2055  break;
2056  }
2057  case NAMED_PROPERTY: {
2058  __ push(r0); // Preserve value.
2059  VisitForAccumulatorValue(prop->obj());
2060  __ mov(r1, r0);
2061  __ pop(r0); // Restore value.
2062  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
2063  Handle<Code> ic = is_classic_mode()
2064  ? isolate()->builtins()->StoreIC_Initialize()
2065  : isolate()->builtins()->StoreIC_Initialize_Strict();
2066  CallIC(ic);
2067  break;
2068  }
2069  case KEYED_PROPERTY: {
2070  __ push(r0); // Preserve value.
2071  VisitForStackValue(prop->obj());
2072  VisitForAccumulatorValue(prop->key());
2073  __ mov(r1, r0);
2074  __ pop(r2);
2075  __ pop(r0); // Restore value.
2076  Handle<Code> ic = is_classic_mode()
2077  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2078  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2079  CallIC(ic);
2080  break;
2081  }
2082  }
2083  context()->Plug(r0);
2084 }
2085 
2086 
2087 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2088  Token::Value op) {
2089  if (var->IsUnallocated()) {
2090  // Global var, const, or let.
2091  __ mov(r2, Operand(var->name()));
2092  __ ldr(r1, GlobalObjectOperand());
2093  Handle<Code> ic = is_classic_mode()
2094  ? isolate()->builtins()->StoreIC_Initialize()
2095  : isolate()->builtins()->StoreIC_Initialize_Strict();
2096  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2097 
2098  } else if (op == Token::INIT_CONST) {
2099  // Const initializers need a write barrier.
2100  ASSERT(!var->IsParameter()); // No const parameters.
2101  if (var->IsStackLocal()) {
2102  Label skip;
2103  __ ldr(r1, StackOperand(var));
2104  __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2105  __ b(ne, &skip);
2106  __ str(result_register(), StackOperand(var));
2107  __ bind(&skip);
2108  } else {
2109  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2110  // Like var declarations, const declarations are hoisted to function
2111  // scope. However, unlike var initializers, const initializers are
2112  // able to drill a hole to that function context, even from inside a
2113  // 'with' context. We thus bypass the normal static scope lookup for
2114  // var->IsContextSlot().
2115  __ push(r0);
2116  __ mov(r0, Operand(var->name()));
2117  __ Push(cp, r0); // Context and name.
2118  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2119  }
2120 
2121  } else if (var->mode() == LET && op != Token::INIT_LET) {
2122  // Non-initializing assignment to let variable needs a write barrier.
2123  if (var->IsLookupSlot()) {
2124  __ push(r0); // Value.
2125  __ mov(r1, Operand(var->name()));
2126  __ mov(r0, Operand(Smi::FromInt(language_mode())));
2127  __ Push(cp, r1, r0); // Context, name, strict mode.
2128  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2129  } else {
2130  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2131  Label assign;
2132  MemOperand location = VarOperand(var, r1);
2133  __ ldr(r3, location);
2134  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2135  __ b(ne, &assign);
2136  __ mov(r3, Operand(var->name()));
2137  __ push(r3);
2138  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2139  // Perform the assignment.
2140  __ bind(&assign);
2141  __ str(result_register(), location);
2142  if (var->IsContextSlot()) {
2143  // RecordWrite may destroy all its register arguments.
2144  __ mov(r3, result_register());
2145  int offset = Context::SlotOffset(var->index());
2146  __ RecordWriteContextSlot(
2147  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2148  }
2149  }
2150 
2151  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2152  // Assignment to var or initializing assignment to let/const
2153  // in harmony mode.
2154  if (var->IsStackAllocated() || var->IsContextSlot()) {
2155  MemOperand location = VarOperand(var, r1);
2156  if (generate_debug_code_ && op == Token::INIT_LET) {
2157  // Check for an uninitialized let binding.
2158  __ ldr(r2, location);
2159  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2160  __ Check(eq, "Let binding re-initialization.");
2161  }
2162  // Perform the assignment.
2163  __ str(r0, location);
2164  if (var->IsContextSlot()) {
2165  __ mov(r3, r0);
2166  int offset = Context::SlotOffset(var->index());
2167  __ RecordWriteContextSlot(
2168  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2169  }
2170  } else {
2171  ASSERT(var->IsLookupSlot());
2172  __ push(r0); // Value.
2173  __ mov(r1, Operand(var->name()));
2174  __ mov(r0, Operand(Smi::FromInt(language_mode())));
2175  __ Push(cp, r1, r0); // Context, name, strict mode.
2176  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2177  }
2178  }
2179  // Non-initializing assignments to consts are ignored.
2180 }
2181 
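The hole check in the let branch above is what turns a non-initializing assignment to an uninitialized let binding into a ReferenceError. A rough model of that behaviour, with a hypothetical Slot type standing in for a context or stack slot:

#include <stdexcept>

// Hypothetical model of a variable slot: 'hole' marks a let binding that has
// been allocated but not yet initialized.
struct Slot {
  bool is_hole = true;
  int value = 0;
};

// Non-initializing assignment (op != Token::INIT_LET): check for the hole
// first, mirroring the CompareRoot / kThrowReferenceError sequence above.
void AssignToLet(Slot& slot, int value) {
  if (slot.is_hole) throw std::runtime_error("ReferenceError");
  slot.value = value;
}

// The initializing assignment clears the hole without any check.
void InitializeLet(Slot& slot, int value) {
  slot.is_hole = false;
  slot.value = value;
}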
2182 
2183 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2184  // Assignment to a property, using a named store IC.
2185  Property* prop = expr->target()->AsProperty();
2186  ASSERT(prop != NULL);
2187  ASSERT(prop->key()->AsLiteral() != NULL);
2188 
2189  // Record source code position before IC call.
2190  SetSourcePosition(expr->position());
2191  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
2192  __ pop(r1);
2193 
2194  Handle<Code> ic = is_classic_mode()
2195  ? isolate()->builtins()->StoreIC_Initialize()
2196  : isolate()->builtins()->StoreIC_Initialize_Strict();
2197  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2198 
2199  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2200  context()->Plug(r0);
2201 }
2202 
2203 
2204 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2205  // Assignment to a property, using a keyed store IC.
2206 
2207  // Record source code position before IC call.
2208  SetSourcePosition(expr->position());
2209  __ pop(r1); // Key.
2210  __ pop(r2);
2211 
2212  Handle<Code> ic = is_classic_mode()
2213  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2214  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2215  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2216 
2217  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2218  context()->Plug(r0);
2219 }
2220 
2221 
2222 void FullCodeGenerator::VisitProperty(Property* expr) {
2223  Comment cmnt(masm_, "[ Property");
2224  Expression* key = expr->key();
2225 
2226  if (key->IsPropertyName()) {
2227  VisitForAccumulatorValue(expr->obj());
2228  EmitNamedPropertyLoad(expr);
2229  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2230  context()->Plug(r0);
2231  } else {
2232  VisitForStackValue(expr->obj());
2233  VisitForAccumulatorValue(expr->key());
2234  __ pop(r1);
2235  EmitKeyedPropertyLoad(expr);
2236  context()->Plug(r0);
2237  }
2238 }
2239 
2240 
2241 void FullCodeGenerator::CallIC(Handle<Code> code,
2242  RelocInfo::Mode rmode,
2243  TypeFeedbackId ast_id) {
2244  ic_total_count_++;
2245  // All calls must have a predictable size in full-codegen code to ensure that
2246  // the debugger can patch them correctly.
2247  __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
2248 }
2249 
2250 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2251  Handle<Object> name,
2252  RelocInfo::Mode mode) {
2253  // Code common for calls using the IC.
2254  ZoneList<Expression*>* args = expr->arguments();
2255  int arg_count = args->length();
2256  { PreservePositionScope scope(masm()->positions_recorder());
2257  for (int i = 0; i < arg_count; i++) {
2258  VisitForStackValue(args->at(i));
2259  }
2260  __ mov(r2, Operand(name));
2261  }
2262  // Record source position for debugger.
2263  SetSourcePosition(expr->position());
2264  // Call the IC initialization code.
2265  Handle<Code> ic =
2266  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2267  CallIC(ic, mode, expr->CallFeedbackId());
2268  RecordJSReturnSite(expr);
2269  // Restore context register.
2270  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2271  context()->Plug(r0);
2272 }
2273 
2274 
2275 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2276  Expression* key) {
2277  // Load the key.
2278  VisitForAccumulatorValue(key);
2279 
2280  // Swap the name of the function and the receiver on the stack to follow
2281  // the calling convention for call ICs.
2282  __ pop(r1);
2283  __ push(r0);
2284  __ push(r1);
2285 
2286  // Code common for calls using the IC.
2287  ZoneList<Expression*>* args = expr->arguments();
2288  int arg_count = args->length();
2289  { PreservePositionScope scope(masm()->positions_recorder());
2290  for (int i = 0; i < arg_count; i++) {
2291  VisitForStackValue(args->at(i));
2292  }
2293  }
2294  // Record source position for debugger.
2295  SetSourcePosition(expr->position());
2296  // Call the IC initialization code.
2297  Handle<Code> ic =
2298  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2299  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2300  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2301  RecordJSReturnSite(expr);
2302  // Restore context register.
2303  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2304  context()->DropAndPlug(1, r0); // Drop the key still on the stack.
2305 }
2306 
2307 
2308 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2309  // Code common for calls using the call stub.
2310  ZoneList<Expression*>* args = expr->arguments();
2311  int arg_count = args->length();
2312  { PreservePositionScope scope(masm()->positions_recorder());
2313  for (int i = 0; i < arg_count; i++) {
2314  VisitForStackValue(args->at(i));
2315  }
2316  }
2317  // Record source position for debugger.
2318  SetSourcePosition(expr->position());
2319 
2320  // Record call targets in unoptimized code.
2321  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2322  Handle<Object> uninitialized =
2323      TypeFeedbackCells::UninitializedSentinel(isolate());
2324  Handle<JSGlobalPropertyCell> cell =
2325  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2326  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2327  __ mov(r2, Operand(cell));
2328 
2329  CallFunctionStub stub(arg_count, flags);
2330  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2331  __ CallStub(&stub);
2332  RecordJSReturnSite(expr);
2333  // Restore context register.
2334  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2335  context()->DropAndPlug(1, r0);
2336 }
2337 
2338 
2339 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2340  // Push copy of the first argument or undefined if it doesn't exist.
2341  if (arg_count > 0) {
2342  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2343  } else {
2344  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2345  }
2346  __ push(r1);
2347 
2348  // Push the receiver of the enclosing function.
2349  int receiver_offset = 2 + info_->scope()->num_parameters();
2350  __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2351  __ push(r1);
2352  // Push the language mode.
2353  __ mov(r1, Operand(Smi::FromInt(language_mode())));
2354  __ push(r1);
2355 
2356  // Push the start position of the scope the call resides in.
2357  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2358  __ push(r1);
2359 
2360  // Do the runtime call.
2361  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2362 }
2363 
2364 
2365 void FullCodeGenerator::VisitCall(Call* expr) {
2366 #ifdef DEBUG
2367  // We want to verify that RecordJSReturnSite gets called on all paths
2368  // through this function. Avoid early returns.
2369  expr->return_is_recorded_ = false;
2370 #endif
2371 
2372  Comment cmnt(masm_, "[ Call");
2373  Expression* callee = expr->expression();
2374  VariableProxy* proxy = callee->AsVariableProxy();
2375  Property* property = callee->AsProperty();
2376 
2377  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2378  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2379  // resolve the function we need to call and the receiver of the
2380  // call. Then we call the resolved function using the given
2381  // arguments.
2382  ZoneList<Expression*>* args = expr->arguments();
2383  int arg_count = args->length();
2384 
2385  { PreservePositionScope pos_scope(masm()->positions_recorder());
2386  VisitForStackValue(callee);
2387  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2388  __ push(r2); // Reserved receiver slot.
2389 
2390  // Push the arguments.
2391  for (int i = 0; i < arg_count; i++) {
2392  VisitForStackValue(args->at(i));
2393  }
2394 
2395  // Push a copy of the function (found below the arguments) and
2396  // resolve eval.
2397  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2398  __ push(r1);
2399  EmitResolvePossiblyDirectEval(arg_count);
2400 
2401  // The runtime call returns a pair of values in r0 (function) and
2402  // r1 (receiver). Touch up the stack with the right values.
2403  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2404  __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2405  }
2406 
2407  // Record source position for debugger.
2408  SetSourcePosition(expr->position());
2409  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2410  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2411  __ CallStub(&stub);
2412  RecordJSReturnSite(expr);
2413  // Restore context register.
2414  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2415  context()->DropAndPlug(1, r0);
2416  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2417  // Push global object as receiver for the call IC.
2418  __ ldr(r0, GlobalObjectOperand());
2419  __ push(r0);
2420  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2421  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2422  // Call to a lookup slot (dynamically introduced variable).
2423  Label slow, done;
2424 
2425  { PreservePositionScope scope(masm()->positions_recorder());
2426  // Generate code for loading from variables potentially shadowed
2427  // by eval-introduced variables.
2428  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2429  }
2430 
2431  __ bind(&slow);
2432  // Call the runtime to find the function to call (returned in r0)
2433  // and the object holding it (returned in r1).
2434  __ push(context_register());
2435  __ mov(r2, Operand(proxy->name()));
2436  __ push(r2);
2437  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2438  __ Push(r0, r1); // Function, receiver.
2439 
2440  // If fast case code has been generated, emit code to push the
2441  // function and receiver and have the slow path jump around this
2442  // code.
2443  if (done.is_linked()) {
2444  Label call;
2445  __ b(&call);
2446  __ bind(&done);
2447  // Push function.
2448  __ push(r0);
2449  // The receiver is implicitly the global receiver. Indicate this
2450  // by passing the hole to the call function stub.
2451  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
2452  __ push(r1);
2453  __ bind(&call);
2454  }
2455 
2456  // The receiver is either the global receiver or an object found
2457  // by LoadContextSlot. That object could be the hole if the
2458  // receiver is implicitly the global object.
2459  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2460  } else if (property != NULL) {
2461  { PreservePositionScope scope(masm()->positions_recorder());
2462  VisitForStackValue(property->obj());
2463  }
2464  if (property->key()->IsPropertyName()) {
2465  EmitCallWithIC(expr,
2466  property->key()->AsLiteral()->handle(),
2467  RelocInfo::CODE_TARGET);
2468  } else {
2469  EmitKeyedCallWithIC(expr, property->key());
2470  }
2471  } else {
2472  // Call to an arbitrary expression not handled specially above.
2473  { PreservePositionScope scope(masm()->positions_recorder());
2474  VisitForStackValue(callee);
2475  }
2476  // Load global receiver object.
2477  __ ldr(r1, GlobalObjectOperand());
2479  __ push(r1);
2480  // Emit function call.
2481  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2482  }
2483 
2484 #ifdef DEBUG
2485  // RecordJSReturnSite should have been called.
2486  ASSERT(expr->return_is_recorded_);
2487 #endif
2488 }
2489 
2490 
2491 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2492  Comment cmnt(masm_, "[ CallNew");
2493  // According to ECMA-262, section 11.2.2, page 44, the function
2494  // expression in new calls must be evaluated before the
2495  // arguments.
2496 
2497  // Push constructor on the stack. If it's not a function it's used as
2498  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2499  // ignored.
2500  VisitForStackValue(expr->expression());
2501 
2502  // Push the arguments ("left-to-right") on the stack.
2503  ZoneList<Expression*>* args = expr->arguments();
2504  int arg_count = args->length();
2505  for (int i = 0; i < arg_count; i++) {
2506  VisitForStackValue(args->at(i));
2507  }
2508 
2509  // Call the construct call builtin that handles allocation and
2510  // constructor invocation.
2511  SetSourcePosition(expr->position());
2512 
2513  // Load function and argument count into r1 and r0.
2514  __ mov(r0, Operand(arg_count));
2515  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2516 
2517  // Record call targets in unoptimized code.
2518  Handle<Object> uninitialized =
2519      TypeFeedbackCells::UninitializedSentinel(isolate());
2520  Handle<JSGlobalPropertyCell> cell =
2521  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2522  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2523  __ mov(r2, Operand(cell));
2524 
2525  CallConstructStub stub(RECORD_CALL_TARGET);
2526  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2527  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2528  context()->Plug(r0);
2529 }
2530 
2531 
2532 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2533  ZoneList<Expression*>* args = expr->arguments();
2534  ASSERT(args->length() == 1);
2535 
2536  VisitForAccumulatorValue(args->at(0));
2537 
2538  Label materialize_true, materialize_false;
2539  Label* if_true = NULL;
2540  Label* if_false = NULL;
2541  Label* fall_through = NULL;
2542  context()->PrepareTest(&materialize_true, &materialize_false,
2543  &if_true, &if_false, &fall_through);
2544 
2545  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2546  __ tst(r0, Operand(kSmiTagMask));
2547  Split(eq, if_true, if_false, fall_through);
2548 
2549  context()->Plug(if_true, if_false);
2550 }
2551 
2552 
2553 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2554  ZoneList<Expression*>* args = expr->arguments();
2555  ASSERT(args->length() == 1);
2556 
2557  VisitForAccumulatorValue(args->at(0));
2558 
2559  Label materialize_true, materialize_false;
2560  Label* if_true = NULL;
2561  Label* if_false = NULL;
2562  Label* fall_through = NULL;
2563  context()->PrepareTest(&materialize_true, &materialize_false,
2564  &if_true, &if_false, &fall_through);
2565 
2566  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2567  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
2568  Split(eq, if_true, if_false, fall_through);
2569 
2570  context()->Plug(if_true, if_false);
2571 }
2572 
2573 
2574 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2575  ZoneList<Expression*>* args = expr->arguments();
2576  ASSERT(args->length() == 1);
2577 
2578  VisitForAccumulatorValue(args->at(0));
2579 
2580  Label materialize_true, materialize_false;
2581  Label* if_true = NULL;
2582  Label* if_false = NULL;
2583  Label* fall_through = NULL;
2584  context()->PrepareTest(&materialize_true, &materialize_false,
2585  &if_true, &if_false, &fall_through);
2586 
2587  __ JumpIfSmi(r0, if_false);
2588  __ LoadRoot(ip, Heap::kNullValueRootIndex);
2589  __ cmp(r0, ip);
2590  __ b(eq, if_true);
2592  // Undetectable objects behave like undefined when tested with typeof.
2594  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2595  __ b(ne, if_false);
2597  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2598  __ b(lt, if_false);
2599  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2600  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2601  Split(le, if_true, if_false, fall_through);
2602 
2603  context()->Plug(if_true, if_false);
2604 }
2605 
2606 
2607 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2608  ZoneList<Expression*>* args = expr->arguments();
2609  ASSERT(args->length() == 1);
2610 
2611  VisitForAccumulatorValue(args->at(0));
2612 
2613  Label materialize_true, materialize_false;
2614  Label* if_true = NULL;
2615  Label* if_false = NULL;
2616  Label* fall_through = NULL;
2617  context()->PrepareTest(&materialize_true, &materialize_false,
2618  &if_true, &if_false, &fall_through);
2619 
2620  __ JumpIfSmi(r0, if_false);
2621  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2622  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2623  Split(ge, if_true, if_false, fall_through);
2624 
2625  context()->Plug(if_true, if_false);
2626 }
2627 
2628 
2629 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2630  ZoneList<Expression*>* args = expr->arguments();
2631  ASSERT(args->length() == 1);
2632 
2633  VisitForAccumulatorValue(args->at(0));
2634 
2635  Label materialize_true, materialize_false;
2636  Label* if_true = NULL;
2637  Label* if_false = NULL;
2638  Label* fall_through = NULL;
2639  context()->PrepareTest(&materialize_true, &materialize_false,
2640  &if_true, &if_false, &fall_through);
2641 
2642  __ JumpIfSmi(r0, if_false);
2645  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2646  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2647  Split(ne, if_true, if_false, fall_through);
2648 
2649  context()->Plug(if_true, if_false);
2650 }
2651 
2652 
2653 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2654  CallRuntime* expr) {
2655  ZoneList<Expression*>* args = expr->arguments();
2656  ASSERT(args->length() == 1);
2657 
2658  VisitForAccumulatorValue(args->at(0));
2659 
2660  Label materialize_true, materialize_false;
2661  Label* if_true = NULL;
2662  Label* if_false = NULL;
2663  Label* fall_through = NULL;
2664  context()->PrepareTest(&materialize_true, &materialize_false,
2665  &if_true, &if_false, &fall_through);
2666 
2667  __ AssertNotSmi(r0);
2668 
2671  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2672  __ b(ne, if_true);
2673 
2674  // Check for fast case object. Generate false result for slow case object.
2677  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
2678  __ cmp(r2, ip);
2679  __ b(eq, if_false);
2680 
2681  // Look for valueOf symbol in the descriptor array, and indicate false if
2682  // found. Since we omit an enumeration index check, if it is added via a
2683  // transition that shares its descriptor array, this is a false positive.
2684  Label entry, loop, done;
2685 
2686  // Skip loop if no descriptors are valid.
2687  __ NumberOfOwnDescriptors(r3, r1);
2688  __ cmp(r3, Operand(0));
2689  __ b(eq, &done);
2690 
2691  __ LoadInstanceDescriptors(r1, r4);
2692  // r4: descriptor array.
2693  // r3: valid entries in the descriptor array.
2694  STATIC_ASSERT(kSmiTag == 0);
2695  STATIC_ASSERT(kSmiTagSize == 1);
2696  STATIC_ASSERT(kPointerSize == 4);
2697  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
2698  __ mul(r3, r3, ip);
2699  // Calculate location of the first key name.
2701  // Calculate the end of the descriptor array.
2702  __ mov(r2, r4);
2703  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
2704 
2705  // Loop through all the keys in the descriptor array. If one of these is the
2706  // symbol valueOf the result is false.
2707  // The use of ip to store the valueOf symbol assumes that it is not otherwise
2708  // used in the loop below.
2709  __ mov(ip, Operand(FACTORY->value_of_symbol()));
2710  __ jmp(&entry);
2711  __ bind(&loop);
2712  __ ldr(r3, MemOperand(r4, 0));
2713  __ cmp(r3, ip);
2714  __ b(eq, if_false);
2715  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
2716  __ bind(&entry);
2717  __ cmp(r4, Operand(r2));
2718  __ b(ne, &loop);
2719 
2720  __ bind(&done);
2721  // If a valueOf property is not found on the object, check that its
2722  // prototype is the unmodified String prototype. If not, the result is false.
2724  __ JumpIfSmi(r2, if_false);
2729  __ cmp(r2, r3);
2730  __ b(ne, if_false);
2731 
2732  // Set the bit in the map to indicate that it has been checked safe for
2733  // default valueOf and set true result.
2735  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2737  __ jmp(if_true);
2738 
2739  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2740  context()->Plug(if_true, if_false);
2741 }
2742 
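The descriptor walk above reduces to a linear scan of the own property keys for the valueOf symbol. A compact sketch of that check (plain std::string keys standing in for symbols):

#include <string>
#include <vector>

// Scan the own descriptor keys and report whether any of them is "valueOf".
// As the comment above notes, a key added later via a transition that shares
// the descriptor array can make this a false positive; the generated code
// accepts that and simply answers false for such objects.
bool HasOwnValueOf(const std::vector<std::string>& descriptor_keys) {
  for (const std::string& key : descriptor_keys) {
    if (key == "valueOf") return true;  // default valueOf may be shadowed
  }
  return false;
}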
2743 
2744 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2745  ZoneList<Expression*>* args = expr->arguments();
2746  ASSERT(args->length() == 1);
2747 
2748  VisitForAccumulatorValue(args->at(0));
2749 
2750  Label materialize_true, materialize_false;
2751  Label* if_true = NULL;
2752  Label* if_false = NULL;
2753  Label* fall_through = NULL;
2754  context()->PrepareTest(&materialize_true, &materialize_false,
2755  &if_true, &if_false, &fall_through);
2756 
2757  __ JumpIfSmi(r0, if_false);
2758  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
2759  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2760  Split(eq, if_true, if_false, fall_through);
2761 
2762  context()->Plug(if_true, if_false);
2763 }
2764 
2765 
2766 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2767  ZoneList<Expression*>* args = expr->arguments();
2768  ASSERT(args->length() == 1);
2769 
2770  VisitForAccumulatorValue(args->at(0));
2771 
2772  Label materialize_true, materialize_false;
2773  Label* if_true = NULL;
2774  Label* if_false = NULL;
2775  Label* fall_through = NULL;
2776  context()->PrepareTest(&materialize_true, &materialize_false,
2777  &if_true, &if_false, &fall_through);
2778 
2779  __ JumpIfSmi(r0, if_false);
2780  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2781  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2782  Split(eq, if_true, if_false, fall_through);
2783 
2784  context()->Plug(if_true, if_false);
2785 }
2786 
2787 
2788 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2789  ZoneList<Expression*>* args = expr->arguments();
2790  ASSERT(args->length() == 1);
2791 
2792  VisitForAccumulatorValue(args->at(0));
2793 
2794  Label materialize_true, materialize_false;
2795  Label* if_true = NULL;
2796  Label* if_false = NULL;
2797  Label* fall_through = NULL;
2798  context()->PrepareTest(&materialize_true, &materialize_false,
2799  &if_true, &if_false, &fall_through);
2800 
2801  __ JumpIfSmi(r0, if_false);
2802  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2803  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2804  Split(eq, if_true, if_false, fall_through);
2805 
2806  context()->Plug(if_true, if_false);
2807 }
2808 
2809 
2810 
2811 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2812  ASSERT(expr->arguments()->length() == 0);
2813 
2814  Label materialize_true, materialize_false;
2815  Label* if_true = NULL;
2816  Label* if_false = NULL;
2817  Label* fall_through = NULL;
2818  context()->PrepareTest(&materialize_true, &materialize_false,
2819  &if_true, &if_false, &fall_through);
2820 
2821  // Get the frame pointer for the calling frame.
2823 
2824  // Skip the arguments adaptor frame if it exists.
2825  Label check_frame_marker;
2828  __ b(ne, &check_frame_marker);
2830 
2831  // Check the marker in the calling frame.
2832  __ bind(&check_frame_marker);
2834  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2835  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2836  Split(eq, if_true, if_false, fall_through);
2837 
2838  context()->Plug(if_true, if_false);
2839 }
2840 
2841 
2842 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2843  ZoneList<Expression*>* args = expr->arguments();
2844  ASSERT(args->length() == 2);
2845 
2846  // Load the two objects into registers and perform the comparison.
2847  VisitForStackValue(args->at(0));
2848  VisitForAccumulatorValue(args->at(1));
2849 
2850  Label materialize_true, materialize_false;
2851  Label* if_true = NULL;
2852  Label* if_false = NULL;
2853  Label* fall_through = NULL;
2854  context()->PrepareTest(&materialize_true, &materialize_false,
2855  &if_true, &if_false, &fall_through);
2856 
2857  __ pop(r1);
2858  __ cmp(r0, r1);
2859  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2860  Split(eq, if_true, if_false, fall_through);
2861 
2862  context()->Plug(if_true, if_false);
2863 }
2864 
2865 
2866 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2867  ZoneList<Expression*>* args = expr->arguments();
2868  ASSERT(args->length() == 1);
2869 
2870  // ArgumentsAccessStub expects the key in r1 and the formal
2871  // parameter count in r0.
2872  VisitForAccumulatorValue(args->at(0));
2873  __ mov(r1, r0);
2874  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2875  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2876  __ CallStub(&stub);
2877  context()->Plug(r0);
2878 }
2879 
2880 
2881 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2882  ASSERT(expr->arguments()->length() == 0);
2883  Label exit;
2884  // Get the number of formal parameters.
2885  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2886 
2887  // Check if the calling frame is an arguments adaptor frame.
2891  __ b(ne, &exit);
2892 
2893  // Arguments adaptor case: Read the arguments length from the
2894  // adaptor frame.
2896 
2897  __ bind(&exit);
2898  context()->Plug(r0);
2899 }
2900 
2901 
2902 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2903  ZoneList<Expression*>* args = expr->arguments();
2904  ASSERT(args->length() == 1);
2905  Label done, null, function, non_function_constructor;
2906 
2907  VisitForAccumulatorValue(args->at(0));
2908 
2909  // If the object is a smi, we return null.
2910  __ JumpIfSmi(r0, &null);
2911 
2912  // Check that the object is a JS object but take special care of JS
2913  // functions to make sure they have 'Function' as their class.
2914  // Assume that there are only two callable types, and one of them is at
2915  // either end of the type range for JS object types. Saves extra comparisons.
2917  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
2918  // Map is now in r0.
2919  __ b(lt, &null);
2922  __ b(eq, &function);
2923 
2924  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
2926  LAST_SPEC_OBJECT_TYPE - 1);
2927  __ b(eq, &function);
2928  // Assume that there is no larger type.
2930 
2931  // Check if the constructor in the map is a JS function.
2933  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2934  __ b(ne, &non_function_constructor);
2935 
2936  // r0 now contains the constructor function. Grab the
2937  // instance class name from there.
2940  __ b(&done);
2941 
2942  // Functions have class 'Function'.
2943  __ bind(&function);
2944  __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2945  __ jmp(&done);
2946 
2947  // Objects with a non-function constructor have class 'Object'.
2948  __ bind(&non_function_constructor);
2949  __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2950  __ jmp(&done);
2951 
2952  // Non-JS objects have class null.
2953  __ bind(&null);
2954  __ LoadRoot(r0, Heap::kNullValueRootIndex);
2955 
2956  // All done.
2957  __ bind(&done);
2958 
2959  context()->Plug(r0);
2960 }
2961 
2962 
2963 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2964  // Conditionally generate a log call.
2965  // Args:
2966  // 0 (literal string): The type of logging (corresponds to the flags).
2967  // This is used to determine whether or not to generate the log call.
2968  // 1 (string): Format string. Access the string at argument index 2
2969  // with '%2s' (see Logger::LogRuntime for all the formats).
2970  // 2 (array): Arguments to the format string.
2971  ZoneList<Expression*>* args = expr->arguments();
2972  ASSERT_EQ(args->length(), 3);
2973  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2974  VisitForStackValue(args->at(1));
2975  VisitForStackValue(args->at(2));
2976  __ CallRuntime(Runtime::kLog, 2);
2977  }
2978 
2979  // Finally, we're expected to leave a value on the top of the stack.
2980  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2981  context()->Plug(r0);
2982 }
2983 
2984 
2985 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2986  ASSERT(expr->arguments()->length() == 0);
2987  Label slow_allocate_heapnumber;
2988  Label heapnumber_allocated;
2989 
2990  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2991  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
2992  __ jmp(&heapnumber_allocated);
2993 
2994  __ bind(&slow_allocate_heapnumber);
2995  // Allocate a heap number.
2996  __ CallRuntime(Runtime::kNumberAlloc, 0);
2997  __ mov(r4, Operand(r0));
2998 
2999  __ bind(&heapnumber_allocated);
3000 
3001  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
3002  // by computing:
3003  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
3004  if (CpuFeatures::IsSupported(VFP2)) {
3005  __ PrepareCallCFunction(1, r0);
3006  __ ldr(r0,
3007  ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3009  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3010 
3011  CpuFeatures::Scope scope(VFP2);
3012  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3013  // Create this constant using mov/orr to avoid PC relative load.
3014  __ mov(r1, Operand(0x41000000));
3015  __ orr(r1, r1, Operand(0x300000));
3016  // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
3017  __ vmov(d7, r0, r1);
3018  // Move 0x4130000000000000 to VFP.
3019  __ mov(r0, Operand(0, RelocInfo::NONE));
3020  __ vmov(d8, r0, r1);
3021  // Subtract and store the result in the heap number.
3022  __ vsub(d7, d7, d8);
3023  __ sub(r0, r4, Operand(kHeapObjectTag));
3024  __ vstr(d7, r0, HeapNumber::kValueOffset);
3025  __ mov(r0, r4);
3026  } else {
3027  __ PrepareCallCFunction(2, r0);
3028  __ ldr(r1,
3029  ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3030  __ mov(r0, Operand(r4));
3032  __ CallCFunction(
3033  ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
3034  }
3035 
3036  context()->Plug(r0);
3037 }
3038 
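The comment above describes the exponent-bias trick for turning 32 random bits into a double in [0, 1). A self-contained sketch of the same computation, assuming IEEE-754 doubles:

#include <cstdint>
#include <cstdio>
#include <cstring>

// 0x41300000 in the high word makes the double 1.xxx * 2^20, with the 32
// random bits filling the low half of the mantissa. Subtracting 1.0 * 2^20
// leaves exactly 0.(32 random bits), i.e. random32 * 2^-32 in [0, 1).
double RandomBitsToDouble(uint32_t random32) {
  uint64_t bits = (static_cast<uint64_t>(0x41300000u) << 32) | random32;
  double biased;
  std::memcpy(&biased, &bits, sizeof biased);
  return biased - 1048576.0;  // 1048576.0 == 1.0 * 2^20
}

int main() {
  std::printf("%f\n", RandomBitsToDouble(0x80000000u));  // prints 0.500000
}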
3039 
3040 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3041  // Load the arguments on the stack and call the stub.
3042  SubStringStub stub;
3043  ZoneList<Expression*>* args = expr->arguments();
3044  ASSERT(args->length() == 3);
3045  VisitForStackValue(args->at(0));
3046  VisitForStackValue(args->at(1));
3047  VisitForStackValue(args->at(2));
3048  __ CallStub(&stub);
3049  context()->Plug(r0);
3050 }
3051 
3052 
3053 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3054  // Load the arguments on the stack and call the stub.
3055  RegExpExecStub stub;
3056  ZoneList<Expression*>* args = expr->arguments();
3057  ASSERT(args->length() == 4);
3058  VisitForStackValue(args->at(0));
3059  VisitForStackValue(args->at(1));
3060  VisitForStackValue(args->at(2));
3061  VisitForStackValue(args->at(3));
3062  __ CallStub(&stub);
3063  context()->Plug(r0);
3064 }
3065 
3066 
3067 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3068  ZoneList<Expression*>* args = expr->arguments();
3069  ASSERT(args->length() == 1);
3070  VisitForAccumulatorValue(args->at(0)); // Load the object.
3071 
3072  Label done;
3073  // If the object is a smi return the object.
3074  __ JumpIfSmi(r0, &done);
3075  // If the object is not a value type, return the object.
3076  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3077  __ b(ne, &done);
3078  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
3079 
3080  __ bind(&done);
3081  context()->Plug(r0);
3082 }
3083 
3084 
3085 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3086  ZoneList<Expression*>* args = expr->arguments();
3087  ASSERT(args->length() == 2);
3088  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3089  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3090 
3091  VisitForAccumulatorValue(args->at(0)); // Load the object.
3092 
3093  Label runtime, done, not_date_object;
3094  Register object = r0;
3095  Register result = r0;
3096  Register scratch0 = r9;
3097  Register scratch1 = r1;
3098 
3099  __ JumpIfSmi(object, &not_date_object);
3100  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3101  __ b(ne, &not_date_object);
3102 
3103  if (index->value() == 0) {
3104  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3105  __ jmp(&done);
3106  } else {
3107  if (index->value() < JSDate::kFirstUncachedField) {
3108  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3109  __ mov(scratch1, Operand(stamp));
3110  __ ldr(scratch1, MemOperand(scratch1));
3111  __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3112  __ cmp(scratch1, scratch0);
3113  __ b(ne, &runtime);
3114  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3115  kPointerSize * index->value()));
3116  __ jmp(&done);
3117  }
3118  __ bind(&runtime);
3119  __ PrepareCallCFunction(2, scratch1);
3120  __ mov(r1, Operand(index));
3121  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3122  __ jmp(&done);
3123  }
3124 
3125  __ bind(&not_date_object);
3126  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3127  __ bind(&done);
3128  context()->Plug(r0);
3129 }
3130 
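The stamp comparison above guards JSDate's per-field cache: the cached fields are trusted only while the stamp stored in the object matches the isolate-wide date cache stamp. A rough model of that protocol, with an illustrative struct layout rather than V8's real JSDate offsets:

#include <cstdint>

struct DateObject {
  double value;            // time in ms since the epoch (field index 0)
  uint32_t cache_stamp;    // stamp recorded when the cached fields were filled
  int cached_fields[8];    // year, month, day, ... (layout is illustrative)
};

int RecomputeField(const DateObject& date, int index) {
  // Stands in for ExternalReference::get_date_field_function; the real code
  // recomputes the field from date.value and refreshes the cache.
  (void)date;
  return index;
}

int ReadDateField(const DateObject& date, int index, uint32_t current_stamp) {
  if (index == 0) return static_cast<int>(date.value);  // never cached
  if (date.cache_stamp == current_stamp) return date.cached_fields[index - 1];
  return RecomputeField(date, index);  // the &runtime path above
}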
3131 
3132 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3133  // Load the arguments on the stack and call the runtime function.
3134  ZoneList<Expression*>* args = expr->arguments();
3135  ASSERT(args->length() == 2);
3136  VisitForStackValue(args->at(0));
3137  VisitForStackValue(args->at(1));
3138  if (CpuFeatures::IsSupported(VFP2)) {
3139  MathPowStub stub(MathPowStub::ON_STACK);
3140  __ CallStub(&stub);
3141  } else {
3142  __ CallRuntime(Runtime::kMath_pow, 2);
3143  }
3144  context()->Plug(r0);
3145 }
3146 
3147 
3148 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3149  ZoneList<Expression*>* args = expr->arguments();
3150  ASSERT(args->length() == 2);
3151  VisitForStackValue(args->at(0)); // Load the object.
3152  VisitForAccumulatorValue(args->at(1)); // Load the value.
3153  __ pop(r1); // r0 = value. r1 = object.
3154 
3155  Label done;
3156  // If the object is a smi, return the value.
3157  __ JumpIfSmi(r1, &done);
3158 
3159  // If the object is not a value type, return the value.
3160  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3161  __ b(ne, &done);
3162 
3163  // Store the value.
3164  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3165  // Update the write barrier. Save the value as it will be
3166  // overwritten by the write barrier code and is needed afterward.
3167  __ mov(r2, r0);
3168  __ RecordWriteField(
3169      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3170 
3171  __ bind(&done);
3172  context()->Plug(r0);
3173 }
3174 
3175 
3176 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3177  ZoneList<Expression*>* args = expr->arguments();
3178  ASSERT_EQ(args->length(), 1);
3179  // Load the argument on the stack and call the stub.
3180  VisitForStackValue(args->at(0));
3181 
3182  NumberToStringStub stub;
3183  __ CallStub(&stub);
3184  context()->Plug(r0);
3185 }
3186 
3187 
3188 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3189  ZoneList<Expression*>* args = expr->arguments();
3190  ASSERT(args->length() == 1);
3191  VisitForAccumulatorValue(args->at(0));
3192 
3193  Label done;
3194  StringCharFromCodeGenerator generator(r0, r1);
3195  generator.GenerateFast(masm_);
3196  __ jmp(&done);
3197 
3198  NopRuntimeCallHelper call_helper;
3199  generator.GenerateSlow(masm_, call_helper);
3200 
3201  __ bind(&done);
3202  context()->Plug(r1);
3203 }
3204 
3205 
3206 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3207  ZoneList<Expression*>* args = expr->arguments();
3208  ASSERT(args->length() == 2);
3209  VisitForStackValue(args->at(0));
3210  VisitForAccumulatorValue(args->at(1));
3211 
3212  Register object = r1;
3213  Register index = r0;
3214  Register result = r3;
3215 
3216  __ pop(object);
3217 
3218  Label need_conversion;
3219  Label index_out_of_range;
3220  Label done;
3221  StringCharCodeAtGenerator generator(object,
3222  index,
3223  result,
3224  &need_conversion,
3225  &need_conversion,
3226  &index_out_of_range,
3227  STRING_INDEX_IS_NUMBER);
3228  generator.GenerateFast(masm_);
3229  __ jmp(&done);
3230 
3231  __ bind(&index_out_of_range);
3232  // When the index is out of range, the spec requires us to return
3233  // NaN.
3234  __ LoadRoot(result, Heap::kNanValueRootIndex);
3235  __ jmp(&done);
3236 
3237  __ bind(&need_conversion);
3238  // Load the undefined value into the result register, which will
3239  // trigger conversion.
3240  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3241  __ jmp(&done);
3242 
3243  NopRuntimeCallHelper call_helper;
3244  generator.GenerateSlow(masm_, call_helper);
3245 
3246  __ bind(&done);
3247  context()->Plug(result);
3248 }
3249 
3250 
3251 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3252  ZoneList<Expression*>* args = expr->arguments();
3253  ASSERT(args->length() == 2);
3254  VisitForStackValue(args->at(0));
3255  VisitForAccumulatorValue(args->at(1));
3256 
3257  Register object = r1;
3258  Register index = r0;
3259  Register scratch = r3;
3260  Register result = r0;
3261 
3262  __ pop(object);
3263 
3264  Label need_conversion;
3265  Label index_out_of_range;
3266  Label done;
3267  StringCharAtGenerator generator(object,
3268  index,
3269  scratch,
3270  result,
3271  &need_conversion,
3272  &need_conversion,
3273  &index_out_of_range,
3274  STRING_INDEX_IS_NUMBER);
3275  generator.GenerateFast(masm_);
3276  __ jmp(&done);
3277 
3278  __ bind(&index_out_of_range);
3279  // When the index is out of range, the spec requires us to return
3280  // the empty string.
3281  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3282  __ jmp(&done);
3283 
3284  __ bind(&need_conversion);
3285  // Move smi zero into the result register, which will trigger
3286  // conversion.
3287  __ mov(result, Operand(Smi::FromInt(0)));
3288  __ jmp(&done);
3289 
3290  NopRuntimeCallHelper call_helper;
3291  generator.GenerateSlow(masm_, call_helper);
3292 
3293  __ bind(&done);
3294  context()->Plug(result);
3295 }
3296 
3297 
3298 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3299  ZoneList<Expression*>* args = expr->arguments();
3300  ASSERT_EQ(2, args->length());
3301  VisitForStackValue(args->at(0));
3302  VisitForStackValue(args->at(1));
3303 
3304  StringAddStub stub(NO_STRING_ADD_FLAGS);
3305  __ CallStub(&stub);
3306  context()->Plug(r0);
3307 }
3308 
3309 
3310 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3311  ZoneList<Expression*>* args = expr->arguments();
3312  ASSERT_EQ(2, args->length());
3313  VisitForStackValue(args->at(0));
3314  VisitForStackValue(args->at(1));
3315 
3316  StringCompareStub stub;
3317  __ CallStub(&stub);
3318  context()->Plug(r0);
3319 }
3320 
3321 
3322 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3323  // Load the argument on the stack and call the stub.
3324  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3325      TranscendentalCacheStub::TAGGED);
3326  ZoneList<Expression*>* args = expr->arguments();
3327  ASSERT(args->length() == 1);
3328  VisitForStackValue(args->at(0));
3329  __ CallStub(&stub);
3330  context()->Plug(r0);
3331 }
3332 
3333 
3334 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3335  // Load the argument on the stack and call the stub.
3336  TranscendentalCacheStub stub(TranscendentalCache::COS,
3337      TranscendentalCacheStub::TAGGED);
3338  ZoneList<Expression*>* args = expr->arguments();
3339  ASSERT(args->length() == 1);
3340  VisitForStackValue(args->at(0));
3341  __ CallStub(&stub);
3342  context()->Plug(r0);
3343 }
3344 
3345 
3346 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3347  // Load the argument on the stack and call the stub.
3348  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3349      TranscendentalCacheStub::TAGGED);
3350  ZoneList<Expression*>* args = expr->arguments();
3351  ASSERT(args->length() == 1);
3352  VisitForStackValue(args->at(0));
3353  __ CallStub(&stub);
3354  context()->Plug(r0);
3355 }
3356 
3357 
3358 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3359  // Load the argument on the stack and call the stub.
3360  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3361      TranscendentalCacheStub::TAGGED);
3362  ZoneList<Expression*>* args = expr->arguments();
3363  ASSERT(args->length() == 1);
3364  VisitForStackValue(args->at(0));
3365  __ CallStub(&stub);
3366  context()->Plug(r0);
3367 }
3368 
3369 
3370 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3371  // Load the argument on the stack and call the runtime function.
3372  ZoneList<Expression*>* args = expr->arguments();
3373  ASSERT(args->length() == 1);
3374  VisitForStackValue(args->at(0));
3375  __ CallRuntime(Runtime::kMath_sqrt, 1);
3376  context()->Plug(r0);
3377 }
3378 
3379 
3380 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3381  ZoneList<Expression*>* args = expr->arguments();
3382  ASSERT(args->length() >= 2);
3383 
3384  int arg_count = args->length() - 2; // 2 ~ receiver and function.
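 // The runtime call's arguments are laid out as (receiver, arg1, ..., argN,
 // function): the loop below pushes the receiver and the N arguments, while
 // the function itself is left in the accumulator for the checks that follow.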
3385  for (int i = 0; i < arg_count + 1; i++) {
3386  VisitForStackValue(args->at(i));
3387  }
3388  VisitForAccumulatorValue(args->last()); // Function.
3389 
3390  Label runtime, done;
3391  // Check for non-function argument (including proxy).
3392  __ JumpIfSmi(r0, &runtime);
3393  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3394  __ b(ne, &runtime);
3395 
3396  // InvokeFunction requires the function in r1. Move it in there.
3397  __ mov(r1, result_register());
3398  ParameterCount count(arg_count);
3399  __ InvokeFunction(r1, count, CALL_FUNCTION,
 3400  NullCallWrapper(), CALL_AS_METHOD);
 3401  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3402  __ jmp(&done);
3403 
3404  __ bind(&runtime);
3405  __ push(r0);
3406  __ CallRuntime(Runtime::kCall, args->length());
3407  __ bind(&done);
3408 
3409  context()->Plug(r0);
3410 }
3411 
3412 
3413 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3414  RegExpConstructResultStub stub;
3415  ZoneList<Expression*>* args = expr->arguments();
3416  ASSERT(args->length() == 3);
3417  VisitForStackValue(args->at(0));
3418  VisitForStackValue(args->at(1));
3419  VisitForStackValue(args->at(2));
3420  __ CallStub(&stub);
3421  context()->Plug(r0);
3422 }
3423 
3424 
3425 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3426  ZoneList<Expression*>* args = expr->arguments();
3427  ASSERT_EQ(2, args->length());
3428  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3429  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3430 
3431  Handle<FixedArray> jsfunction_result_caches(
3432  isolate()->native_context()->jsfunction_result_caches());
3433  if (jsfunction_result_caches->length() <= cache_id) {
3434  __ Abort("Attempt to use undefined cache.");
3435  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3436  context()->Plug(r0);
3437  return;
3438  }
3439 
3440  VisitForAccumulatorValue(args->at(1));
3441 
3442  Register key = r0;
 3443  Register cache = r1;
 3444  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
 3445  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
 3446  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
 3447  __ ldr(cache,
 3448  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3449 
3450 
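 // The cache is a FixedArray of (key, value) pairs together with a "finger"
 // that marks the most recently used entry. The fast path below only probes
 // the pair under the finger; any miss falls back to Runtime::kGetFromCache.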
3451  Label done, not_found;
3452  // tmp now holds finger offset as a smi.
 3453  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
 3454  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3455  // r2 now holds finger offset as a smi.
3456  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
 3457  // r3 now points to the start of fixed array elements.
 3458  __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
3459  // Note side effect of PreIndex: r3 now points to the key of the pair.
3460  __ cmp(key, r2);
3461  __ b(ne, &not_found);
3462 
3463  __ ldr(r0, MemOperand(r3, kPointerSize));
3464  __ b(&done);
3465 
3466  __ bind(&not_found);
3467  // Call runtime to perform the lookup.
3468  __ Push(cache, key);
3469  __ CallRuntime(Runtime::kGetFromCache, 2);
3470 
3471  __ bind(&done);
3472  context()->Plug(r0);
3473 }
3474 
3475 
3476 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3477  ZoneList<Expression*>* args = expr->arguments();
3478  ASSERT_EQ(2, args->length());
3479 
3480  Register right = r0;
3481  Register left = r1;
3482  Register tmp = r2;
3483  Register tmp2 = r3;
3484 
3485  VisitForStackValue(args->at(0));
3486  VisitForAccumulatorValue(args->at(1));
3487  __ pop(left);
3488 
3489  Label done, fail, ok;
3490  __ cmp(left, Operand(right));
3491  __ b(eq, &ok);
3492  // Fail if either is a non-HeapObject.
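 // Heap objects have the low tag bit set and smis have it cleared, so the
 // AND below leaves the tag bit cleared exactly when at least one operand
 // is a smi, which JumpIfSmi then rejects.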
3493  __ and_(tmp, left, Operand(right));
3494  __ JumpIfSmi(tmp, &fail);
3495  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3496  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3497  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
3498  __ b(ne, &fail);
3499  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3500  __ cmp(tmp, Operand(tmp2));
3501  __ b(ne, &fail);
3502  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3503  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3504  __ cmp(tmp, tmp2);
3505  __ b(eq, &ok);
3506  __ bind(&fail);
3507  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3508  __ jmp(&done);
3509  __ bind(&ok);
3510  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3511  __ bind(&done);
3512 
3513  context()->Plug(r0);
3514 }
3515 
3516 
3517 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3518  ZoneList<Expression*>* args = expr->arguments();
3519  VisitForAccumulatorValue(args->at(0));
3520 
3521  Label materialize_true, materialize_false;
3522  Label* if_true = NULL;
3523  Label* if_false = NULL;
3524  Label* fall_through = NULL;
3525  context()->PrepareTest(&materialize_true, &materialize_false,
3526  &if_true, &if_false, &fall_through);
 3527 
 3528  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
 3529  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3530  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3531  Split(eq, if_true, if_false, fall_through);
3532 
3533  context()->Plug(if_true, if_false);
3534 }
3535 
3536 
3537 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3538  ZoneList<Expression*>* args = expr->arguments();
3539  ASSERT(args->length() == 1);
3540  VisitForAccumulatorValue(args->at(0));
3541 
3542  __ AssertString(r0);
 3543 
 3544  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3545  __ IndexFromHash(r0, r0);
3546 
3547  context()->Plug(r0);
3548 }
3549 
3550 
3551 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3552  Label bailout, done, one_char_separator, long_separator,
3553  non_trivial_array, not_size_one_array, loop,
3554  empty_separator_loop, one_char_separator_loop,
3555  one_char_separator_loop_entry, long_separator_loop;
3556  ZoneList<Expression*>* args = expr->arguments();
3557  ASSERT(args->length() == 2);
3558  VisitForStackValue(args->at(1));
3559  VisitForAccumulatorValue(args->at(0));
3560 
3561  // All aliases of the same register have disjoint lifetimes.
3562  Register array = r0;
3563  Register elements = no_reg; // Will be r0.
3564  Register result = no_reg; // Will be r0.
3565  Register separator = r1;
3566  Register array_length = r2;
3567  Register result_pos = no_reg; // Will be r2
3568  Register string_length = r3;
3569  Register string = r4;
3570  Register element = r5;
3571  Register elements_end = r6;
3572  Register scratch1 = r7;
3573  Register scratch2 = r9;
3574 
3575  // Separator operand is on the stack.
3576  __ pop(separator);
3577 
3578  // Check that the array is a JSArray.
3579  __ JumpIfSmi(array, &bailout);
3580  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3581  __ b(ne, &bailout);
3582 
3583  // Check that the array has fast elements.
3584  __ CheckFastElements(scratch1, scratch2, &bailout);
3585 
3586  // If the array has length zero, return the empty string.
3587  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3588  __ SmiUntag(array_length, SetCC);
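 // SmiUntag with SetCC leaves the condition flags set from the untagged
 // length, so the branch below skips the empty-string case whenever the
 // array length is non-zero.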
3589  __ b(ne, &non_trivial_array);
3590  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
3591  __ b(&done);
3592 
3593  __ bind(&non_trivial_array);
3594 
3595  // Get the FixedArray containing array's elements.
3596  elements = array;
3597  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3598  array = no_reg; // End of array's live range.
3599 
3600  // Check that all array elements are sequential ASCII strings, and
3601  // accumulate the sum of their lengths, as a smi-encoded value.
3602  __ mov(string_length, Operand(0));
3603  __ add(element,
3604  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3605  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3606  // Loop condition: while (element < elements_end).
3607  // Live values in registers:
3608  // elements: Fixed array of strings.
3609  // array_length: Length of the fixed array of strings (not smi)
3610  // separator: Separator string
3611  // string_length: Accumulated sum of string lengths (smi).
3612  // element: Current array element.
3613  // elements_end: Array end.
3614  if (generate_debug_code_) {
3615  __ cmp(array_length, Operand(0));
3616  __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
3617  }
3618  __ bind(&loop);
3619  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3620  __ JumpIfSmi(string, &bailout);
3621  __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3622  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3623  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3624  __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3625  __ add(string_length, string_length, Operand(scratch1), SetCC);
3626  __ b(vs, &bailout);
3627  __ cmp(element, elements_end);
3628  __ b(lt, &loop);
3629 
3630  // If array_length is 1, return elements[0], a string.
3631  __ cmp(array_length, Operand(1));
3632  __ b(ne, &not_size_one_array);
3633  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3634  __ b(&done);
3635 
3636  __ bind(&not_size_one_array);
3637 
3638  // Live values in registers:
3639  // separator: Separator string
3640  // array_length: Length of the array.
3641  // string_length: Sum of string lengths (smi).
3642  // elements: FixedArray of strings.
3643 
3644  // Check that the separator is a flat ASCII string.
3645  __ JumpIfSmi(separator, &bailout);
3646  __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3647  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3648  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3649 
3650  // Add (separator length times array_length) - separator length to the
3651  // string_length to get the length of the result string. array_length is not
 3652  // a smi but the other values are, so the result is a smi.
3653  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3654  __ sub(string_length, string_length, Operand(scratch1));
3655  __ smull(scratch2, ip, array_length, scratch1);
3656  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3657  // zero.
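 // A positive smi payload is limited to 31 bits, so the product is only a
 // valid smi if the high word of the 64-bit result is zero and the sign bit
 // of the low word is clear; either condition failing takes the bailout.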
3658  __ cmp(ip, Operand(0));
3659  __ b(ne, &bailout);
3660  __ tst(scratch2, Operand(0x80000000));
3661  __ b(ne, &bailout);
3662  __ add(string_length, string_length, Operand(scratch2), SetCC);
3663  __ b(vs, &bailout);
3664  __ SmiUntag(string_length);
3665 
3666  // Get first element in the array to free up the elements register to be used
3667  // for the result.
3668  __ add(element,
3669  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3670  result = elements; // End of live range for elements.
3671  elements = no_reg;
3672  // Live values in registers:
3673  // element: First array element
3674  // separator: Separator string
3675  // string_length: Length of result string (not smi)
3676  // array_length: Length of the array.
3677  __ AllocateAsciiString(result,
3678  string_length,
3679  scratch1,
3680  scratch2,
3681  elements_end,
3682  &bailout);
3683  // Prepare for looping. Set up elements_end to end of the array. Set
3684  // result_pos to the position of the result where to write the first
3685  // character.
3686  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3687  result_pos = array_length; // End of live range for array_length.
3688  array_length = no_reg;
3689  __ add(result_pos,
 3690  result,
 3691  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3692 
3693  // Check the length of the separator.
3694  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3695  __ cmp(scratch1, Operand(Smi::FromInt(1)));
3696  __ b(eq, &one_char_separator);
3697  __ b(gt, &long_separator);
3698 
3699  // Empty separator case
3700  __ bind(&empty_separator_loop);
3701  // Live values in registers:
3702  // result_pos: the position to which we are currently copying characters.
3703  // element: Current array element.
3704  // elements_end: Array end.
3705 
3706  // Copy next array element to the result.
3707  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3708  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3709  __ SmiUntag(string_length);
3710  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3711  __ CopyBytes(string, result_pos, string_length, scratch1);
3712  __ cmp(element, elements_end);
3713  __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3714  ASSERT(result.is(r0));
3715  __ b(&done);
3716 
3717  // One-character separator case
3718  __ bind(&one_char_separator);
3719  // Replace separator with its ASCII character value.
3720  __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3721  // Jump into the loop after the code that copies the separator, so the first
3722  // element is not preceded by a separator
3723  __ jmp(&one_char_separator_loop_entry);
3724 
3725  __ bind(&one_char_separator_loop);
3726  // Live values in registers:
3727  // result_pos: the position to which we are currently copying characters.
3728  // element: Current array element.
3729  // elements_end: Array end.
3730  // separator: Single separator ASCII char (in lower byte).
3731 
3732  // Copy the separator character to the result.
3733  __ strb(separator, MemOperand(result_pos, 1, PostIndex));
3734 
3735  // Copy next array element to the result.
3736  __ bind(&one_char_separator_loop_entry);
3737  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3738  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3739  __ SmiUntag(string_length);
3740  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3741  __ CopyBytes(string, result_pos, string_length, scratch1);
3742  __ cmp(element, elements_end);
3743  __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
3744  ASSERT(result.is(r0));
3745  __ b(&done);
3746 
3747  // Long separator case (separator is more than one character). Entry is at the
3748  // label long_separator below.
3749  __ bind(&long_separator_loop);
3750  // Live values in registers:
3751  // result_pos: the position to which we are currently copying characters.
3752  // element: Current array element.
3753  // elements_end: Array end.
3754  // separator: Separator string.
3755 
3756  // Copy the separator to the result.
3757  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
3758  __ SmiUntag(string_length);
3759  __ add(string,
 3760  separator,
 3761  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3762  __ CopyBytes(string, result_pos, string_length, scratch1);
3763 
3764  __ bind(&long_separator);
3765  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3766  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3767  __ SmiUntag(string_length);
3768  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3769  __ CopyBytes(string, result_pos, string_length, scratch1);
3770  __ cmp(element, elements_end);
3771  __ b(lt, &long_separator_loop); // End while (element < elements_end).
3772  ASSERT(result.is(r0));
3773  __ b(&done);
3774 
3775  __ bind(&bailout);
3776  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3777  __ bind(&done);
3778  context()->Plug(r0);
3779 }
3780 
3781 
3782 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3783  Handle<String> name = expr->name();
3784  if (name->length() > 0 && name->Get(0) == '_') {
3785  Comment cmnt(masm_, "[ InlineRuntimeCall");
3786  EmitInlineRuntimeCall(expr);
3787  return;
3788  }
3789 
3790  Comment cmnt(masm_, "[ CallRuntime");
3791  ZoneList<Expression*>* args = expr->arguments();
3792 
3793  if (expr->is_jsruntime()) {
3794  // Prepare for calling JS runtime function.
 3795  __ ldr(r0, GlobalObjectOperand());
 3796  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
3797  __ push(r0);
3798  }
3799 
3800  // Push the arguments ("left-to-right").
3801  int arg_count = args->length();
3802  for (int i = 0; i < arg_count; i++) {
3803  VisitForStackValue(args->at(i));
3804  }
3805 
3806  if (expr->is_jsruntime()) {
3807  // Call the JS runtime function.
3808  __ mov(r2, Operand(expr->name()));
3809  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3810  Handle<Code> ic =
3811  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3812  CallIC(ic, mode, expr->CallRuntimeFeedbackId());
 3813  // Restore context register.
 3814  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3815  } else {
3816  // Call the C runtime function.
3817  __ CallRuntime(expr->function(), arg_count);
3818  }
3819  context()->Plug(r0);
3820 }
3821 
3822 
3823 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3824  switch (expr->op()) {
3825  case Token::DELETE: {
3826  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3827  Property* property = expr->expression()->AsProperty();
3828  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3829 
3830  if (property != NULL) {
3831  VisitForStackValue(property->obj());
3832  VisitForStackValue(property->key());
 3833  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
 3834  ? kNonStrictMode : kStrictMode;
3835  __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
3836  __ push(r1);
3837  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3838  context()->Plug(r0);
3839  } else if (proxy != NULL) {
3840  Variable* var = proxy->var();
3841  // Delete of an unqualified identifier is disallowed in strict mode
3842  // but "delete this" is allowed.
3843  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3844  if (var->IsUnallocated()) {
3845  __ ldr(r2, GlobalObjectOperand());
3846  __ mov(r1, Operand(var->name()));
3847  __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3848  __ Push(r2, r1, r0);
3849  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3850  context()->Plug(r0);
3851  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3852  // Result of deleting non-global, non-dynamic variables is false.
3853  // The subexpression does not have side effects.
3854  context()->Plug(var->is_this());
3855  } else {
3856  // Non-global variable. Call the runtime to try to delete from the
3857  // context where the variable was introduced.
3858  __ push(context_register());
3859  __ mov(r2, Operand(var->name()));
3860  __ push(r2);
3861  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3862  context()->Plug(r0);
3863  }
3864  } else {
3865  // Result of deleting non-property, non-variable reference is true.
3866  // The subexpression may have side effects.
3867  VisitForEffect(expr->expression());
3868  context()->Plug(true);
3869  }
3870  break;
3871  }
3872 
3873  case Token::VOID: {
3874  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3875  VisitForEffect(expr->expression());
3876  context()->Plug(Heap::kUndefinedValueRootIndex);
3877  break;
3878  }
3879 
3880  case Token::NOT: {
3881  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3882  if (context()->IsEffect()) {
3883  // Unary NOT has no side effects so it's only necessary to visit the
3884  // subexpression. Match the optimizing compiler by not branching.
3885  VisitForEffect(expr->expression());
3886  } else if (context()->IsTest()) {
3887  const TestContext* test = TestContext::cast(context());
3888  // The labels are swapped for the recursive call.
3889  VisitForControl(expr->expression(),
3890  test->false_label(),
3891  test->true_label(),
3892  test->fall_through());
3893  context()->Plug(test->true_label(), test->false_label());
3894  } else {
3895  // We handle value contexts explicitly rather than simply visiting
3896  // for control and plugging the control flow into the context,
3897  // because we need to prepare a pair of extra administrative AST ids
3898  // for the optimizing compiler.
3899  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3900  Label materialize_true, materialize_false, done;
3901  VisitForControl(expr->expression(),
3902  &materialize_false,
3903  &materialize_true,
3904  &materialize_true);
3905  __ bind(&materialize_true);
3906  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3907  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3908  if (context()->IsStackValue()) __ push(r0);
3909  __ jmp(&done);
3910  __ bind(&materialize_false);
3911  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3912  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3913  if (context()->IsStackValue()) __ push(r0);
3914  __ bind(&done);
3915  }
3916  break;
3917  }
3918 
3919  case Token::TYPEOF: {
3920  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3921  { StackValueContext context(this);
3922  VisitForTypeofValue(expr->expression());
3923  }
3924  __ CallRuntime(Runtime::kTypeof, 1);
3925  context()->Plug(r0);
3926  break;
3927  }
3928 
3929  case Token::ADD: {
3930  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3931  VisitForAccumulatorValue(expr->expression());
3932  Label no_conversion;
3933  __ JumpIfSmi(result_register(), &no_conversion);
3934  ToNumberStub convert_stub;
3935  __ CallStub(&convert_stub);
3936  __ bind(&no_conversion);
3937  context()->Plug(result_register());
3938  break;
3939  }
3940 
3941  case Token::SUB:
3942  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3943  break;
3944 
3945  case Token::BIT_NOT:
3946  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3947  break;
3948 
3949  default:
3950  UNREACHABLE();
3951  }
3952 }
3953 
3954 
3955 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3956  const char* comment) {
3957  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3958  Comment cmt(masm_, comment);
3959  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3960  UnaryOverwriteMode overwrite =
3961  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3962  UnaryOpStub stub(expr->op(), overwrite);
3963  // UnaryOpStub expects the argument to be in the
3964  // accumulator register r0.
3965  VisitForAccumulatorValue(expr->expression());
3966  SetSourcePosition(expr->position());
3967  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
3968  expr->UnaryOperationFeedbackId());
3969  context()->Plug(r0);
3970 }
3971 
3972 
3973 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3974  Comment cmnt(masm_, "[ CountOperation");
3975  SetSourcePosition(expr->position());
3976 
3977  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3978  // as the left-hand side.
3979  if (!expr->expression()->IsValidLeftHandSide()) {
3980  VisitForEffect(expr->expression());
3981  return;
3982  }
3983 
3984  // Expression can only be a property, a global or a (parameter or local)
3985  // slot.
3986  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3987  LhsKind assign_type = VARIABLE;
3988  Property* prop = expr->expression()->AsProperty();
3989  // In case of a property we use the uninitialized expression context
3990  // of the key to detect a named property.
3991  if (prop != NULL) {
3992  assign_type =
3993  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3994  }
3995 
3996  // Evaluate expression and get value.
3997  if (assign_type == VARIABLE) {
3998  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3999  AccumulatorValueContext context(this);
4000  EmitVariableLoad(expr->expression()->AsVariableProxy());
4001  } else {
4002  // Reserve space for result of postfix operation.
4003  if (expr->is_postfix() && !context()->IsEffect()) {
4004  __ mov(ip, Operand(Smi::FromInt(0)));
4005  __ push(ip);
4006  }
4007  if (assign_type == NAMED_PROPERTY) {
4008  // Put the object both on the stack and in the accumulator.
4009  VisitForAccumulatorValue(prop->obj());
4010  __ push(r0);
4011  EmitNamedPropertyLoad(prop);
4012  } else {
4013  VisitForStackValue(prop->obj());
4014  VisitForAccumulatorValue(prop->key());
4015  __ ldr(r1, MemOperand(sp, 0));
4016  __ push(r0);
4017  EmitKeyedPropertyLoad(prop);
4018  }
4019  }
4020 
4021  // We need a second deoptimization point after loading the value
 4022  // in case evaluating the property load may have a side effect.
4023  if (assign_type == VARIABLE) {
4024  PrepareForBailout(expr->expression(), TOS_REG);
4025  } else {
4026  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4027  }
4028 
4029  // Call ToNumber only if operand is not a smi.
4030  Label no_conversion;
4031  __ JumpIfSmi(r0, &no_conversion);
4032  ToNumberStub convert_stub;
4033  __ CallStub(&convert_stub);
4034  __ bind(&no_conversion);
4035 
4036  // Save result for postfix expressions.
4037  if (expr->is_postfix()) {
4038  if (!context()->IsEffect()) {
4039  // Save the result on the stack. If we have a named or keyed property
4040  // we store the result under the receiver that is currently on top
4041  // of the stack.
4042  switch (assign_type) {
4043  case VARIABLE:
4044  __ push(r0);
4045  break;
4046  case NAMED_PROPERTY:
4047  __ str(r0, MemOperand(sp, kPointerSize));
4048  break;
4049  case KEYED_PROPERTY:
4050  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4051  break;
4052  }
4053  }
4054  }
4055 
4056 
4057  // Inline smi case if we are in a loop.
4058  Label stub_call, done;
4059  JumpPatchSite patch_site(masm_);
4060 
4061  int count_value = expr->op() == Token::INC ? 1 : -1;
4062  if (ShouldInlineSmiCase(expr->op())) {
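 // Fast path: add the increment directly to the tagged value. On signed
 // overflow (vs) the add is undone at stub_call; a non-smi result likewise
 // falls through to the generic stub, while the patchable smi check jumps
 // straight to done.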
4063  __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4064  __ b(vs, &stub_call);
4065  // We could eliminate this smi check if we split the code at
4066  // the first smi check before calling ToNumber.
4067  patch_site.EmitJumpIfSmi(r0, &done);
4068 
4069  __ bind(&stub_call);
4070  // Call stub. Undo operation first.
4071  __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4072  }
4073  __ mov(r1, Operand(Smi::FromInt(count_value)));
4074 
4075  // Record position before stub call.
4076  SetSourcePosition(expr->position());
4077 
4078  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4079  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4080  patch_site.EmitPatchInfo();
4081  __ bind(&done);
4082 
4083  // Store the value returned in r0.
4084  switch (assign_type) {
4085  case VARIABLE:
4086  if (expr->is_postfix()) {
4087  { EffectContext context(this);
4088  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4089  Token::ASSIGN);
4090  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4091  context.Plug(r0);
4092  }
 4093  // For all contexts except the effect context, we have the result on
 4094  // top of the stack.
4095  if (!context()->IsEffect()) {
4096  context()->PlugTOS();
4097  }
4098  } else {
4099  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4100  Token::ASSIGN);
4101  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4102  context()->Plug(r0);
4103  }
4104  break;
4105  case NAMED_PROPERTY: {
4106  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
4107  __ pop(r1);
4108  Handle<Code> ic = is_classic_mode()
4109  ? isolate()->builtins()->StoreIC_Initialize()
4110  : isolate()->builtins()->StoreIC_Initialize_Strict();
4111  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4112  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4113  if (expr->is_postfix()) {
4114  if (!context()->IsEffect()) {
4115  context()->PlugTOS();
4116  }
4117  } else {
4118  context()->Plug(r0);
4119  }
4120  break;
4121  }
4122  case KEYED_PROPERTY: {
4123  __ pop(r1); // Key.
4124  __ pop(r2); // Receiver.
4125  Handle<Code> ic = is_classic_mode()
4126  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4127  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4128  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4129  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4130  if (expr->is_postfix()) {
4131  if (!context()->IsEffect()) {
4132  context()->PlugTOS();
4133  }
4134  } else {
4135  context()->Plug(r0);
4136  }
4137  break;
4138  }
4139  }
4140 }
4141 
4142 
4143 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4144  ASSERT(!context()->IsEffect());
4145  ASSERT(!context()->IsTest());
4146  VariableProxy* proxy = expr->AsVariableProxy();
4147  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4148  Comment cmnt(masm_, "Global variable");
4149  __ ldr(r0, GlobalObjectOperand());
4150  __ mov(r2, Operand(proxy->name()));
4151  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4152  // Use a regular load, not a contextual load, to avoid a reference
4153  // error.
4154  CallIC(ic);
4155  PrepareForBailout(expr, TOS_REG);
4156  context()->Plug(r0);
4157  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4158  Label done, slow;
4159 
4160  // Generate code for loading from variables potentially shadowed
4161  // by eval-introduced variables.
4162  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4163 
4164  __ bind(&slow);
4165  __ mov(r0, Operand(proxy->name()));
4166  __ Push(cp, r0);
4167  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4168  PrepareForBailout(expr, TOS_REG);
4169  __ bind(&done);
4170 
4171  context()->Plug(r0);
4172  } else {
4173  // This expression cannot throw a reference error at the top level.
4174  VisitInDuplicateContext(expr);
4175  }
4176 }
4177 
4178 
4179 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4180  Expression* sub_expr,
4181  Handle<String> check) {
4182  Label materialize_true, materialize_false;
4183  Label* if_true = NULL;
4184  Label* if_false = NULL;
4185  Label* fall_through = NULL;
4186  context()->PrepareTest(&materialize_true, &materialize_false,
4187  &if_true, &if_false, &fall_through);
4188 
4189  { AccumulatorValueContext context(this);
4190  VisitForTypeofValue(sub_expr);
4191  }
4192  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4193 
4194  if (check->Equals(isolate()->heap()->number_symbol())) {
 4195  __ JumpIfSmi(r0, if_true);
 4196  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4197  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4198  __ cmp(r0, ip);
4199  Split(eq, if_true, if_false, fall_through);
4200  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4201  __ JumpIfSmi(r0, if_false);
4202  // Check for undetectable objects => false.
4203  __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
 4204  __ b(ge, if_false);
 4205  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4206  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4207  Split(eq, if_true, if_false, fall_through);
4208  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4209  __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4210  __ b(eq, if_true);
4211  __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4212  Split(eq, if_true, if_false, fall_through);
4213  } else if (FLAG_harmony_typeof &&
4214  check->Equals(isolate()->heap()->null_symbol())) {
4215  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4216  Split(eq, if_true, if_false, fall_through);
4217  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4218  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4219  __ b(eq, if_true);
4220  __ JumpIfSmi(r0, if_false);
 4221  // Check for undetectable objects => true.
 4222  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
 4223  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4224  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4225  Split(ne, if_true, if_false, fall_through);
4226 
4227  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4228  __ JumpIfSmi(r0, if_false);
4230  __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4231  __ b(eq, if_true);
4232  __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4233  Split(eq, if_true, if_false, fall_through);
4234  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4235  __ JumpIfSmi(r0, if_false);
4236  if (!FLAG_harmony_typeof) {
4237  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4238  __ b(eq, if_true);
4239  }
4240  // Check for JS objects => true.
4241  __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4242  __ b(lt, if_false);
4243  __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4244  __ b(gt, if_false);
 4245  // Check for undetectable objects => false.
 4246  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4247  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4248  Split(eq, if_true, if_false, fall_through);
4249  } else {
4250  if (if_false != fall_through) __ jmp(if_false);
4251  }
4252  context()->Plug(if_true, if_false);
4253 }
4254 
4255 
4256 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4257  Comment cmnt(masm_, "[ CompareOperation");
4258  SetSourcePosition(expr->position());
4259 
4260  // First we try a fast inlined version of the compare when one of
4261  // the operands is a literal.
4262  if (TryLiteralCompare(expr)) return;
4263 
4264  // Always perform the comparison for its control flow. Pack the result
4265  // into the expression's context after the comparison is performed.
4266  Label materialize_true, materialize_false;
4267  Label* if_true = NULL;
4268  Label* if_false = NULL;
4269  Label* fall_through = NULL;
4270  context()->PrepareTest(&materialize_true, &materialize_false,
4271  &if_true, &if_false, &fall_through);
4272 
4273  Token::Value op = expr->op();
4274  VisitForStackValue(expr->left());
4275  switch (op) {
4276  case Token::IN:
4277  VisitForStackValue(expr->right());
4278  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4279  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4280  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4281  __ cmp(r0, ip);
4282  Split(eq, if_true, if_false, fall_through);
4283  break;
4284 
4285  case Token::INSTANCEOF: {
4286  VisitForStackValue(expr->right());
4287  InstanceofStub stub(InstanceofStub::kNoFlags);
4288  __ CallStub(&stub);
4289  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4290  // The stub returns 0 for true.
4291  __ tst(r0, r0);
4292  Split(eq, if_true, if_false, fall_through);
4293  break;
4294  }
4295 
4296  default: {
4297  VisitForAccumulatorValue(expr->right());
4298  Condition cond = eq;
4299  switch (op) {
4300  case Token::EQ_STRICT:
4301  case Token::EQ:
4302  cond = eq;
4303  break;
4304  case Token::LT:
4305  cond = lt;
4306  break;
4307  case Token::GT:
4308  cond = gt;
4309  break;
4310  case Token::LTE:
4311  cond = le;
4312  break;
4313  case Token::GTE:
4314  cond = ge;
4315  break;
4316  case Token::IN:
4317  case Token::INSTANCEOF:
4318  default:
4319  UNREACHABLE();
4320  }
4321  __ pop(r1);
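 // The left operand was pushed above and is popped into r1; the right
 // operand is already in r0, matching the compare IC's register convention.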
4322 
4323  bool inline_smi_code = ShouldInlineSmiCase(op);
4324  JumpPatchSite patch_site(masm_);
4325  if (inline_smi_code) {
4326  Label slow_case;
4327  __ orr(r2, r0, Operand(r1));
4328  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4329  __ cmp(r1, r0);
4330  Split(cond, if_true, if_false, NULL);
4331  __ bind(&slow_case);
4332  }
4333 
4334  // Record position and call the compare IC.
4335  SetSourcePosition(expr->position());
4336  Handle<Code> ic = CompareIC::GetUninitialized(op);
4337  CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4338  patch_site.EmitPatchInfo();
4339  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4340  __ cmp(r0, Operand(0));
4341  Split(cond, if_true, if_false, fall_through);
4342  }
4343  }
4344 
4345  // Convert the result of the comparison into one expected for this
4346  // expression's context.
4347  context()->Plug(if_true, if_false);
4348 }
4349 
4350 
4351 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4352  Expression* sub_expr,
4353  NilValue nil) {
4354  Label materialize_true, materialize_false;
4355  Label* if_true = NULL;
4356  Label* if_false = NULL;
4357  Label* fall_through = NULL;
4358  context()->PrepareTest(&materialize_true, &materialize_false,
4359  &if_true, &if_false, &fall_through);
4360 
4361  VisitForAccumulatorValue(sub_expr);
4362  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4363  Heap::RootListIndex nil_value = nil == kNullValue ?
4364  Heap::kNullValueRootIndex :
4365  Heap::kUndefinedValueRootIndex;
4366  __ LoadRoot(r1, nil_value);
4367  __ cmp(r0, r1);
4368  if (expr->op() == Token::EQ_STRICT) {
4369  Split(eq, if_true, if_false, fall_through);
4370  } else {
4371  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4372  Heap::kUndefinedValueRootIndex :
4373  Heap::kNullValueRootIndex;
4374  __ b(eq, if_true);
4375  __ LoadRoot(r1, other_nil_value);
4376  __ cmp(r0, r1);
4377  __ b(eq, if_true);
4378  __ JumpIfSmi(r0, if_false);
 4379  // It can be an undetectable object.
 4380  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
 4381  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
4382  __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
4383  __ cmp(r1, Operand(1 << Map::kIsUndetectable));
4384  Split(eq, if_true, if_false, fall_through);
4385  }
4386  context()->Plug(if_true, if_false);
4387 }
4388 
4389 
 4390 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
 4391  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4392  context()->Plug(r0);
4393 }
4394 
4395 
4396 Register FullCodeGenerator::result_register() {
4397  return r0;
4398 }
4399 
4400 
4401 Register FullCodeGenerator::context_register() {
4402  return cp;
4403 }
4404 
4405 
4406 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4407  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4408  __ str(value, MemOperand(fp, frame_offset));
4409 }
4410 
4411 
4412 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4413  __ ldr(dst, ContextOperand(cp, context_index));
4414 }
4415 
4416 
4417 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4418  Scope* declaration_scope = scope()->DeclarationScope();
4419  if (declaration_scope->is_global_scope() ||
4420  declaration_scope->is_module_scope()) {
4421  // Contexts nested in the native context have a canonical empty function
4422  // as their closure, not the anonymous closure containing the global
4423  // code. Pass a smi sentinel and let the runtime look up the empty
4424  // function.
4425  __ mov(ip, Operand(Smi::FromInt(0)));
4426  } else if (declaration_scope->is_eval_scope()) {
4427  // Contexts created by a call to eval have the same closure as the
4428  // context calling eval, not the anonymous closure containing the eval
4429  // code. Fetch it from the context.
 4430  __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
 4431  } else {
 4432  ASSERT(declaration_scope->is_function_scope());
 4433  __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4434  }
4435  __ push(ip);
4436 }
4437 
4438 
4439 // ----------------------------------------------------------------------------
4440 // Non-local control flow support.
4441 
4442 void FullCodeGenerator::EnterFinallyBlock() {
4443  ASSERT(!result_register().is(r1));
4444  // Store result register while executing finally block.
4445  __ push(result_register());
4446  // Cook return address in link register to stack (smi encoded Code* delta)
4447  __ sub(r1, lr, Operand(masm_->CodeObject()));
4449  STATIC_ASSERT(kSmiTag == 0);
4450  __ add(r1, r1, Operand(r1)); // Convert to smi.
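 // The return address is kept as a smi-encoded offset from the code object,
 // so the value pushed onto the stack is ordinary tagged data rather than a
 // raw code pointer; ExitFinallyBlock reverses the encoding before returning.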
4451 
 4452  // Store the cooked return address while executing the finally block.
4453  __ push(r1);
4454 
4455  // Store pending message while executing finally block.
4456  ExternalReference pending_message_obj =
4457  ExternalReference::address_of_pending_message_obj(isolate());
4458  __ mov(ip, Operand(pending_message_obj));
4459  __ ldr(r1, MemOperand(ip));
4460  __ push(r1);
4461 
4462  ExternalReference has_pending_message =
4463  ExternalReference::address_of_has_pending_message(isolate());
4464  __ mov(ip, Operand(has_pending_message));
4465  __ ldr(r1, MemOperand(ip));
4466  __ SmiTag(r1);
4467  __ push(r1);
4468 
4469  ExternalReference pending_message_script =
4470  ExternalReference::address_of_pending_message_script(isolate());
4471  __ mov(ip, Operand(pending_message_script));
4472  __ ldr(r1, MemOperand(ip));
4473  __ push(r1);
4474 }
4475 
4476 
4477 void FullCodeGenerator::ExitFinallyBlock() {
4478  ASSERT(!result_register().is(r1));
4479  // Restore pending message from stack.
4480  __ pop(r1);
4481  ExternalReference pending_message_script =
4482  ExternalReference::address_of_pending_message_script(isolate());
4483  __ mov(ip, Operand(pending_message_script));
4484  __ str(r1, MemOperand(ip));
4485 
4486  __ pop(r1);
4487  __ SmiUntag(r1);
4488  ExternalReference has_pending_message =
4489  ExternalReference::address_of_has_pending_message(isolate());
4490  __ mov(ip, Operand(has_pending_message));
4491  __ str(r1, MemOperand(ip));
4492 
4493  __ pop(r1);
4494  ExternalReference pending_message_obj =
4495  ExternalReference::address_of_pending_message_obj(isolate());
4496  __ mov(ip, Operand(pending_message_obj));
4497  __ str(r1, MemOperand(ip));
4498 
4499  // Restore result register from stack.
4500  __ pop(r1);
4501 
4502  // Uncook return address and return.
4503  __ pop(result_register());
4505  __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
4506  __ add(pc, r1, Operand(masm_->CodeObject()));
4507 }
4508 
4509 
4510 #undef __
4511 
4512 #define __ ACCESS_MASM(masm())
4513 
4514 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4515  int* stack_depth,
4516  int* context_length) {
4517  // The macros used here must preserve the result register.
4518 
4519  // Because the handler block contains the context of the finally
4520  // code, we can restore it directly from there for the finally code
4521  // rather than iteratively unwinding contexts via their previous
4522  // links.
4523  __ Drop(*stack_depth); // Down to the handler block.
4524  if (*context_length > 0) {
4525  // Restore the context to its dedicated register and the stack.
 4526  __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
 4527  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 4528  }
4529  __ PopTryHandler();
4530  __ bl(finally_entry_);
4531 
4532  *stack_depth = 0;
4533  *context_length = 0;
4534  return previous_;
4535 }
4536 
4537 
4538 #undef __
4539 
4540 } } // namespace v8::internal
4541 
4542 #endif // V8_TARGET_ARCH_ARM