v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
full-codegen-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_ARM)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 #include "arm/code-stubs-arm.h"
43 #include "arm/macro-assembler-arm.h"
44 
45 namespace v8 {
46 namespace internal {
47 
48 #define __ ACCESS_MASM(masm_)
49 
50 
51 // A patch site is a location in the code that can be patched. This class
52 // has a number of methods to emit the patchable code and a method,
53 // EmitPatchInfo, to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
55 // immediate value is used) is the delta from the pc to the first instruction of
56 // the patchable code.
57 class JumpPatchSite BASE_EMBEDDED {
58  public:
59  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
60 #ifdef DEBUG
61  info_emitted_ = false;
62 #endif
63  }
64 
65  ~JumpPatchSite() {
66  ASSERT(patch_site_.is_bound() == info_emitted_);
67  }
68 
69  // When initially emitting this, ensure that a jump is always generated to skip
70  // the inlined smi code.
71  void EmitJumpIfNotSmi(Register reg, Label* target) {
72  ASSERT(!patch_site_.is_bound() && !info_emitted_);
73  Assembler::BlockConstPoolScope block_const_pool(masm_);
74  __ bind(&patch_site_);
75  __ cmp(reg, Operand(reg));
76  __ b(eq, target); // Always taken before patched.
77  }
78 
79  // When initially emitting this, ensure that a jump is never generated to skip
80  // the inlined smi code.
81  void EmitJumpIfSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockConstPoolScope block_const_pool(masm_);
84  __ bind(&patch_site_);
85  __ cmp(reg, Operand(reg));
86  __ b(ne, target); // Never taken before patched.
87  }
88 
89  void EmitPatchInfo() {
90  // Block literal pool emission whilst recording patch site information.
91  Assembler::BlockConstPoolScope block_const_pool(masm_);
92  if (patch_site_.is_bound()) {
93  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
94  Register reg;
95  reg.set_code(delta_to_patch_site / kOff12Mask);
96  __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
97 #ifdef DEBUG
98  info_emitted_ = true;
99 #endif
100  } else {
101  __ nop(); // Signals no inlined code.
102  }
103  }
104 
105  private:
106  MacroAssembler* masm_;
107  Label patch_site_;
108 #ifdef DEBUG
109  bool info_emitted_;
110 #endif
111 };
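// Illustrative example of the marker emitted by EmitPatchInfo() (assuming
// kOff12Mask == 0xfff, matching the 12-bit raw immediate described above):
// if the patch info is recorded 4100 instructions after the patch site, the
// marker is "cmp r1, #5", since 1 * 0xfff + 5 == 4100; a delta of 20
// instructions fits entirely in the immediate and is recorded as "cmp r0, #20".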
112 
113 
114 // Generate code for a JS function. On entry to the function the receiver
115 // and arguments have been pushed on the stack left to right. The actual
116 // argument count matches the formal parameter count expected by the
117 // function.
118 //
119 // The live registers are:
120 // o r1: the JS function object being called (i.e., ourselves)
121 // o cp: our context
122 // o fp: our caller's frame pointer
123 // o sp: stack pointer
124 // o lr: return address
125 //
126 // The function builds a JS frame. Please see JavaScriptFrameConstants in
127 // frames-arm.h for its layout.
128 void FullCodeGenerator::Generate() {
129  CompilationInfo* info = info_;
130  handler_table_ =
131  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
132  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
133  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
134  SetFunctionPosition(function());
135  Comment cmnt(masm_, "[ function compiled by full code generator");
136 
137 #ifdef DEBUG
138  if (strlen(FLAG_stop_at) > 0 &&
139  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
140  __ stop("stop-at");
141  }
142 #endif
143 
144  // Strict mode functions and builtins need to replace the receiver
145  // with undefined when called as functions (without an explicit
146  // receiver object). r5 is zero for method calls and non-zero for
147  // function calls.
148  if (!info->is_classic_mode() || info->is_native()) {
149  Label ok;
150  __ cmp(r5, Operand(0));
151  __ b(eq, &ok);
152  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
153  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
154  __ str(r2, MemOperand(sp, receiver_offset));
155  __ bind(&ok);
156  }
157 
158  // Open a frame scope to indicate that there is a frame on the stack. The
159  // MANUAL indicates that the scope shouldn't actually generate code to set up
160  // the frame (that is done below).
161  FrameScope frame_scope(masm_, StackFrame::MANUAL);
162 
163  int locals_count = info->scope()->num_stack_slots();
164 
165  __ Push(lr, fp, cp, r1);
166  if (locals_count > 0) {
167  // Load undefined value here, so the value is ready for the loop
168  // below.
169  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
170  }
171  // Adjust fp to point to caller's fp.
172  __ add(fp, sp, Operand(2 * kPointerSize));
173 
174  { Comment cmnt(masm_, "[ Allocate locals");
175  for (int i = 0; i < locals_count; i++) {
176  __ push(ip);
177  }
178  }
179 
180  bool function_in_register = true;
181 
182  // Possibly allocate a local context.
183  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184  if (heap_slots > 0) {
185  Comment cmnt(masm_, "[ Allocate local context");
186  // Argument to NewContext is the function, which is in r1.
187  __ push(r1);
188  if (heap_slots <= FastNewContextStub::kMaximumSlots) {
189  FastNewContextStub stub(heap_slots);
190  __ CallStub(&stub);
191  } else {
192  __ CallRuntime(Runtime::kNewFunctionContext, 1);
193  }
194  function_in_register = false;
195  // Context is returned in both r0 and cp. It replaces the context
196  // passed to us. It's saved in the stack and kept live in cp.
197  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
198  // Copy any necessary parameters into the context.
199  int num_parameters = info->scope()->num_parameters();
200  for (int i = 0; i < num_parameters; i++) {
201  Variable* var = scope()->parameter(i);
202  if (var->IsContextSlot()) {
203  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
204  (num_parameters - 1 - i) * kPointerSize;
205  // Load parameter from stack.
206  __ ldr(r0, MemOperand(fp, parameter_offset));
207  // Store it in the context.
208  MemOperand target = ContextOperand(cp, var->index());
209  __ str(r0, target);
210 
211  // Update the write barrier.
212  __ RecordWriteContextSlot(
213  cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
214  }
215  }
216  }
217 
218  Variable* arguments = scope()->arguments();
219  if (arguments != NULL) {
220  // Function uses arguments object.
221  Comment cmnt(masm_, "[ Allocate arguments object");
222  if (!function_in_register) {
223  // Load this again, if it's used by the local context below.
224  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
225  } else {
226  __ mov(r3, r1);
227  }
228  // Receiver is just before the parameters on the caller's stack.
229  int num_parameters = info->scope()->num_parameters();
230  int offset = num_parameters * kPointerSize;
231  __ add(r2, fp,
232  Operand(StandardFrameConstants::kCallerSPOffset + offset));
233  __ mov(r1, Operand(Smi::FromInt(num_parameters)));
234  __ Push(r3, r2, r1);
235 
236  // Arguments to ArgumentsAccessStub:
237  // function, receiver address, parameter count.
238  // The stub will rewrite receiver and parameter count if the previous
239  // stack frame was an arguments adapter frame.
240  ArgumentsAccessStub::Type type;
241  if (!is_classic_mode()) {
242  type = ArgumentsAccessStub::NEW_STRICT;
243  } else if (function()->has_duplicate_parameters()) {
244  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
245  } else {
246  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
247  }
248  ArgumentsAccessStub stub(type);
249  __ CallStub(&stub);
250 
251  SetVar(arguments, r0, r1, r2);
252  }
253 
254  if (FLAG_trace) {
255  __ CallRuntime(Runtime::kTraceEnter, 0);
256  }
257 
258  // Visit the declarations and body unless there is an illegal
259  // redeclaration.
260  if (scope()->HasIllegalRedeclaration()) {
261  Comment cmnt(masm_, "[ Declarations");
262  scope()->VisitIllegalRedeclaration(this);
263 
264  } else {
265  PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
266  { Comment cmnt(masm_, "[ Declarations");
267  // For named function expressions, declare the function name as a
268  // constant.
269  if (scope()->is_function_scope() && scope()->function() != NULL) {
270  VariableDeclaration* function = scope()->function();
271  ASSERT(function->proxy()->var()->mode() == CONST ||
272  function->proxy()->var()->mode() == CONST_HARMONY);
273  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
274  VisitVariableDeclaration(function);
275  }
276  VisitDeclarations(scope()->declarations());
277  }
278 
279  { Comment cmnt(masm_, "[ Stack check");
280  PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
281  Label ok;
282  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
283  __ cmp(sp, Operand(ip));
284  __ b(hs, &ok);
285  StackCheckStub stub;
286  __ CallStub(&stub);
287  __ bind(&ok);
288  }
289 
290  { Comment cmnt(masm_, "[ Body");
291  ASSERT(loop_depth() == 0);
292  VisitStatements(function()->body());
293  ASSERT(loop_depth() == 0);
294  }
295  }
296 
297  // Always emit a 'return undefined' in case control fell off the end of
298  // the body.
299  { Comment cmnt(masm_, "[ return <undefined>;");
300  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
301  }
302  EmitReturnSequence();
303 
304  // Force emit the constant pool, so it doesn't get emitted in the middle
305  // of the stack check table.
306  masm()->CheckConstPool(true, false);
307 }
308 
309 
310 void FullCodeGenerator::ClearAccumulator() {
311  __ mov(r0, Operand(Smi::FromInt(0)));
312 }
313 
314 
315 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
316  __ mov(r2, Operand(profiling_counter_));
317  __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
318  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
319  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
320 }
321 
322 
323 void FullCodeGenerator::EmitProfilingCounterReset() {
324  int reset_value = FLAG_interrupt_budget;
325  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
326  // Self-optimization is a one-off thing: if it fails, don't try again.
327  reset_value = Smi::kMaxValue;
328  }
329  if (isolate()->IsDebuggerActive()) {
330  // Detect debug break requests as soon as possible.
331  reset_value = 10;
332  }
333  __ mov(r2, Operand(profiling_counter_));
334  __ mov(r3, Operand(Smi::FromInt(reset_value)));
335  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
336 }
337 
338 
339 static const int kMaxBackEdgeWeight = 127;
340 static const int kBackEdgeDistanceDivisor = 142;
341 
342 
343 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
344  Label* back_edge_target) {
345  Comment cmnt(masm_, "[ Stack check");
346  // Block literal pools whilst emitting stack check code.
347  Assembler::BlockConstPoolScope block_const_pool(masm_);
348  Label ok;
349 
350  if (FLAG_count_based_interrupts) {
351  int weight = 1;
352  if (FLAG_weighted_back_edges) {
353  ASSERT(back_edge_target->is_bound());
354  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
355  weight = Min(kMaxBackEdgeWeight,
356  Max(1, distance / kBackEdgeDistanceDivisor));
357  }
358  EmitProfilingCounterDecrement(weight);
359  __ b(pl, &ok);
360  InterruptStub stub;
361  __ CallStub(&stub);
362  } else {
363  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
364  __ cmp(sp, Operand(ip));
365  __ b(hs, &ok);
366  StackCheckStub stub;
367  __ CallStub(&stub);
368  }
369 
370  // Record a mapping of this PC offset to the OSR id. This is used to find
371  // the AST id from the unoptimized code in order to use it as a key into
372  // the deoptimization input data found in the optimized code.
373  RecordStackCheck(stmt->OsrEntryId());
374 
375  if (FLAG_count_based_interrupts) {
376  EmitProfilingCounterReset();
377  }
378 
379  __ bind(&ok);
380  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
381  // Record a mapping of the OSR id to this PC. This is used if the OSR
382  // entry becomes the target of a bailout. We don't expect it to be, but
383  // we want it to work if it is.
384  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
385 }
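// Illustrative arithmetic for the back-edge weight computed above, using
// kMaxBackEdgeWeight = 127 and kBackEdgeDistanceDivisor = 142: a back edge
// 1000 bytes from its target gets weight 1000 / 142 = 7, a very tight loop
// (distance < 142) is clamped up to 1, and any distance beyond roughly
// 127 * 142 = 18034 bytes is clamped down to 127.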
386 
387 
388 void FullCodeGenerator::EmitReturnSequence() {
389  Comment cmnt(masm_, "[ Return sequence");
390  if (return_label_.is_bound()) {
391  __ b(&return_label_);
392  } else {
393  __ bind(&return_label_);
394  if (FLAG_trace) {
395  // Push the return value on the stack as the parameter.
396  // Runtime::TraceExit returns its parameter in r0.
397  __ push(r0);
398  __ CallRuntime(Runtime::kTraceExit, 1);
399  }
400  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
401  // Pretend that the exit is a backwards jump to the entry.
402  int weight = 1;
403  if (info_->ShouldSelfOptimize()) {
404  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
405  } else if (FLAG_weighted_back_edges) {
406  int distance = masm_->pc_offset();
407  weight = Min(kMaxBackEdgeWeight,
408  Max(1, distance / kBackEdgeDistanceDivisor));
409  }
410  EmitProfilingCounterDecrement(weight);
411  Label ok;
412  __ b(pl, &ok);
413  __ push(r0);
414  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
415  __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
416  __ push(r2);
417  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
418  } else {
419  InterruptStub stub;
420  __ CallStub(&stub);
421  }
422  __ pop(r0);
423  EmitProfilingCounterReset();
424  __ bind(&ok);
425  }
426 
427 #ifdef DEBUG
428  // Add a label for checking the size of the code used for returning.
429  Label check_exit_codesize;
430  masm_->bind(&check_exit_codesize);
431 #endif
432  // Make sure that the constant pool is not emitted inside of the return
433  // sequence.
434  { Assembler::BlockConstPoolScope block_const_pool(masm_);
435  // Here we use masm_-> instead of the __ macro to keep the code coverage
436  // tool from instrumenting it, as we rely on the code size here.
437  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
438  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
439  __ RecordJSReturn();
440  masm_->mov(sp, fp);
441  masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
442  masm_->add(sp, sp, Operand(sp_delta));
443  masm_->Jump(lr);
444  }
445 
446 #ifdef DEBUG
447  // Check that the size of the code used for returning is large enough
448  // for the debugger's requirements.
449  ASSERT(Assembler::kJSReturnSequenceInstructions <=
450  masm_->InstructionsGeneratedSince(&check_exit_codesize));
451 #endif
452  }
453 }
454 
455 
456 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
457  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
458 }
459 
460 
461 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
462  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
463  codegen()->GetVar(result_register(), var);
464 }
465 
466 
467 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
468  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
469  codegen()->GetVar(result_register(), var);
470  __ push(result_register());
471 }
472 
473 
474 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
475  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
476  // For simplicity we always test the accumulator register.
477  codegen()->GetVar(result_register(), var);
478  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
479  codegen()->DoTest(this);
480 }
481 
482 
483 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
484 }
485 
486 
487 void FullCodeGenerator::AccumulatorValueContext::Plug(
488  Heap::RootListIndex index) const {
489  __ LoadRoot(result_register(), index);
490 }
491 
492 
493 void FullCodeGenerator::StackValueContext::Plug(
494  Heap::RootListIndex index) const {
495  __ LoadRoot(result_register(), index);
496  __ push(result_register());
497 }
498 
499 
500 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
501  codegen()->PrepareForBailoutBeforeSplit(condition(),
502  true,
503  true_label_,
504  false_label_);
505  if (index == Heap::kUndefinedValueRootIndex ||
506  index == Heap::kNullValueRootIndex ||
507  index == Heap::kFalseValueRootIndex) {
508  if (false_label_ != fall_through_) __ b(false_label_);
509  } else if (index == Heap::kTrueValueRootIndex) {
510  if (true_label_ != fall_through_) __ b(true_label_);
511  } else {
512  __ LoadRoot(result_register(), index);
513  codegen()->DoTest(this);
514  }
515 }
516 
517 
518 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
519 }
520 
521 
522 void FullCodeGenerator::AccumulatorValueContext::Plug(
523  Handle<Object> lit) const {
524  __ mov(result_register(), Operand(lit));
525 }
526 
527 
528 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
529  // Immediates cannot be pushed directly.
530  __ mov(result_register(), Operand(lit));
531  __ push(result_register());
532 }
533 
534 
535 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
536  codegen()->PrepareForBailoutBeforeSplit(condition(),
537  true,
538  true_label_,
539  false_label_);
540  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
541  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
542  if (false_label_ != fall_through_) __ b(false_label_);
543  } else if (lit->IsTrue() || lit->IsJSObject()) {
544  if (true_label_ != fall_through_) __ b(true_label_);
545  } else if (lit->IsString()) {
546  if (String::cast(*lit)->length() == 0) {
547  if (false_label_ != fall_through_) __ b(false_label_);
548  } else {
549  if (true_label_ != fall_through_) __ b(true_label_);
550  }
551  } else if (lit->IsSmi()) {
552  if (Smi::cast(*lit)->value() == 0) {
553  if (false_label_ != fall_through_) __ b(false_label_);
554  } else {
555  if (true_label_ != fall_through_) __ b(true_label_);
556  }
557  } else {
558  // For simplicity we always test the accumulator register.
559  __ mov(result_register(), Operand(lit));
560  codegen()->DoTest(this);
561  }
562 }
563 
564 
565 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
566  Register reg) const {
567  ASSERT(count > 0);
568  __ Drop(count);
569 }
570 
571 
572 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
573  int count,
574  Register reg) const {
575  ASSERT(count > 0);
576  __ Drop(count);
577  __ Move(result_register(), reg);
578 }
579 
580 
581 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
582  Register reg) const {
583  ASSERT(count > 0);
584  if (count > 1) __ Drop(count - 1);
585  __ str(reg, MemOperand(sp, 0));
586 }
587 
588 
589 void FullCodeGenerator::TestContext::DropAndPlug(int count,
590  Register reg) const {
591  ASSERT(count > 0);
592  // For simplicity we always test the accumulator register.
593  __ Drop(count);
594  __ Move(result_register(), reg);
595  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
596  codegen()->DoTest(this);
597 }
598 
599 
600 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
601  Label* materialize_false) const {
602  ASSERT(materialize_true == materialize_false);
603  __ bind(materialize_true);
604 }
605 
606 
607 void FullCodeGenerator::AccumulatorValueContext::Plug(
608  Label* materialize_true,
609  Label* materialize_false) const {
610  Label done;
611  __ bind(materialize_true);
612  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
613  __ jmp(&done);
614  __ bind(materialize_false);
615  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
616  __ bind(&done);
617 }
618 
619 
620 void FullCodeGenerator::StackValueContext::Plug(
621  Label* materialize_true,
622  Label* materialize_false) const {
623  Label done;
624  __ bind(materialize_true);
625  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
626  __ push(ip);
627  __ jmp(&done);
628  __ bind(materialize_false);
629  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
630  __ push(ip);
631  __ bind(&done);
632 }
633 
634 
635 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
636  Label* materialize_false) const {
637  ASSERT(materialize_true == true_label_);
638  ASSERT(materialize_false == false_label_);
639 }
640 
641 
642 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
643 }
644 
645 
646 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
647  Heap::RootListIndex value_root_index =
648  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
649  __ LoadRoot(result_register(), value_root_index);
650 }
651 
652 
653 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
654  Heap::RootListIndex value_root_index =
655  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
656  __ LoadRoot(ip, value_root_index);
657  __ push(ip);
658 }
659 
660 
661 void FullCodeGenerator::TestContext::Plug(bool flag) const {
662  codegen()->PrepareForBailoutBeforeSplit(condition(),
663  true,
664  true_label_,
665  false_label_);
666  if (flag) {
667  if (true_label_ != fall_through_) __ b(true_label_);
668  } else {
669  if (false_label_ != fall_through_) __ b(false_label_);
670  }
671 }
672 
673 
674 void FullCodeGenerator::DoTest(Expression* condition,
675  Label* if_true,
676  Label* if_false,
677  Label* fall_through) {
678  if (CpuFeatures::IsSupported(VFP3)) {
679  ToBooleanStub stub(result_register());
680  __ CallStub(&stub);
681  __ tst(result_register(), result_register());
682  } else {
683  // Call the runtime to find the boolean value of the source and then
684  // translate it into control flow to the pair of labels.
685  __ push(result_register());
686  __ CallRuntime(Runtime::kToBool, 1);
687  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
688  __ cmp(r0, ip);
689  }
690  Split(ne, if_true, if_false, fall_through);
691 }
692 
693 
694 void FullCodeGenerator::Split(Condition cond,
695  Label* if_true,
696  Label* if_false,
697  Label* fall_through) {
698  if (if_false == fall_through) {
699  __ b(cond, if_true);
700  } else if (if_true == fall_through) {
701  __ b(NegateCondition(cond), if_false);
702  } else {
703  __ b(cond, if_true);
704  __ b(if_false);
705  }
706 }
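// Illustrative behaviour of Split(): Split(eq, if_true, if_false, fall_through)
// emits only "b eq, if_true" when if_false is the fall-through label, only
// "b ne, if_false" (the negated condition) when if_true is the fall-through
// label, and both "b eq, if_true" and an unconditional "b if_false" otherwise.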
707 
708 
709 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
710  ASSERT(var->IsStackAllocated());
711  // Offset is negative because higher indexes are at lower addresses.
712  int offset = -var->index() * kPointerSize;
713  // Adjust by a (parameter or local) base offset.
714  if (var->IsParameter()) {
715  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
716  } else {
717  offset += JavaScriptFrameConstants::kLocal0Offset;
718  }
719  return MemOperand(fp, offset);
720 }
721 
722 
723 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
724  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
725  if (var->IsContextSlot()) {
726  int context_chain_length = scope()->ContextChainLength(var->scope());
727  __ LoadContext(scratch, context_chain_length);
728  return ContextOperand(scratch, var->index());
729  } else {
730  return StackOperand(var);
731  }
732 }
733 
734 
735 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
736  // Use destination as scratch.
737  MemOperand location = VarOperand(var, dest);
738  __ ldr(dest, location);
739 }
740 
741 
742 void FullCodeGenerator::SetVar(Variable* var,
743  Register src,
744  Register scratch0,
745  Register scratch1) {
746  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
747  ASSERT(!scratch0.is(src));
748  ASSERT(!scratch0.is(scratch1));
749  ASSERT(!scratch1.is(src));
750  MemOperand location = VarOperand(var, scratch0);
751  __ str(src, location);
752 
753  // Emit the write barrier code if the location is in the heap.
754  if (var->IsContextSlot()) {
755  __ RecordWriteContextSlot(scratch0,
756  location.offset(),
757  src,
758  scratch1,
759  kLRHasBeenSaved,
760  kDontSaveFPRegs);
761  }
762 }
763 
764 
765 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
766  bool should_normalize,
767  Label* if_true,
768  Label* if_false) {
769  // Only prepare for bailouts before splits if we're in a test
770  // context. Otherwise, we let the Visit function deal with the
771  // preparation to avoid preparing with the same AST id twice.
772  if (!context()->IsTest() || !info_->IsOptimizable()) return;
773 
774  Label skip;
775  if (should_normalize) __ b(&skip);
776  PrepareForBailout(expr, TOS_REG);
777  if (should_normalize) {
778  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
779  __ cmp(r0, ip);
780  Split(eq, if_true, if_false, NULL);
781  __ bind(&skip);
782  }
783 }
784 
785 
786 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
787  // The variable in the declaration always resides in the current function
788  // context.
789  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
790  if (FLAG_debug_code) {
791  // Check that we're not inside a with or catch context.
792  __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
793  __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
794  __ Check(ne, "Declaration in with context.");
795  __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
796  __ Check(ne, "Declaration in catch context.");
797  }
798 }
799 
800 
801 void FullCodeGenerator::VisitVariableDeclaration(
802  VariableDeclaration* declaration) {
803  // If it was not possible to allocate the variable at compile time, we
804  // need to "declare" it at runtime to make sure it actually exists in the
805  // local context.
806  VariableProxy* proxy = declaration->proxy();
807  VariableMode mode = declaration->mode();
808  Variable* variable = proxy->var();
809  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
810  switch (variable->location()) {
811  case Variable::UNALLOCATED:
812  globals_->Add(variable->name(), zone());
813  globals_->Add(variable->binding_needs_init()
814  ? isolate()->factory()->the_hole_value()
815  : isolate()->factory()->undefined_value(),
816  zone());
817  break;
818 
819  case Variable::PARAMETER:
820  case Variable::LOCAL:
821  if (hole_init) {
822  Comment cmnt(masm_, "[ VariableDeclaration");
823  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
824  __ str(ip, StackOperand(variable));
825  }
826  break;
827 
828  case Variable::CONTEXT:
829  if (hole_init) {
830  Comment cmnt(masm_, "[ VariableDeclaration");
831  EmitDebugCheckDeclarationContext(variable);
832  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
833  __ str(ip, ContextOperand(cp, variable->index()));
834  // No write barrier since the_hole_value is in old space.
835  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
836  }
837  break;
838 
839  case Variable::LOOKUP: {
840  Comment cmnt(masm_, "[ VariableDeclaration");
841  __ mov(r2, Operand(variable->name()));
842  // Declaration nodes are always introduced in one of four modes.
843  ASSERT(mode == VAR || mode == LET ||
844  mode == CONST || mode == CONST_HARMONY);
845  PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
846  ? READ_ONLY : NONE;
847  __ mov(r1, Operand(Smi::FromInt(attr)));
848  // Push initial value, if any.
849  // Note: For variables we must not push an initial value (such as
850  // 'undefined') because we may have a (legal) redeclaration and we
851  // must not destroy the current value.
852  if (hole_init) {
853  __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
854  __ Push(cp, r2, r1, r0);
855  } else {
856  __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
857  __ Push(cp, r2, r1, r0);
858  }
859  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
860  break;
861  }
862  }
863 }
864 
865 
866 void FullCodeGenerator::VisitFunctionDeclaration(
867  FunctionDeclaration* declaration) {
868  VariableProxy* proxy = declaration->proxy();
869  Variable* variable = proxy->var();
870  switch (variable->location()) {
871  case Variable::UNALLOCATED: {
872  globals_->Add(variable->name(), zone());
873  Handle<SharedFunctionInfo> function =
874  Compiler::BuildFunctionInfo(declaration->fun(), script());
875  // Check for stack-overflow exception.
876  if (function.is_null()) return SetStackOverflow();
877  globals_->Add(function, zone());
878  break;
879  }
880 
881  case Variable::PARAMETER:
882  case Variable::LOCAL: {
883  Comment cmnt(masm_, "[ FunctionDeclaration");
884  VisitForAccumulatorValue(declaration->fun());
885  __ str(result_register(), StackOperand(variable));
886  break;
887  }
888 
889  case Variable::CONTEXT: {
890  Comment cmnt(masm_, "[ FunctionDeclaration");
891  EmitDebugCheckDeclarationContext(variable);
892  VisitForAccumulatorValue(declaration->fun());
893  __ str(result_register(), ContextOperand(cp, variable->index()));
894  int offset = Context::SlotOffset(variable->index());
895  // We know that we have written a function, which is not a smi.
896  __ RecordWriteContextSlot(cp,
897  offset,
898  result_register(),
899  r2,
900  kLRHasBeenSaved,
901  kDontSaveFPRegs,
902  EMIT_REMEMBERED_SET,
903  OMIT_SMI_CHECK);
904  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
905  break;
906  }
907 
908  case Variable::LOOKUP: {
909  Comment cmnt(masm_, "[ FunctionDeclaration");
910  __ mov(r2, Operand(variable->name()));
911  __ mov(r1, Operand(Smi::FromInt(NONE)));
912  __ Push(cp, r2, r1);
913  // Push initial value for function declaration.
914  VisitForStackValue(declaration->fun());
915  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
916  break;
917  }
918  }
919 }
920 
921 
922 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
923  VariableProxy* proxy = declaration->proxy();
924  Variable* variable = proxy->var();
925  Handle<JSModule> instance = declaration->module()->interface()->Instance();
926  ASSERT(!instance.is_null());
927 
928  switch (variable->location()) {
929  case Variable::UNALLOCATED: {
930  Comment cmnt(masm_, "[ ModuleDeclaration");
931  globals_->Add(variable->name(), zone());
932  globals_->Add(instance, zone());
933  Visit(declaration->module());
934  break;
935  }
936 
937  case Variable::CONTEXT: {
938  Comment cmnt(masm_, "[ ModuleDeclaration");
939  EmitDebugCheckDeclarationContext(variable);
940  __ mov(r1, Operand(instance));
941  __ str(r1, ContextOperand(cp, variable->index()));
942  Visit(declaration->module());
943  break;
944  }
945 
946  case Variable::PARAMETER:
947  case Variable::LOCAL:
948  case Variable::LOOKUP:
949  UNREACHABLE();
950  }
951 }
952 
953 
954 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
955  VariableProxy* proxy = declaration->proxy();
956  Variable* variable = proxy->var();
957  switch (variable->location()) {
958  case Variable::UNALLOCATED:
959  // TODO(rossberg)
960  break;
961 
962  case Variable::CONTEXT: {
963  Comment cmnt(masm_, "[ ImportDeclaration");
964  EmitDebugCheckDeclarationContext(variable);
965  // TODO(rossberg)
966  break;
967  }
968 
969  case Variable::PARAMETER:
970  case Variable::LOCAL:
971  case Variable::LOOKUP:
972  UNREACHABLE();
973  }
974 }
975 
976 
977 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
978  // TODO(rossberg)
979 }
980 
981 
982 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
983  // Call the runtime to declare the globals.
984  // The context is the first argument.
985  __ mov(r1, Operand(pairs));
986  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
987  __ Push(cp, r1, r0);
988  __ CallRuntime(Runtime::kDeclareGlobals, 3);
989  // Return value is ignored.
990 }
991 
992 
993 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
994  Comment cmnt(masm_, "[ SwitchStatement");
995  Breakable nested_statement(this, stmt);
996  SetStatementPosition(stmt);
997 
998  // Keep the switch value on the stack until a case matches.
999  VisitForStackValue(stmt->tag());
1000  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1001 
1002  ZoneList<CaseClause*>* clauses = stmt->cases();
1003  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1004 
1005  Label next_test; // Recycled for each test.
1006  // Compile all the tests with branches to their bodies.
1007  for (int i = 0; i < clauses->length(); i++) {
1008  CaseClause* clause = clauses->at(i);
1009  clause->body_target()->Unuse();
1010 
1011  // The default is not a test, but remember it as final fall through.
1012  if (clause->is_default()) {
1013  default_clause = clause;
1014  continue;
1015  }
1016 
1017  Comment cmnt(masm_, "[ Case comparison");
1018  __ bind(&next_test);
1019  next_test.Unuse();
1020 
1021  // Compile the label expression.
1022  VisitForAccumulatorValue(clause->label());
1023 
1024  // Perform the comparison as if via '==='.
1025  __ ldr(r1, MemOperand(sp, 0)); // Switch value.
1026  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1027  JumpPatchSite patch_site(masm_);
1028  if (inline_smi_code) {
1029  Label slow_case;
1030  __ orr(r2, r1, r0);
1031  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
1032 
1033  __ cmp(r1, r0);
1034  __ b(ne, &next_test);
1035  __ Drop(1); // Switch value is no longer needed.
1036  __ b(clause->body_target());
1037  __ bind(&slow_case);
1038  }
1039 
1040  // Record position before stub call for type feedback.
1041  SetSourcePosition(clause->position());
1042  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1043  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1044  patch_site.EmitPatchInfo();
1045 
1046  __ cmp(r0, Operand(0));
1047  __ b(ne, &next_test);
1048  __ Drop(1); // Switch value is no longer needed.
1049  __ b(clause->body_target());
1050  }
1051 
1052  // Discard the test value and jump to the default if present, otherwise to
1053  // the end of the statement.
1054  __ bind(&next_test);
1055  __ Drop(1); // Switch value is no longer needed.
1056  if (default_clause == NULL) {
1057  __ b(nested_statement.break_label());
1058  } else {
1059  __ b(default_clause->body_target());
1060  }
1061 
1062  // Compile all the case bodies.
1063  for (int i = 0; i < clauses->length(); i++) {
1064  Comment cmnt(masm_, "[ Case body");
1065  CaseClause* clause = clauses->at(i);
1066  __ bind(clause->body_target());
1067  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1068  VisitStatements(clause->statements());
1069  }
1070 
1071  __ bind(nested_statement.break_label());
1072  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1073 }
1074 
1075 
1076 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1077  Comment cmnt(masm_, "[ ForInStatement");
1078  SetStatementPosition(stmt);
1079 
1080  Label loop, exit;
1081  ForIn loop_statement(this, stmt);
1082  increment_loop_depth();
1083 
1084  // Get the object to enumerate over. Both SpiderMonkey and JSC
1085  // ignore null and undefined in contrast to the specification; see
1086  // ECMA-262 section 12.6.4.
1087  VisitForAccumulatorValue(stmt->enumerable());
1088  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1089  __ cmp(r0, ip);
1090  __ b(eq, &exit);
1091  Register null_value = r5;
1092  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1093  __ cmp(r0, null_value);
1094  __ b(eq, &exit);
1095 
1096  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1097 
1098  // Convert the object to a JS object.
1099  Label convert, done_convert;
1100  __ JumpIfSmi(r0, &convert);
1101  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1102  __ b(ge, &done_convert);
1103  __ bind(&convert);
1104  __ push(r0);
1105  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1106  __ bind(&done_convert);
1107  __ push(r0);
1108 
1109  // Check for proxies.
1110  Label call_runtime;
1111  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1112  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1113  __ b(le, &call_runtime);
1114 
1115  // Check cache validity in generated code. This is a fast case for
1116  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1117  // guarantee cache validity, call the runtime system to check cache
1118  // validity or get the property names in a fixed array.
1119  __ CheckEnumCache(null_value, &call_runtime);
1120 
1121  // The enum cache is valid. Load the map of the object being
1122  // iterated over and use the cache for the iteration.
1123  Label use_cache;
1124  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1125  __ b(&use_cache);
1126 
1127  // Get the set of properties to enumerate.
1128  __ bind(&call_runtime);
1129  __ push(r0); // Duplicate the enumerable object on the stack.
1130  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1131 
1132  // If we got a map from the runtime call, we can do a fast
1133  // modification check. Otherwise, we got a fixed array, and we have
1134  // to do a slow check.
1135  Label fixed_array;
1136  __ mov(r2, r0);
1137  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
1138  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1139  __ cmp(r1, ip);
1140  __ b(ne, &fixed_array);
1141 
1142  // We got a map in register r0. Get the enumeration cache from it.
1143  __ bind(&use_cache);
1144  __ LoadInstanceDescriptors(r0, r1);
1145  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
1146  __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
1147 
1148  // Set up the four remaining stack slots.
1149  __ push(r0); // Map.
1150  __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
1151  __ mov(r0, Operand(Smi::FromInt(0)));
1152  // Push enumeration cache, enumeration cache length (as smi) and zero.
1153  __ Push(r2, r1, r0);
1154  __ jmp(&loop);
1155 
1156  // We got a fixed array in register r0. Iterate through that.
1157  Label non_proxy;
1158  __ bind(&fixed_array);
1159 
1160  Handle<JSGlobalPropertyCell> cell =
1161  isolate()->factory()->NewJSGlobalPropertyCell(
1162  Handle<Object>(
1163  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1164  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1165  __ LoadHeapObject(r1, cell);
1166  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1167  __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
1168 
1169  __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1170  __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1171  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1172  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1173  __ b(gt, &non_proxy);
1174  __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1175  __ bind(&non_proxy);
1176  __ Push(r1, r0); // Smi and array
1177  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1178  __ mov(r0, Operand(Smi::FromInt(0)));
1179  __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1180 
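// At this point the five stack slots used by the loop below (and dropped with
// "__ Drop(5)" at the break label) are in place; assuming kPointerSize == 4 on
// ARM they are: [sp + 0] the current index (smi), [sp + 4] the length (smi),
// [sp + 8] the FixedArray of keys (or the enum cache), [sp + 12] the expected
// map, or a smi marker in the slow case (0 indicates a proxy), and [sp + 16]
// the enumerable object itself.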
1181  // Generate code for doing the condition check.
1182  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1183  __ bind(&loop);
1184  // Load the current count to r0, load the length to r1.
1185  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1186  __ cmp(r0, r1); // Compare to the array length.
1187  __ b(hs, loop_statement.break_label());
1188 
1189  // Get the current entry of the array into register r3.
1190  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1191  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1192  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1193 
1194  // Get the expected map from the stack or a smi in the
1195  // permanent slow case into register r2.
1196  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1197 
1198  // Check if the expected map still matches that of the enumerable.
1199  // If not, we may have to filter the key.
1200  Label update_each;
1201  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1202  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1203  __ cmp(r4, Operand(r2));
1204  __ b(eq, &update_each);
1205 
1206  // For proxies, no filtering is done.
1207  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1208  __ cmp(r2, Operand(Smi::FromInt(0)));
1209  __ b(eq, &update_each);
1210 
1211  // Convert the entry to a string or (smi) 0 if it isn't a property
1212  // any more. If the property has been removed while iterating, we
1213  // just skip it.
1214  __ push(r1); // Enumerable.
1215  __ push(r3); // Current entry.
1216  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1217  __ mov(r3, Operand(r0), SetCC);
1218  __ b(eq, loop_statement.continue_label());
1219 
1220  // Update the 'each' property or variable from the possibly filtered
1221  // entry in register r3.
1222  __ bind(&update_each);
1223  __ mov(result_register(), r3);
1224  // Perform the assignment as if via '='.
1225  { EffectContext context(this);
1226  EmitAssignment(stmt->each());
1227  }
1228 
1229  // Generate code for the body of the loop.
1230  Visit(stmt->body());
1231 
1232  // Generate code for going to the next element by incrementing
1233  // the index (smi) stored on top of the stack.
1234  __ bind(loop_statement.continue_label());
1235  __ pop(r0);
1236  __ add(r0, r0, Operand(Smi::FromInt(1)));
1237  __ push(r0);
1238 
1239  EmitStackCheck(stmt, &loop);
1240  __ b(&loop);
1241 
1242  // Remove the pointers stored on the stack.
1243  __ bind(loop_statement.break_label());
1244  __ Drop(5);
1245 
1246  // Exit and decrement the loop depth.
1247  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1248  __ bind(&exit);
1249  decrement_loop_depth();
1250 }
1251 
1252 
1253 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1254  bool pretenure) {
1255  // Use the fast case closure allocation code that allocates in new
1256  // space for nested functions that don't need literals cloning. If
1257  // we're running with the --always-opt or the --prepare-always-opt
1258  // flag, we need to use the runtime function so that the new function
1259  // we are creating here gets a chance to have its code optimized and
1260  // doesn't just get a copy of the existing unoptimized code.
1261  if (!FLAG_always_opt &&
1262  !FLAG_prepare_always_opt &&
1263  !pretenure &&
1264  scope()->is_function_scope() &&
1265  info->num_literals() == 0) {
1266  FastNewClosureStub stub(info->language_mode());
1267  __ mov(r0, Operand(info));
1268  __ push(r0);
1269  __ CallStub(&stub);
1270  } else {
1271  __ mov(r0, Operand(info));
1272  __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1273  : Heap::kFalseValueRootIndex);
1274  __ Push(cp, r0, r1);
1275  __ CallRuntime(Runtime::kNewClosure, 3);
1276  }
1277  context()->Plug(r0);
1278 }
1279 
1280 
1281 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1282  Comment cmnt(masm_, "[ VariableProxy");
1283  EmitVariableLoad(expr);
1284 }
1285 
1286 
1287 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1288  TypeofState typeof_state,
1289  Label* slow) {
1290  Register current = cp;
1291  Register next = r1;
1292  Register temp = r2;
1293 
1294  Scope* s = scope();
1295  while (s != NULL) {
1296  if (s->num_heap_slots() > 0) {
1297  if (s->calls_non_strict_eval()) {
1298  // Check that extension is NULL.
1299  __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1300  __ tst(temp, temp);
1301  __ b(ne, slow);
1302  }
1303  // Load next context in chain.
1304  __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1305  // Walk the rest of the chain without clobbering cp.
1306  current = next;
1307  }
1308  // If no outer scope calls eval, we do not need to check more
1309  // context extensions.
1310  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1311  s = s->outer_scope();
1312  }
1313 
1314  if (s->is_eval_scope()) {
1315  Label loop, fast;
1316  if (!current.is(next)) {
1317  __ Move(next, current);
1318  }
1319  __ bind(&loop);
1320  // Terminate at global context.
1321  __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1322  __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
1323  __ cmp(temp, ip);
1324  __ b(eq, &fast);
1325  // Check that extension is NULL.
1326  __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1327  __ tst(temp, temp);
1328  __ b(ne, slow);
1329  // Load next context in chain.
1330  __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1331  __ b(&loop);
1332  __ bind(&fast);
1333  }
1334 
1335  __ ldr(r0, GlobalObjectOperand());
1336  __ mov(r2, Operand(var->name()));
1337  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1338  ? RelocInfo::CODE_TARGET
1339  : RelocInfo::CODE_TARGET_CONTEXT;
1340  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1341  CallIC(ic, mode);
1342 }
1343 
1344 
1345 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1346  Label* slow) {
1347  ASSERT(var->IsContextSlot());
1348  Register context = cp;
1349  Register next = r3;
1350  Register temp = r4;
1351 
1352  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1353  if (s->num_heap_slots() > 0) {
1354  if (s->calls_non_strict_eval()) {
1355  // Check that extension is NULL.
1356  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1357  __ tst(temp, temp);
1358  __ b(ne, slow);
1359  }
1360  __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1361  // Walk the rest of the chain without clobbering cp.
1362  context = next;
1363  }
1364  }
1365  // Check that last extension is NULL.
1366  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1367  __ tst(temp, temp);
1368  __ b(ne, slow);
1369 
1370  // This function is used only for loads, not stores, so it's safe to
1371  // return a cp-based operand (the write barrier cannot be allowed to
1372  // destroy the cp register).
1373  return ContextOperand(context, var->index());
1374 }
1375 
1376 
1377 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1378  TypeofState typeof_state,
1379  Label* slow,
1380  Label* done) {
1381  // Generate fast-case code for variables that might be shadowed by
1382  // eval-introduced variables. Eval is used a lot without
1383  // introducing variables. In those cases, we do not want to
1384  // perform a runtime call for all variables in the scope
1385  // containing the eval.
1386  if (var->mode() == DYNAMIC_GLOBAL) {
1387  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1388  __ jmp(done);
1389  } else if (var->mode() == DYNAMIC_LOCAL) {
1390  Variable* local = var->local_if_not_shadowed();
1391  __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1392  if (local->mode() == CONST ||
1393  local->mode() == CONST_HARMONY ||
1394  local->mode() == LET) {
1395  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1396  if (local->mode() == CONST) {
1397  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1398  } else { // LET || CONST_HARMONY
1399  __ b(ne, done);
1400  __ mov(r0, Operand(var->name()));
1401  __ push(r0);
1402  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1403  }
1404  }
1405  __ jmp(done);
1406  }
1407 }
1408 
1409 
1410 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1411  // Record position before possible IC call.
1412  SetSourcePosition(proxy->position());
1413  Variable* var = proxy->var();
1414 
1415  // Three cases: global variables, lookup variables, and all other types of
1416  // variables.
1417  switch (var->location()) {
1418  case Variable::UNALLOCATED: {
1419  Comment cmnt(masm_, "Global variable");
1420  // Use inline caching. Variable name is passed in r2 and the global
1421  // object (receiver) in r0.
1422  __ ldr(r0, GlobalObjectOperand());
1423  __ mov(r2, Operand(var->name()));
1424  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1425  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1426  context()->Plug(r0);
1427  break;
1428  }
1429 
1430  case Variable::PARAMETER:
1431  case Variable::LOCAL:
1432  case Variable::CONTEXT: {
1433  Comment cmnt(masm_, var->IsContextSlot()
1434  ? "Context variable"
1435  : "Stack variable");
1436  if (var->binding_needs_init()) {
1437  // var->scope() may be NULL when the proxy is located in eval code and
1438  // refers to a potential outside binding. Currently those bindings are
1439  // always looked up dynamically, i.e. in that case
1440  // var->location() == LOOKUP.
1441  // always holds.
1442  ASSERT(var->scope() != NULL);
1443 
1444  // Check if the binding really needs an initialization check. The check
1445  // can be skipped in the following situation: we have a LET or CONST
1446  // binding in harmony mode, both the Variable and the VariableProxy have
1447  // the same declaration scope (i.e. they are both in global code, in the
1448  // same function or in the same eval code) and the VariableProxy is in
1449  // the source physically located after the initializer of the variable.
1450  //
1451  // We cannot skip any initialization checks for CONST in non-harmony
1452  // mode because const variables may be declared but never initialized:
1453  // if (false) { const x; }; var y = x;
1454  //
1455  // The condition on the declaration scopes is a conservative check for
1456  // nested functions that access a binding and are called before the
1457  // binding is initialized:
1458  // function() { f(); let x = 1; function f() { x = 2; } }
1459  //
1460  bool skip_init_check;
1461  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1462  skip_init_check = false;
1463  } else {
1464  // Check that we always have valid source position.
1465  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1466  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1467  skip_init_check = var->mode() != CONST &&
1468  var->initializer_position() < proxy->position();
1469  }
1470 
1471  if (!skip_init_check) {
1472  // Let and const need a read barrier.
1473  GetVar(r0, var);
1474  __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1475  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1476  // Throw a reference error when using an uninitialized let/const
1477  // binding in harmony mode.
1478  Label done;
1479  __ b(ne, &done);
1480  __ mov(r0, Operand(var->name()));
1481  __ push(r0);
1482  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1483  __ bind(&done);
1484  } else {
1485  // Uninitialized const bindings outside of harmony mode are unholed.
1486  ASSERT(var->mode() == CONST);
1487  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1488  }
1489  context()->Plug(r0);
1490  break;
1491  }
1492  }
1493  context()->Plug(var);
1494  break;
1495  }
1496 
1497  case Variable::LOOKUP: {
1498  Label done, slow;
1499  // Generate code for loading from variables potentially shadowed
1500  // by eval-introduced variables.
1501  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1502  __ bind(&slow);
1503  Comment cmnt(masm_, "Lookup variable");
1504  __ mov(r1, Operand(var->name()));
1505  __ Push(cp, r1); // Context and name.
1506  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1507  __ bind(&done);
1508  context()->Plug(r0);
1509  }
1510  }
1511 }
1512 
1513 
1514 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1515  Comment cmnt(masm_, "[ RegExpLiteral");
1516  Label materialized;
1517  // Registers will be used as follows:
1518  // r5 = materialized value (RegExp literal)
1519  // r4 = JS function, literals array
1520  // r3 = literal index
1521  // r2 = RegExp pattern
1522  // r1 = RegExp flags
1523  // r0 = RegExp literal clone
1524  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1525  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1526  int literal_offset =
1527  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1528  __ ldr(r5, FieldMemOperand(r4, literal_offset));
1529  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1530  __ cmp(r5, ip);
1531  __ b(ne, &materialized);
1532 
1533  // Create regexp literal using runtime function.
1534  // Result will be in r0.
1535  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1536  __ mov(r2, Operand(expr->pattern()));
1537  __ mov(r1, Operand(expr->flags()));
1538  __ Push(r4, r3, r2, r1);
1539  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1540  __ mov(r5, r0);
1541 
1542  __ bind(&materialized);
1543  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1544  Label allocated, runtime_allocate;
1545  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1546  __ jmp(&allocated);
1547 
1548  __ bind(&runtime_allocate);
1549  __ push(r5);
1550  __ mov(r0, Operand(Smi::FromInt(size)));
1551  __ push(r0);
1552  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1553  __ pop(r5);
1554 
1555  __ bind(&allocated);
1556  // After this, registers are used as follows:
1557  // r0: Newly allocated regexp.
1558  // r5: Materialized regexp.
1559  // r2: temp.
1560  __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
1561  context()->Plug(r0);
1562 }
1563 
1564 
1565 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1566  if (expression == NULL) {
1567  __ LoadRoot(r1, Heap::kNullValueRootIndex);
1568  __ push(r1);
1569  } else {
1570  VisitForStackValue(expression);
1571  }
1572 }
1573 
1574 
1575 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1576  Comment cmnt(masm_, "[ ObjectLiteral");
1577  Handle<FixedArray> constant_properties = expr->constant_properties();
1578  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1579  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1580  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1581  __ mov(r1, Operand(constant_properties));
1582  int flags = expr->fast_elements()
1583  ? ObjectLiteral::kFastElements
1584  : ObjectLiteral::kNoFlags;
1585  flags |= expr->has_function()
1586  ? ObjectLiteral::kHasFunction
1587  : ObjectLiteral::kNoFlags;
1588  __ mov(r0, Operand(Smi::FromInt(flags)));
1589  __ Push(r3, r2, r1, r0);
1590  int properties_count = constant_properties->length() / 2;
1591  if (expr->depth() > 1) {
1592  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1593  } else if (flags != ObjectLiteral::kFastElements ||
1594  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1595  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1596  } else {
1597  FastCloneShallowObjectStub stub(properties_count);
1598  __ CallStub(&stub);
1599  }
1600 
1601  // If result_saved is true the result is on top of the stack. If
1602  // result_saved is false the result is in r0.
1603  bool result_saved = false;
1604 
1605  // Mark all computed expressions that are bound to a key that
1606  // is shadowed by a later occurrence of the same key. For the
1607  // marked expressions, no store code is emitted.
1608  expr->CalculateEmitStore(zone());
1609 
1610  AccessorTable accessor_table(isolate()->zone());
1611  for (int i = 0; i < expr->properties()->length(); i++) {
1612  ObjectLiteral::Property* property = expr->properties()->at(i);
1613  if (property->IsCompileTimeValue()) continue;
1614 
1615  Literal* key = property->key();
1616  Expression* value = property->value();
1617  if (!result_saved) {
1618  __ push(r0); // Save result on stack
1619  result_saved = true;
1620  }
1621  switch (property->kind()) {
1622  case ObjectLiteral::Property::CONSTANT:
1623  UNREACHABLE();
1624  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1625  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1626  // Fall through.
1627  case ObjectLiteral::Property::COMPUTED:
1628  if (key->handle()->IsSymbol()) {
1629  if (property->emit_store()) {
1630  VisitForAccumulatorValue(value);
1631  __ mov(r2, Operand(key->handle()));
1632  __ ldr(r1, MemOperand(sp));
1633  Handle<Code> ic = is_classic_mode()
1634  ? isolate()->builtins()->StoreIC_Initialize()
1635  : isolate()->builtins()->StoreIC_Initialize_Strict();
1636  CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1637  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1638  } else {
1639  VisitForEffect(value);
1640  }
1641  break;
1642  }
1643  // Fall through.
1644  case ObjectLiteral::Property::PROTOTYPE:
1645  // Duplicate receiver on stack.
1646  __ ldr(r0, MemOperand(sp));
1647  __ push(r0);
1648  VisitForStackValue(key);
1649  VisitForStackValue(value);
1650  if (property->emit_store()) {
1651  __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1652  __ push(r0);
1653  __ CallRuntime(Runtime::kSetProperty, 4);
1654  } else {
1655  __ Drop(3);
1656  }
1657  break;
1658  case ObjectLiteral::Property::GETTER:
1659  accessor_table.lookup(key)->second->getter = value;
1660  break;
1661  case ObjectLiteral::Property::SETTER:
1662  accessor_table.lookup(key)->second->setter = value;
1663  break;
1664  }
1665  }
1666 
1667  // Emit code to define accessors, using only a single call to the runtime for
1668  // each pair of corresponding getters and setters.
1669  for (AccessorTable::Iterator it = accessor_table.begin();
1670  it != accessor_table.end();
1671  ++it) {
1672  __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1673  __ push(r0);
1674  VisitForStackValue(it->first);
1675  EmitAccessor(it->second->getter);
1676  EmitAccessor(it->second->setter);
1677  __ mov(r0, Operand(Smi::FromInt(NONE)));
1678  __ push(r0);
1679  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1680  }
1681 
1682  if (expr->has_function()) {
1683  ASSERT(result_saved);
1684  __ ldr(r0, MemOperand(sp));
1685  __ push(r0);
1686  __ CallRuntime(Runtime::kToFastProperties, 1);
1687  }
1688 
1689  if (result_saved) {
1690  context()->PlugTOS();
1691  } else {
1692  context()->Plug(r0);
1693  }
1694 }
1695 
1696 
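// An array literal such as [1, 2, 3] (illustrative) can share its
// copy-on-write elements with the boilerplate and is cloned by
// FastCloneShallowArrayStub, while something like [1, x + 1, [2]] needs its
// non-constant and nested parts filled in afterwards, possibly via the
// runtime.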
1697 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1698  Comment cmnt(masm_, "[ ArrayLiteral");
1699 
1700  ZoneList<Expression*>* subexprs = expr->values();
1701  int length = subexprs->length();
1702  Handle<FixedArray> constant_elements = expr->constant_elements();
1703  ASSERT_EQ(2, constant_elements->length());
1704  ElementsKind constant_elements_kind =
1705  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1706  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1707  Handle<FixedArrayBase> constant_elements_values(
1708  FixedArrayBase::cast(constant_elements->get(1)));
1709 
1710  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1711  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1712  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1713  __ mov(r1, Operand(constant_elements));
1714  __ Push(r3, r2, r1);
1715  if (has_fast_elements && constant_elements_values->map() ==
1716  isolate()->heap()->fixed_cow_array_map()) {
1717  FastCloneShallowArrayStub stub(
1718  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1719  __ CallStub(&stub);
1720  __ IncrementCounter(
1721  isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1722  } else if (expr->depth() > 1) {
1723  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1724  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1725  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1726  } else {
1727  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1728  FLAG_smi_only_arrays);
1729  FastCloneShallowArrayStub::Mode mode = has_fast_elements
1730  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1731  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1732  FastCloneShallowArrayStub stub(mode, length);
1733  __ CallStub(&stub);
1734  }
1735 
1736  bool result_saved = false; // Is the result saved to the stack?
1737 
1738  // Emit code to evaluate all the non-constant subexpressions and to store
1739  // them into the newly cloned array.
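// E.g. for the (illustrative) literal [0, x + 1, 2] only the element at
// index 1 is computed and stored here; the constant elements 0 and 2 were
// already copied from the boilerplate.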
1740  for (int i = 0; i < length; i++) {
1741  Expression* subexpr = subexprs->at(i);
1742  // If the subexpression is a literal or a simple materialized literal it
1743  // is already set in the cloned array.
1744  if (subexpr->AsLiteral() != NULL ||
1745  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1746  continue;
1747  }
1748 
1749  if (!result_saved) {
1750  __ push(r0);
1751  result_saved = true;
1752  }
1753  VisitForAccumulatorValue(subexpr);
1754 
1755  if (IsFastObjectElementsKind(constant_elements_kind)) {
1756  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1757  __ ldr(r6, MemOperand(sp)); // Copy of array literal.
1758  __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1759  __ str(result_register(), FieldMemOperand(r1, offset));
1760  // Update the write barrier for the array store.
1761  __ RecordWriteField(r1, offset, result_register(), r2,
1762  kLRHasBeenSaved, kDontSaveFPRegs,
1763  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1764  } else {
1765  __ ldr(r1, MemOperand(sp)); // Copy of array literal.
1766  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
1767  __ mov(r3, Operand(Smi::FromInt(i)));
1768  __ mov(r4, Operand(Smi::FromInt(expr->literal_index())));
1769  StoreArrayLiteralElementStub stub;
1770  __ CallStub(&stub);
1771  }
1772 
1773  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1774  }
1775 
1776  if (result_saved) {
1777  context()->PlugTOS();
1778  } else {
1779  context()->Plug(r0);
1780  }
1781 }
1782 
1783 
1784 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1785  Comment cmnt(masm_, "[ Assignment");
1786  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1787  // on the left-hand side.
1788  if (!expr->target()->IsValidLeftHandSide()) {
1789  VisitForEffect(expr->target());
1790  return;
1791  }
1792 
1793  // Left-hand side can only be a property, a global or a (parameter or local)
1794  // slot.
1795  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1796  LhsKind assign_type = VARIABLE;
1797  Property* property = expr->target()->AsProperty();
1798  if (property != NULL) {
1799  assign_type = (property->key()->IsPropertyName())
1800  ? NAMED_PROPERTY
1801  : KEYED_PROPERTY;
1802  }
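// Roughly: 'x = v' is a VARIABLE assignment, 'o.p = v' a NAMED_PROPERTY
// assignment and 'o[k] = v' a KEYED_PROPERTY assignment (illustrative
// JavaScript). Compound forms such as 'o.p += v' additionally need the
// current value loaded before the binary operation below.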
1803 
1804  // Evaluate LHS expression.
1805  switch (assign_type) {
1806  case VARIABLE:
1807  // Nothing to do here.
1808  break;
1809  case NAMED_PROPERTY:
1810  if (expr->is_compound()) {
1811  // We need the receiver both on the stack and in the accumulator.
1812  VisitForAccumulatorValue(property->obj());
1813  __ push(result_register());
1814  } else {
1815  VisitForStackValue(property->obj());
1816  }
1817  break;
1818  case KEYED_PROPERTY:
1819  if (expr->is_compound()) {
1820  VisitForStackValue(property->obj());
1821  VisitForAccumulatorValue(property->key());
1822  __ ldr(r1, MemOperand(sp, 0));
1823  __ push(r0);
1824  } else {
1825  VisitForStackValue(property->obj());
1826  VisitForStackValue(property->key());
1827  }
1828  break;
1829  }
1830 
1831  // For compound assignments we need another deoptimization point after the
1832  // variable/property load.
1833  if (expr->is_compound()) {
1834  { AccumulatorValueContext context(this);
1835  switch (assign_type) {
1836  case VARIABLE:
1837  EmitVariableLoad(expr->target()->AsVariableProxy());
1838  PrepareForBailout(expr->target(), TOS_REG);
1839  break;
1840  case NAMED_PROPERTY:
1841  EmitNamedPropertyLoad(property);
1842  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1843  break;
1844  case KEYED_PROPERTY:
1845  EmitKeyedPropertyLoad(property);
1846  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1847  break;
1848  }
1849  }
1850 
1851  Token::Value op = expr->binary_op();
1852  __ push(r0); // Left operand goes on the stack.
1853  VisitForAccumulatorValue(expr->value());
1854 
1855  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1856  ? OVERWRITE_RIGHT
1857  : NO_OVERWRITE;
1858  SetSourcePosition(expr->position() + 1);
1859  AccumulatorValueContext context(this);
1860  if (ShouldInlineSmiCase(op)) {
1861  EmitInlineSmiBinaryOp(expr->binary_operation(),
1862  op,
1863  mode,
1864  expr->target(),
1865  expr->value());
1866  } else {
1867  EmitBinaryOp(expr->binary_operation(), op, mode);
1868  }
1869 
1870  // Deoptimization point in case the binary operation may have side effects.
1871  PrepareForBailout(expr->binary_operation(), TOS_REG);
1872  } else {
1873  VisitForAccumulatorValue(expr->value());
1874  }
1875 
1876  // Record source position before possible IC call.
1877  SetSourcePosition(expr->position());
1878 
1879  // Store the value.
1880  switch (assign_type) {
1881  case VARIABLE:
1882  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1883  expr->op());
1884  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1885  context()->Plug(r0);
1886  break;
1887  case NAMED_PROPERTY:
1888  EmitNamedPropertyAssignment(expr);
1889  break;
1890  case KEYED_PROPERTY:
1891  EmitKeyedPropertyAssignment(expr);
1892  break;
1893  }
1894 }
1895 
1896 
1897 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1898  SetSourcePosition(prop->position());
1899  Literal* key = prop->key()->AsLiteral();
1900  __ mov(r2, Operand(key->handle()));
1901  // Call load IC. It has arguments receiver and property name in r0 and r2.
1902  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1903  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1904 }
1905 
1906 
1907 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1908  SetSourcePosition(prop->position());
1909  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1910  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1911  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1912 }
1913 
1914 
1915 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1916  Token::Value op,
1917  OverwriteMode mode,
1918  Expression* left_expr,
1919  Expression* right_expr) {
1920  Label done, smi_case, stub_call;
1921 
1922  Register scratch1 = r2;
1923  Register scratch2 = r3;
1924 
1925  // Get the arguments.
1926  Register left = r1;
1927  Register right = r0;
1928  __ pop(left);
1929 
1930  // Perform combined smi check on both operands.
1931  __ orr(scratch1, left, Operand(right));
1932  STATIC_ASSERT(kSmiTag == 0);
1933  JumpPatchSite patch_site(masm_);
1934  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1935 
1936  __ bind(&stub_call);
1937  BinaryOpStub stub(op, mode);
1938  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1939  patch_site.EmitPatchInfo();
1940  __ jmp(&done);
1941 
1942  __ bind(&smi_case);
1943  // Smi case. This code works the same way as the smi-smi case in the type
1944  // recording binary operation stub, see
1945  // BinaryOpStub::GenerateSmiSmiOperation for comments.
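// E.g. for Token::ADD with the (illustrative) smi operands 3 and 4, the
// tagged values are 6 and 8 (kSmiTag == 0, one tag bit), so adding them
// yields 14, which is already the tagged result 7; only the overflow (vs)
// check can send us back to the stub.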
1946  switch (op) {
1947  case Token::SAR:
1948  __ b(&stub_call);
1949  __ GetLeastBitsFromSmi(scratch1, right, 5);
1950  __ mov(right, Operand(left, ASR, scratch1));
1951  __ bic(right, right, Operand(kSmiTagMask));
1952  break;
1953  case Token::SHL: {
1954  __ b(&stub_call);
1955  __ SmiUntag(scratch1, left);
1956  __ GetLeastBitsFromSmi(scratch2, right, 5);
1957  __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1958  __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
1959  __ b(mi, &stub_call);
1960  __ SmiTag(right, scratch1);
1961  break;
1962  }
1963  case Token::SHR: {
1964  __ b(&stub_call);
1965  __ SmiUntag(scratch1, left);
1966  __ GetLeastBitsFromSmi(scratch2, right, 5);
1967  __ mov(scratch1, Operand(scratch1, LSR, scratch2));
1968  __ tst(scratch1, Operand(0xc0000000));
1969  __ b(ne, &stub_call);
1970  __ SmiTag(right, scratch1);
1971  break;
1972  }
1973  case Token::ADD:
1974  __ add(scratch1, left, Operand(right), SetCC);
1975  __ b(vs, &stub_call);
1976  __ mov(right, scratch1);
1977  break;
1978  case Token::SUB:
1979  __ sub(scratch1, left, Operand(right), SetCC);
1980  __ b(vs, &stub_call);
1981  __ mov(right, scratch1);
1982  break;
1983  case Token::MUL: {
1984  __ SmiUntag(ip, right);
1985  __ smull(scratch1, scratch2, left, ip);
1986  __ mov(ip, Operand(scratch1, ASR, 31));
1987  __ cmp(ip, Operand(scratch2));
1988  __ b(ne, &stub_call);
1989  __ cmp(scratch1, Operand(0));
1990  __ mov(right, Operand(scratch1), LeaveCC, ne);
1991  __ b(ne, &done);
1992  __ add(scratch2, right, Operand(left), SetCC);
1993  __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
1994  __ b(mi, &stub_call);
1995  break;
1996  }
1997  case Token::BIT_OR:
1998  __ orr(right, left, Operand(right));
1999  break;
2000  case Token::BIT_AND:
2001  __ and_(right, left, Operand(right));
2002  break;
2003  case Token::BIT_XOR:
2004  __ eor(right, left, Operand(right));
2005  break;
2006  default:
2007  UNREACHABLE();
2008  }
2009 
2010  __ bind(&done);
2011  context()->Plug(r0);
2012 }
2013 
2014 
2015 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2016  Token::Value op,
2017  OverwriteMode mode) {
2018  __ pop(r1);
2019  BinaryOpStub stub(op, mode);
2020  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2021  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
2022  patch_site.EmitPatchInfo();
2023  context()->Plug(r0);
2024 }
2025 
2026 
2027 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2028  // Invalid left-hand sides are rewritten to have a 'throw
2029  // ReferenceError' on the left-hand side.
2030  if (!expr->IsValidLeftHandSide()) {
2031  VisitForEffect(expr);
2032  return;
2033  }
2034 
2035  // Left-hand side can only be a property, a global or a (parameter or local)
2036  // slot.
2037  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2038  LhsKind assign_type = VARIABLE;
2039  Property* prop = expr->AsProperty();
2040  if (prop != NULL) {
2041  assign_type = (prop->key()->IsPropertyName())
2042  ? NAMED_PROPERTY
2043  : KEYED_PROPERTY;
2044  }
2045 
2046  switch (assign_type) {
2047  case VARIABLE: {
2048  Variable* var = expr->AsVariableProxy()->var();
2049  EffectContext context(this);
2050  EmitVariableAssignment(var, Token::ASSIGN);
2051  break;
2052  }
2053  case NAMED_PROPERTY: {
2054  __ push(r0); // Preserve value.
2055  VisitForAccumulatorValue(prop->obj());
2056  __ mov(r1, r0);
2057  __ pop(r0); // Restore value.
2058  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
2059  Handle<Code> ic = is_classic_mode()
2060  ? isolate()->builtins()->StoreIC_Initialize()
2061  : isolate()->builtins()->StoreIC_Initialize_Strict();
2062  CallIC(ic);
2063  break;
2064  }
2065  case KEYED_PROPERTY: {
2066  __ push(r0); // Preserve value.
2067  VisitForStackValue(prop->obj());
2068  VisitForAccumulatorValue(prop->key());
2069  __ mov(r1, r0);
2070  __ pop(r2);
2071  __ pop(r0); // Restore value.
2072  Handle<Code> ic = is_classic_mode()
2073  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2074  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2075  CallIC(ic);
2076  break;
2077  }
2078  }
2079  context()->Plug(r0);
2080 }
2081 
2082 
2083 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2084  Token::Value op) {
2085  if (var->IsUnallocated()) {
2086  // Global var, const, or let.
2087  __ mov(r2, Operand(var->name()));
2088  __ ldr(r1, GlobalObjectOperand());
2089  Handle<Code> ic = is_classic_mode()
2090  ? isolate()->builtins()->StoreIC_Initialize()
2091  : isolate()->builtins()->StoreIC_Initialize_Strict();
2092  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2093 
2094  } else if (op == Token::INIT_CONST) {
2095  // Const initializers need a write barrier.
2096  ASSERT(!var->IsParameter()); // No const parameters.
2097  if (var->IsStackLocal()) {
2098  Label skip;
2099  __ ldr(r1, StackOperand(var));
2100  __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2101  __ b(ne, &skip);
2102  __ str(result_register(), StackOperand(var));
2103  __ bind(&skip);
2104  } else {
2105  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2106  // Like var declarations, const declarations are hoisted to function
2107  // scope. However, unlike var initializers, const initializers are
2108  // able to drill a hole to that function context, even from inside a
2109  // 'with' context. We thus bypass the normal static scope lookup for
2110  // var->IsContextSlot().
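// E.g. (illustrative): in 'function f(o) { with (o) { const c = 1; } }' the
// initializer must write the hole-initialized slot in f's context rather
// than a property of o, which is why the runtime call below is used instead
// of a normal contextual store.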
2111  __ push(r0);
2112  __ mov(r0, Operand(var->name()));
2113  __ Push(cp, r0); // Context and name.
2114  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2115  }
2116 
2117  } else if (var->mode() == LET && op != Token::INIT_LET) {
2118  // Non-initializing assignment to let variable needs a write barrier.
2119  if (var->IsLookupSlot()) {
2120  __ push(r0); // Value.
2121  __ mov(r1, Operand(var->name()));
2122  __ mov(r0, Operand(Smi::FromInt(language_mode())));
2123  __ Push(cp, r1, r0); // Context, name, strict mode.
2124  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2125  } else {
2126  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2127  Label assign;
2128  MemOperand location = VarOperand(var, r1);
2129  __ ldr(r3, location);
2130  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2131  __ b(ne, &assign);
2132  __ mov(r3, Operand(var->name()));
2133  __ push(r3);
2134  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2135  // Perform the assignment.
2136  __ bind(&assign);
2137  __ str(result_register(), location);
2138  if (var->IsContextSlot()) {
2139  // RecordWrite may destroy all its register arguments.
2140  __ mov(r3, result_register());
2141  int offset = Context::SlotOffset(var->index());
2142  __ RecordWriteContextSlot(
2143  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2144  }
2145  }
2146 
2147  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2148  // Assignment to var or initializing assignment to let/const
2149  // in harmony mode.
2150  if (var->IsStackAllocated() || var->IsContextSlot()) {
2151  MemOperand location = VarOperand(var, r1);
2152  if (FLAG_debug_code && op == Token::INIT_LET) {
2153  // Check for an uninitialized let binding.
2154  __ ldr(r2, location);
2155  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2156  __ Check(eq, "Let binding re-initialization.");
2157  }
2158  // Perform the assignment.
2159  __ str(r0, location);
2160  if (var->IsContextSlot()) {
2161  __ mov(r3, r0);
2162  int offset = Context::SlotOffset(var->index());
2163  __ RecordWriteContextSlot(
2164  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2165  }
2166  } else {
2167  ASSERT(var->IsLookupSlot());
2168  __ push(r0); // Value.
2169  __ mov(r1, Operand(var->name()));
2170  __ mov(r0, Operand(Smi::FromInt(language_mode())));
2171  __ Push(cp, r1, r0); // Context, name, strict mode.
2172  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2173  }
2174  }
2175  // Non-initializing assignments to consts are ignored.
2176 }
2177 
2178 
2179 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2180  // Assignment to a property, using a named store IC.
2181  Property* prop = expr->target()->AsProperty();
2182  ASSERT(prop != NULL);
2183  ASSERT(prop->key()->AsLiteral() != NULL);
2184 
2185  // If the assignment starts a block of assignments to the same object,
2186  // change to slow case to avoid the quadratic behavior of repeatedly
2187  // adding fast properties.
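// E.g. a constructor body like 'this.a = 1; this.b = 2; this.c = 3;'
// (illustrative) forms such a block: the receiver is switched to slow
// properties here and switched back to fast properties when the last
// assignment of the block ends it.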
2188  if (expr->starts_initialization_block()) {
2189  __ push(result_register());
2190  __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
2191  __ push(ip);
2192  __ CallRuntime(Runtime::kToSlowProperties, 1);
2193  __ pop(result_register());
2194  }
2195 
2196  // Record source code position before IC call.
2197  SetSourcePosition(expr->position());
2198  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
2199  // Load the receiver into r1. Leave a copy on the stack if needed for turning
2200  // the receiver into fast case.
2201  if (expr->ends_initialization_block()) {
2202  __ ldr(r1, MemOperand(sp));
2203  } else {
2204  __ pop(r1);
2205  }
2206 
2207  Handle<Code> ic = is_classic_mode()
2208  ? isolate()->builtins()->StoreIC_Initialize()
2209  : isolate()->builtins()->StoreIC_Initialize_Strict();
2210  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2211 
2212  // If the assignment ends an initialization block, revert to fast case.
2213  if (expr->ends_initialization_block()) {
2214  __ push(r0); // Result of assignment, saved even if not needed.
2215  // Receiver is under the result value.
2216  __ ldr(ip, MemOperand(sp, kPointerSize));
2217  __ push(ip);
2218  __ CallRuntime(Runtime::kToFastProperties, 1);
2219  __ pop(r0);
2220  __ Drop(1);
2221  }
2222  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2223  context()->Plug(r0);
2224 }
2225 
2226 
2227 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2228  // Assignment to a property, using a keyed store IC.
2229 
2230  // If the assignment starts a block of assignments to the same object,
2231  // change to slow case to avoid the quadratic behavior of repeatedly
2232  // adding fast properties.
2233  if (expr->starts_initialization_block()) {
2234  __ push(result_register());
2235  // Receiver is now under the key and value.
2236  __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
2237  __ push(ip);
2238  __ CallRuntime(Runtime::kToSlowProperties, 1);
2239  __ pop(result_register());
2240  }
2241 
2242  // Record source code position before IC call.
2243  SetSourcePosition(expr->position());
2244  __ pop(r1); // Key.
2245  // Load the receiver into r2. Leave a copy on the stack if needed for turning
2246  // the receiver into fast case.
2247  if (expr->ends_initialization_block()) {
2248  __ ldr(r2, MemOperand(sp));
2249  } else {
2250  __ pop(r2);
2251  }
2252 
2253  Handle<Code> ic = is_classic_mode()
2254  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2255  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2256  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2257 
2258  // If the assignment ends an initialization block, revert to fast case.
2259  if (expr->ends_initialization_block()) {
2260  __ push(r0); // Result of assignment, saved even if not needed.
2261  // Receiver is under the result value.
2262  __ ldr(ip, MemOperand(sp, kPointerSize));
2263  __ push(ip);
2264  __ CallRuntime(Runtime::kToFastProperties, 1);
2265  __ pop(r0);
2266  __ Drop(1);
2267  }
2268  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2269  context()->Plug(r0);
2270 }
2271 
2272 
2273 void FullCodeGenerator::VisitProperty(Property* expr) {
2274  Comment cmnt(masm_, "[ Property");
2275  Expression* key = expr->key();
2276 
2277  if (key->IsPropertyName()) {
2278  VisitForAccumulatorValue(expr->obj());
2279  EmitNamedPropertyLoad(expr);
2280  context()->Plug(r0);
2281  } else {
2282  VisitForStackValue(expr->obj());
2283  VisitForAccumulatorValue(expr->key());
2284  __ pop(r1);
2285  EmitKeyedPropertyLoad(expr);
2286  context()->Plug(r0);
2287  }
2288 }
2289 
2290 
2291 void FullCodeGenerator::CallIC(Handle<Code> code,
2292  RelocInfo::Mode rmode,
2293  unsigned ast_id) {
2294  ic_total_count_++;
2295  __ Call(code, rmode, ast_id);
2296 }
2297 
2298 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2299  Handle<Object> name,
2300  RelocInfo::Mode mode) {
2301  // Code common for calls using the IC.
2302  ZoneList<Expression*>* args = expr->arguments();
2303  int arg_count = args->length();
2304  { PreservePositionScope scope(masm()->positions_recorder());
2305  for (int i = 0; i < arg_count; i++) {
2306  VisitForStackValue(args->at(i));
2307  }
2308  __ mov(r2, Operand(name));
2309  }
2310  // Record source position for debugger.
2311  SetSourcePosition(expr->position());
2312  // Call the IC initialization code.
2313  Handle<Code> ic =
2314  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2315  CallIC(ic, mode, expr->id());
2316  RecordJSReturnSite(expr);
2317  // Restore context register.
2318  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2319  context()->Plug(r0);
2320 }
2321 
2322 
2323 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2324  Expression* key) {
2325  // Load the key.
2326  VisitForAccumulatorValue(key);
2327 
2328  // Swap the name of the function and the receiver on the stack to follow
2329  // the calling convention for call ICs.
2330  __ pop(r1);
2331  __ push(r0);
2332  __ push(r1);
2333 
2334  // Code common for calls using the IC.
2335  ZoneList<Expression*>* args = expr->arguments();
2336  int arg_count = args->length();
2337  { PreservePositionScope scope(masm()->positions_recorder());
2338  for (int i = 0; i < arg_count; i++) {
2339  VisitForStackValue(args->at(i));
2340  }
2341  }
2342  // Record source position for debugger.
2343  SetSourcePosition(expr->position());
2344  // Call the IC initialization code.
2345  Handle<Code> ic =
2346  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2347  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2348  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2349  RecordJSReturnSite(expr);
2350  // Restore context register.
2351  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2352  context()->DropAndPlug(1, r0); // Drop the key still on the stack.
2353 }
2354 
2355 
2356 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2357  // Code common for calls using the call stub.
2358  ZoneList<Expression*>* args = expr->arguments();
2359  int arg_count = args->length();
2360  { PreservePositionScope scope(masm()->positions_recorder());
2361  for (int i = 0; i < arg_count; i++) {
2362  VisitForStackValue(args->at(i));
2363  }
2364  }
2365  // Record source position for debugger.
2366  SetSourcePosition(expr->position());
2367 
2368  // Record call targets in unoptimized code, but not in the snapshot.
2369  if (!Serializer::enabled()) {
2370  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2371  Handle<Object> uninitialized =
2372  TypeFeedbackCells::UninitializedSentinel(isolate());
2373  Handle<JSGlobalPropertyCell> cell =
2374  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2375  RecordTypeFeedbackCell(expr->id(), cell);
2376  __ mov(r2, Operand(cell));
2377  }
2378 
2379  CallFunctionStub stub(arg_count, flags);
2380  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2381  __ CallStub(&stub);
2382  RecordJSReturnSite(expr);
2383  // Restore context register.
2384  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2385  context()->DropAndPlug(1, r0);
2386 }
2387 
2388 
2389 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2390  // Push copy of the first argument or undefined if it doesn't exist.
2391  if (arg_count > 0) {
2392  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2393  } else {
2394  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2395  }
2396  __ push(r1);
2397 
2398  // Push the receiver of the enclosing function.
2399  int receiver_offset = 2 + info_->scope()->num_parameters();
2400  __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2401  __ push(r1);
2402  // Push the language mode.
2403  __ mov(r1, Operand(Smi::FromInt(language_mode())));
2404  __ push(r1);
2405 
2406  // Push the start position of the scope the call resides in.
2407  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2408  __ push(r1);
2409 
2410  // Do the runtime call.
2411  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2412 }
2413 
2414 
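// Roughly, calls are dispatched on the shape of the callee (illustrative
// JavaScript): 'eval(src)' may be a direct eval and goes through
// %ResolvePossiblyDirectEval, 'g()' with g a global uses a call IC,
// 'o.m()' and 'o[k]()' use (keyed) call ICs, and anything else, e.g.
// '(f || g)()', goes through CallFunctionStub.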
2415 void FullCodeGenerator::VisitCall(Call* expr) {
2416 #ifdef DEBUG
2417  // We want to verify that RecordJSReturnSite gets called on all paths
2418  // through this function. Avoid early returns.
2419  expr->return_is_recorded_ = false;
2420 #endif
2421 
2422  Comment cmnt(masm_, "[ Call");
2423  Expression* callee = expr->expression();
2424  VariableProxy* proxy = callee->AsVariableProxy();
2425  Property* property = callee->AsProperty();
2426 
2427  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2428  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2429  // resolve the function we need to call and the receiver of the
2430  // call. Then we call the resolved function using the given
2431  // arguments.
2432  ZoneList<Expression*>* args = expr->arguments();
2433  int arg_count = args->length();
2434 
2435  { PreservePositionScope pos_scope(masm()->positions_recorder());
2436  VisitForStackValue(callee);
2437  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2438  __ push(r2); // Reserved receiver slot.
2439 
2440  // Push the arguments.
2441  for (int i = 0; i < arg_count; i++) {
2442  VisitForStackValue(args->at(i));
2443  }
2444 
2445  // Push a copy of the function (found below the arguments) and
2446  // resolve eval.
2447  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2448  __ push(r1);
2449  EmitResolvePossiblyDirectEval(arg_count);
2450 
2451  // The runtime call returns a pair of values in r0 (function) and
2452  // r1 (receiver). Touch up the stack with the right values.
2453  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2454  __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2455  }
2456 
2457  // Record source position for debugger.
2458  SetSourcePosition(expr->position());
2459  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2460  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2461  __ CallStub(&stub);
2462  RecordJSReturnSite(expr);
2463  // Restore context register.
2464  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2465  context()->DropAndPlug(1, r0);
2466  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2467  // Push global object as receiver for the call IC.
2468  __ ldr(r0, GlobalObjectOperand());
2469  __ push(r0);
2470  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2471  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2472  // Call to a lookup slot (dynamically introduced variable).
2473  Label slow, done;
2474 
2475  { PreservePositionScope scope(masm()->positions_recorder());
2476  // Generate code for loading from variables potentially shadowed
2477  // by eval-introduced variables.
2478  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2479  }
2480 
2481  __ bind(&slow);
2482  // Call the runtime to find the function to call (returned in r0)
2483  // and the object holding it (returned in r1).
2484  __ push(context_register());
2485  __ mov(r2, Operand(proxy->name()));
2486  __ push(r2);
2487  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2488  __ Push(r0, r1); // Function, receiver.
2489 
2490  // If fast case code has been generated, emit code to push the
2491  // function and receiver and have the slow path jump around this
2492  // code.
2493  if (done.is_linked()) {
2494  Label call;
2495  __ b(&call);
2496  __ bind(&done);
2497  // Push function.
2498  __ push(r0);
2499  // The receiver is implicitly the global receiver. Indicate this
2500  // by passing the hole to the call function stub.
2501  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
2502  __ push(r1);
2503  __ bind(&call);
2504  }
2505 
2506  // The receiver is either the global receiver or an object found
2507  // by LoadContextSlot. That object could be the hole if the
2508  // receiver is implicitly the global object.
2509  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2510  } else if (property != NULL) {
2511  { PreservePositionScope scope(masm()->positions_recorder());
2512  VisitForStackValue(property->obj());
2513  }
2514  if (property->key()->IsPropertyName()) {
2515  EmitCallWithIC(expr,
2516  property->key()->AsLiteral()->handle(),
2517  RelocInfo::CODE_TARGET);
2518  } else {
2519  EmitKeyedCallWithIC(expr, property->key());
2520  }
2521  } else {
2522  // Call to an arbitrary expression not handled specially above.
2523  { PreservePositionScope scope(masm()->positions_recorder());
2524  VisitForStackValue(callee);
2525  }
2526  // Load global receiver object.
2527  __ ldr(r1, GlobalObjectOperand());
2528  __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2529  __ push(r1);
2530  // Emit function call.
2531  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2532  }
2533 
2534 #ifdef DEBUG
2535  // RecordJSReturnSite should have been called.
2536  ASSERT(expr->return_is_recorded_);
2537 #endif
2538 }
2539 
2540 
2541 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2542  Comment cmnt(masm_, "[ CallNew");
2543  // According to ECMA-262, section 11.2.2, page 44, the function
2544  // expression in new calls must be evaluated before the
2545  // arguments.
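// For example, in 'new F(g())' (illustrative) the expression F is evaluated
// and pushed before g() runs.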
2546 
2547  // Push constructor on the stack. If it's not a function it's used as
2548  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2549  // ignored.
2550  VisitForStackValue(expr->expression());
2551 
2552  // Push the arguments ("left-to-right") on the stack.
2553  ZoneList<Expression*>* args = expr->arguments();
2554  int arg_count = args->length();
2555  for (int i = 0; i < arg_count; i++) {
2556  VisitForStackValue(args->at(i));
2557  }
2558 
2559  // Call the construct call builtin that handles allocation and
2560  // constructor invocation.
2561  SetSourcePosition(expr->position());
2562 
2563  // Load function and argument count into r1 and r0.
2564  __ mov(r0, Operand(arg_count));
2565  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2566 
2567  // Record call targets in unoptimized code, but not in the snapshot.
2568  CallFunctionFlags flags;
2569  if (!Serializer::enabled()) {
2570  flags = RECORD_CALL_TARGET;
2571  Handle<Object> uninitialized =
2572  TypeFeedbackCells::UninitializedSentinel(isolate());
2573  Handle<JSGlobalPropertyCell> cell =
2574  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2575  RecordTypeFeedbackCell(expr->id(), cell);
2576  __ mov(r2, Operand(cell));
2577  } else {
2578  flags = NO_CALL_FUNCTION_FLAGS;
2579  }
2580 
2581  CallConstructStub stub(flags);
2582  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2583  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2584  context()->Plug(r0);
2585 }
2586 
2587 
2588 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2589  ZoneList<Expression*>* args = expr->arguments();
2590  ASSERT(args->length() == 1);
2591 
2592  VisitForAccumulatorValue(args->at(0));
2593 
2594  Label materialize_true, materialize_false;
2595  Label* if_true = NULL;
2596  Label* if_false = NULL;
2597  Label* fall_through = NULL;
2598  context()->PrepareTest(&materialize_true, &materialize_false,
2599  &if_true, &if_false, &fall_through);
2600 
2601  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
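// With kSmiTag == 0 a value is a smi iff its low tag bit is clear; e.g. the
// (illustrative) smi 5 is represented as binary 1010, so the tst against
// kSmiTagMask sets the eq condition.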
2602  __ tst(r0, Operand(kSmiTagMask));
2603  Split(eq, if_true, if_false, fall_through);
2604 
2605  context()->Plug(if_true, if_false);
2606 }
2607 
2608 
2609 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2610  ZoneList<Expression*>* args = expr->arguments();
2611  ASSERT(args->length() == 1);
2612 
2613  VisitForAccumulatorValue(args->at(0));
2614 
2615  Label materialize_true, materialize_false;
2616  Label* if_true = NULL;
2617  Label* if_false = NULL;
2618  Label* fall_through = NULL;
2619  context()->PrepareTest(&materialize_true, &materialize_false,
2620  &if_true, &if_false, &fall_through);
2621 
2622  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2623  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
2624  Split(eq, if_true, if_false, fall_through);
2625 
2626  context()->Plug(if_true, if_false);
2627 }
2628 
2629 
2630 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2631  ZoneList<Expression*>* args = expr->arguments();
2632  ASSERT(args->length() == 1);
2633 
2634  VisitForAccumulatorValue(args->at(0));
2635 
2636  Label materialize_true, materialize_false;
2637  Label* if_true = NULL;
2638  Label* if_false = NULL;
2639  Label* fall_through = NULL;
2640  context()->PrepareTest(&materialize_true, &materialize_false,
2641  &if_true, &if_false, &fall_through);
2642 
2643  __ JumpIfSmi(r0, if_false);
2644  __ LoadRoot(ip, Heap::kNullValueRootIndex);
2645  __ cmp(r0, ip);
2646  __ b(eq, if_true);
2647  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2648  // Undetectable objects behave like undefined when tested with typeof.
2649  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2650  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2651  __ b(ne, if_false);
2652  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2653  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2654  __ b(lt, if_false);
2655  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2656  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2657  Split(le, if_true, if_false, fall_through);
2658 
2659  context()->Plug(if_true, if_false);
2660 }
2661 
2662 
2663 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2664  ZoneList<Expression*>* args = expr->arguments();
2665  ASSERT(args->length() == 1);
2666 
2667  VisitForAccumulatorValue(args->at(0));
2668 
2669  Label materialize_true, materialize_false;
2670  Label* if_true = NULL;
2671  Label* if_false = NULL;
2672  Label* fall_through = NULL;
2673  context()->PrepareTest(&materialize_true, &materialize_false,
2674  &if_true, &if_false, &fall_through);
2675 
2676  __ JumpIfSmi(r0, if_false);
2677  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2678  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2679  Split(ge, if_true, if_false, fall_through);
2680 
2681  context()->Plug(if_true, if_false);
2682 }
2683 
2684 
2685 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2686  ZoneList<Expression*>* args = expr->arguments();
2687  ASSERT(args->length() == 1);
2688 
2689  VisitForAccumulatorValue(args->at(0));
2690 
2691  Label materialize_true, materialize_false;
2692  Label* if_true = NULL;
2693  Label* if_false = NULL;
2694  Label* fall_through = NULL;
2695  context()->PrepareTest(&materialize_true, &materialize_false,
2696  &if_true, &if_false, &fall_through);
2697 
2698  __ JumpIfSmi(r0, if_false);
2699  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2700  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
2701  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2702  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2703  Split(ne, if_true, if_false, fall_through);
2704 
2705  context()->Plug(if_true, if_false);
2706 }
2707 
2708 
2709 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2710  CallRuntime* expr) {
2711  ZoneList<Expression*>* args = expr->arguments();
2712  ASSERT(args->length() == 1);
2713 
2714  VisitForAccumulatorValue(args->at(0));
2715 
2716  Label materialize_true, materialize_false;
2717  Label* if_true = NULL;
2718  Label* if_false = NULL;
2719  Label* fall_through = NULL;
2720  context()->PrepareTest(&materialize_true, &materialize_false,
2721  &if_true, &if_false, &fall_through);
2722 
2723  if (FLAG_debug_code) __ AbortIfSmi(r0);
2724 
2727  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2728  __ b(ne, if_true);
2729 
2730  // Check for fast case object. Generate false result for slow case object.
2733  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
2734  __ cmp(r2, ip);
2735  __ b(eq, if_false);
2736 
2737  // Look for valueOf symbol in the descriptor array, and indicate false if
2738  // found. The type is not checked, so if it is a transition it is a false
2739  // negative.
2740  __ LoadInstanceDescriptors(r1, r4);
2742  // r4: descriptor array
2743  // r3: length of descriptor array
2744  // Calculate the end of the descriptor array.
2745  STATIC_ASSERT(kSmiTag == 0);
2746  STATIC_ASSERT(kSmiTagSize == 1);
2747  STATIC_ASSERT(kPointerSize == 4);
2748  __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2749  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
2750 
2751  // Calculate location of the first key name.
2752  __ add(r4,
2753  r4,
2755  DescriptorArray::kFirstIndex * kPointerSize));
2756  // Loop through all the keys in the descriptor array. If one of these is the
2757  // symbol valueOf the result is false.
2758  Label entry, loop;
2759  // The use of ip to store the valueOf symbol assumes that it is not otherwise
2760  // used in the loop below.
2761  __ mov(ip, Operand(FACTORY->value_of_symbol()));
2762  __ jmp(&entry);
2763  __ bind(&loop);
2764  __ ldr(r3, MemOperand(r4, 0));
2765  __ cmp(r3, ip);
2766  __ b(eq, if_false);
2767  __ add(r4, r4, Operand(kPointerSize));
2768  __ bind(&entry);
2769  __ cmp(r4, Operand(r2));
2770  __ b(ne, &loop);
2771 
2772  // If a valueOf property is not found on the object, check that its
2773  // prototype is the unmodified String prototype. If not, the result is false.
2775  __ JumpIfSmi(r2, if_false);
2780  __ cmp(r2, r3);
2781  __ b(ne, if_false);
2782 
2783  // Set the bit in the map to indicate that it has been checked safe for
2784  // default valueOf and set true result.
2786  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2788  __ jmp(if_true);
2789 
2790  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2791  context()->Plug(if_true, if_false);
2792 }
2793 
2794 
2795 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2796  ZoneList<Expression*>* args = expr->arguments();
2797  ASSERT(args->length() == 1);
2798 
2799  VisitForAccumulatorValue(args->at(0));
2800 
2801  Label materialize_true, materialize_false;
2802  Label* if_true = NULL;
2803  Label* if_false = NULL;
2804  Label* fall_through = NULL;
2805  context()->PrepareTest(&materialize_true, &materialize_false,
2806  &if_true, &if_false, &fall_through);
2807 
2808  __ JumpIfSmi(r0, if_false);
2809  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
2810  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2811  Split(eq, if_true, if_false, fall_through);
2812 
2813  context()->Plug(if_true, if_false);
2814 }
2815 
2816 
2817 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2818  ZoneList<Expression*>* args = expr->arguments();
2819  ASSERT(args->length() == 1);
2820 
2821  VisitForAccumulatorValue(args->at(0));
2822 
2823  Label materialize_true, materialize_false;
2824  Label* if_true = NULL;
2825  Label* if_false = NULL;
2826  Label* fall_through = NULL;
2827  context()->PrepareTest(&materialize_true, &materialize_false,
2828  &if_true, &if_false, &fall_through);
2829 
2830  __ JumpIfSmi(r0, if_false);
2831  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2832  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2833  Split(eq, if_true, if_false, fall_through);
2834 
2835  context()->Plug(if_true, if_false);
2836 }
2837 
2838 
2839 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2840  ZoneList<Expression*>* args = expr->arguments();
2841  ASSERT(args->length() == 1);
2842 
2843  VisitForAccumulatorValue(args->at(0));
2844 
2845  Label materialize_true, materialize_false;
2846  Label* if_true = NULL;
2847  Label* if_false = NULL;
2848  Label* fall_through = NULL;
2849  context()->PrepareTest(&materialize_true, &materialize_false,
2850  &if_true, &if_false, &fall_through);
2851 
2852  __ JumpIfSmi(r0, if_false);
2853  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2854  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2855  Split(eq, if_true, if_false, fall_through);
2856 
2857  context()->Plug(if_true, if_false);
2858 }
2859 
2860 
2861 
2862 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2863  ASSERT(expr->arguments()->length() == 0);
2864 
2865  Label materialize_true, materialize_false;
2866  Label* if_true = NULL;
2867  Label* if_false = NULL;
2868  Label* fall_through = NULL;
2869  context()->PrepareTest(&materialize_true, &materialize_false,
2870  &if_true, &if_false, &fall_through);
2871 
2872  // Get the frame pointer for the calling frame.
2874 
2875  // Skip the arguments adaptor frame if it exists.
2876  Label check_frame_marker;
2879  __ b(ne, &check_frame_marker);
2881 
2882  // Check the marker in the calling frame.
2883  __ bind(&check_frame_marker);
2885  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2886  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2887  Split(eq, if_true, if_false, fall_through);
2888 
2889  context()->Plug(if_true, if_false);
2890 }
2891 
2892 
2893 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2894  ZoneList<Expression*>* args = expr->arguments();
2895  ASSERT(args->length() == 2);
2896 
2897  // Load the two objects into registers and perform the comparison.
2898  VisitForStackValue(args->at(0));
2899  VisitForAccumulatorValue(args->at(1));
2900 
2901  Label materialize_true, materialize_false;
2902  Label* if_true = NULL;
2903  Label* if_false = NULL;
2904  Label* fall_through = NULL;
2905  context()->PrepareTest(&materialize_true, &materialize_false,
2906  &if_true, &if_false, &fall_through);
2907 
2908  __ pop(r1);
2909  __ cmp(r0, r1);
2910  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2911  Split(eq, if_true, if_false, fall_through);
2912 
2913  context()->Plug(if_true, if_false);
2914 }
2915 
2916 
2917 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2918  ZoneList<Expression*>* args = expr->arguments();
2919  ASSERT(args->length() == 1);
2920 
2921  // ArgumentsAccessStub expects the key in r1 and the formal
2922  // parameter count in r0.
2923  VisitForAccumulatorValue(args->at(0));
2924  __ mov(r1, r0);
2925  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2926  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2927  __ CallStub(&stub);
2928  context()->Plug(r0);
2929 }
2930 
2931 
2932 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2933  ASSERT(expr->arguments()->length() == 0);
2934  Label exit;
2935  // Get the number of formal parameters.
2936  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2937 
2938  // Check if the calling frame is an arguments adaptor frame.
2942  __ b(ne, &exit);
2943 
2944  // Arguments adaptor case: Read the arguments length from the
2945  // adaptor frame.
2947 
2948  __ bind(&exit);
2949  context()->Plug(r0);
2950 }
2951 
2952 
2953 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2954  ZoneList<Expression*>* args = expr->arguments();
2955  ASSERT(args->length() == 1);
2956  Label done, null, function, non_function_constructor;
2957 
2958  VisitForAccumulatorValue(args->at(0));
2959 
2960  // If the object is a smi, we return null.
2961  __ JumpIfSmi(r0, &null);
2962 
2963  // Check that the object is a JS object but take special care of JS
2964  // functions to make sure they have 'Function' as their class.
2965  // Assume that there are only two callable types, and one of them is at
2966  // either end of the type range for JS object types. Saves extra comparisons.
2968  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
2969  // Map is now in r0.
2970  __ b(lt, &null);
2973  __ b(eq, &function);
2974 
2975  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
2977  LAST_SPEC_OBJECT_TYPE - 1);
2978  __ b(eq, &function);
2979  // Assume that there is no larger type.
2981 
2982  // Check if the constructor in the map is a JS function.
2984  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2985  __ b(ne, &non_function_constructor);
2986 
2987  // r0 now contains the constructor function. Grab the
2988  // instance class name from there.
2991  __ b(&done);
2992 
2993  // Functions have class 'Function'.
2994  __ bind(&function);
2995  __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2996  __ jmp(&done);
2997 
2998  // Objects with a non-function constructor have class 'Object'.
2999  __ bind(&non_function_constructor);
3000  __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
3001  __ jmp(&done);
3002 
3003  // Non-JS objects have class null.
3004  __ bind(&null);
3005  __ LoadRoot(r0, Heap::kNullValueRootIndex);
3006 
3007  // All done.
3008  __ bind(&done);
3009 
3010  context()->Plug(r0);
3011 }
3012 
3013 
3014 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3015  // Conditionally generate a log call.
3016  // Args:
3017  // 0 (literal string): The type of logging (corresponds to the flags).
3018  // This is used to determine whether or not to generate the log call.
3019  // 1 (string): Format string. Access the string at argument index 2
3020  // with '%2s' (see Logger::LogRuntime for all the formats).
3021  // 2 (array): Arguments to the format string.
3022  ZoneList<Expression*>* args = expr->arguments();
3023  ASSERT_EQ(args->length(), 3);
3024  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
3025  VisitForStackValue(args->at(1));
3026  VisitForStackValue(args->at(2));
3027  __ CallRuntime(Runtime::kLog, 2);
3028  }
3029 
3030  // Finally, we're expected to leave a value on the top of the stack.
3031  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3032  context()->Plug(r0);
3033 }
3034 
3035 
3036 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3037  ASSERT(expr->arguments()->length() == 0);
3038  Label slow_allocate_heapnumber;
3039  Label heapnumber_allocated;
3040 
3041  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3042  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
3043  __ jmp(&heapnumber_allocated);
3044 
3045  __ bind(&slow_allocate_heapnumber);
3046  // Allocate a heap number.
3047  __ CallRuntime(Runtime::kNumberAlloc, 0);
3048  __ mov(r4, Operand(r0));
3049 
3050  __ bind(&heapnumber_allocated);
3051 
3052  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
3053  // by computing:
3054  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
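// E.g. (illustrative) if the 32 random bits are 0x80000000 the subtraction
// leaves 0x80000000 * 2^-32 = 0.5 in the double result.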
3055  if (CpuFeatures::IsSupported(VFP3)) {
3056  __ PrepareCallCFunction(1, r0);
3057  __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
3059  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3060 
3061  CpuFeatures::Scope scope(VFP3);
3062  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3063  // Create this constant using mov/orr to avoid PC relative load.
3064  __ mov(r1, Operand(0x41000000));
3065  __ orr(r1, r1, Operand(0x300000));
3066  // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
3067  __ vmov(d7, r0, r1);
3068  // Move 0x4130000000000000 to VFP.
3069  __ mov(r0, Operand(0, RelocInfo::NONE));
3070  __ vmov(d8, r0, r1);
3071  // Subtract and store the result in the heap number.
3072  __ vsub(d7, d7, d8);
3073  __ sub(r0, r4, Operand(kHeapObjectTag));
3074  __ vstr(d7, r0, HeapNumber::kValueOffset);
3075  __ mov(r0, r4);
3076  } else {
3077  __ PrepareCallCFunction(2, r0);
3078  __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
3079  __ mov(r0, Operand(r4));
3081  __ CallCFunction(
3082  ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
3083  }
3084 
3085  context()->Plug(r0);
3086 }
3087 
3088 
3089 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3090  // Load the arguments on the stack and call the stub.
3091  SubStringStub stub;
3092  ZoneList<Expression*>* args = expr->arguments();
3093  ASSERT(args->length() == 3);
3094  VisitForStackValue(args->at(0));
3095  VisitForStackValue(args->at(1));
3096  VisitForStackValue(args->at(2));
3097  __ CallStub(&stub);
3098  context()->Plug(r0);
3099 }
3100 
3101 
3102 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3103  // Load the arguments on the stack and call the stub.
3104  RegExpExecStub stub;
3105  ZoneList<Expression*>* args = expr->arguments();
3106  ASSERT(args->length() == 4);
3107  VisitForStackValue(args->at(0));
3108  VisitForStackValue(args->at(1));
3109  VisitForStackValue(args->at(2));
3110  VisitForStackValue(args->at(3));
3111  __ CallStub(&stub);
3112  context()->Plug(r0);
3113 }
3114 
3115 
3116 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3117  ZoneList<Expression*>* args = expr->arguments();
3118  ASSERT(args->length() == 1);
3119  VisitForAccumulatorValue(args->at(0)); // Load the object.
3120 
3121  Label done;
3122  // If the object is a smi return the object.
3123  __ JumpIfSmi(r0, &done);
3124  // If the object is not a value type, return the object.
3125  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3126  __ b(ne, &done);
3127  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
3128 
3129  __ bind(&done);
3130  context()->Plug(r0);
3131 }
3132 
3133 
3134 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3135  ZoneList<Expression*>* args = expr->arguments();
3136  ASSERT(args->length() == 2);
3137  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3138  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3139 
3140  VisitForAccumulatorValue(args->at(0)); // Load the object.
3141 
3142  Label runtime, done;
3143  Register object = r0;
3144  Register result = r0;
3145  Register scratch0 = r9;
3146  Register scratch1 = r1;
3147 
3148 #ifdef DEBUG
3149  __ AbortIfSmi(object);
3150  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3151  __ Assert(eq, "Trying to get date field from non-date.");
3152 #endif
3153 
3154  if (index->value() == 0) {
3155  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3156  } else {
3157  if (index->value() < JSDate::kFirstUncachedField) {
3158  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3159  __ mov(scratch1, Operand(stamp));
3160  __ ldr(scratch1, MemOperand(scratch1));
3161  __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3162  __ cmp(scratch1, scratch0);
3163  __ b(ne, &runtime);
3164  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3165  kPointerSize * index->value()));
3166  __ jmp(&done);
3167  }
3168  __ bind(&runtime);
3169  __ PrepareCallCFunction(2, scratch1);
3170  __ mov(r1, Operand(index));
3171  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3172  __ bind(&done);
3173  }
3174  context()->Plug(r0);
3175 }
3176 
3177 
3178 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3179  // Load the arguments on the stack and call the runtime function.
3180  ZoneList<Expression*>* args = expr->arguments();
3181  ASSERT(args->length() == 2);
3182  VisitForStackValue(args->at(0));
3183  VisitForStackValue(args->at(1));
3184  if (CpuFeatures::IsSupported(VFP3)) {
3185  MathPowStub stub(MathPowStub::ON_STACK);
3186  __ CallStub(&stub);
3187  } else {
3188  __ CallRuntime(Runtime::kMath_pow, 2);
3189  }
3190  context()->Plug(r0);
3191 }
3192 
3193 
3194 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3195  ZoneList<Expression*>* args = expr->arguments();
3196  ASSERT(args->length() == 2);
3197  VisitForStackValue(args->at(0)); // Load the object.
3198  VisitForAccumulatorValue(args->at(1)); // Load the value.
3199  __ pop(r1); // r0 = value. r1 = object.
3200 
3201  Label done;
3202  // If the object is a smi, return the value.
3203  __ JumpIfSmi(r1, &done);
3204 
3205  // If the object is not a value type, return the value.
3206  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3207  __ b(ne, &done);
3208 
3209  // Store the value.
3210  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3211  // Update the write barrier. Save the value as it will be
3212  // overwritten by the write barrier code and is needed afterward.
3213  __ mov(r2, r0);
3214  __ RecordWriteField(
3215  r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3216 
3217  __ bind(&done);
3218  context()->Plug(r0);
3219 }
3220 
3221 
3222 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3223  ZoneList<Expression*>* args = expr->arguments();
3224  ASSERT_EQ(args->length(), 1);
3225  // Load the argument on the stack and call the stub.
3226  VisitForStackValue(args->at(0));
3227 
3228  NumberToStringStub stub;
3229  __ CallStub(&stub);
3230  context()->Plug(r0);
3231 }
3232 
3233 
3234 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3235  ZoneList<Expression*>* args = expr->arguments();
3236  ASSERT(args->length() == 1);
3237  VisitForAccumulatorValue(args->at(0));
3238 
3239  Label done;
3240  StringCharFromCodeGenerator generator(r0, r1);
3241  generator.GenerateFast(masm_);
3242  __ jmp(&done);
3243 
3244  NopRuntimeCallHelper call_helper;
3245  generator.GenerateSlow(masm_, call_helper);
3246 
3247  __ bind(&done);
3248  context()->Plug(r1);
3249 }
3250 
3251 
3252 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3253  ZoneList<Expression*>* args = expr->arguments();
3254  ASSERT(args->length() == 2);
3255  VisitForStackValue(args->at(0));
3256  VisitForAccumulatorValue(args->at(1));
3257 
3258  Register object = r1;
3259  Register index = r0;
3260  Register result = r3;
3261 
3262  __ pop(object);
3263 
3264  Label need_conversion;
3265  Label index_out_of_range;
3266  Label done;
3267  StringCharCodeAtGenerator generator(object,
3268  index,
3269  result,
3270  &need_conversion,
3271  &need_conversion,
3272  &index_out_of_range,
3273  STRING_INDEX_IS_NUMBER);
3274  generator.GenerateFast(masm_);
3275  __ jmp(&done);
3276 
3277  __ bind(&index_out_of_range);
3278  // When the index is out of range, the spec requires us to return
3279  // NaN.
3280  __ LoadRoot(result, Heap::kNanValueRootIndex);
3281  __ jmp(&done);
3282 
3283  __ bind(&need_conversion);
3284  // Load the undefined value into the result register, which will
3285  // trigger conversion.
3286  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3287  __ jmp(&done);
3288 
3289  NopRuntimeCallHelper call_helper;
3290  generator.GenerateSlow(masm_, call_helper);
3291 
3292  __ bind(&done);
3293  context()->Plug(result);
3294 }
3295 
3296 
3297 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3298  ZoneList<Expression*>* args = expr->arguments();
3299  ASSERT(args->length() == 2);
3300  VisitForStackValue(args->at(0));
3301  VisitForAccumulatorValue(args->at(1));
3302 
3303  Register object = r1;
3304  Register index = r0;
3305  Register scratch = r3;
3306  Register result = r0;
3307 
3308  __ pop(object);
3309 
3310  Label need_conversion;
3311  Label index_out_of_range;
3312  Label done;
3313  StringCharAtGenerator generator(object,
3314  index,
3315  scratch,
3316  result,
3317  &need_conversion,
3318  &need_conversion,
3319  &index_out_of_range,
3320  STRING_INDEX_IS_NUMBER);
3321  generator.GenerateFast(masm_);
3322  __ jmp(&done);
3323 
3324  __ bind(&index_out_of_range);
3325  // When the index is out of range, the spec requires us to return
3326  // the empty string.
3327  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3328  __ jmp(&done);
3329 
3330  __ bind(&need_conversion);
3331  // Move smi zero into the result register, which will trigger
3332  // conversion.
3333  __ mov(result, Operand(Smi::FromInt(0)));
3334  __ jmp(&done);
3335 
3336  NopRuntimeCallHelper call_helper;
3337  generator.GenerateSlow(masm_, call_helper);
3338 
3339  __ bind(&done);
3340  context()->Plug(result);
3341 }
3342 
3343 
3344 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3345  ZoneList<Expression*>* args = expr->arguments();
3346  ASSERT_EQ(2, args->length());
3347  VisitForStackValue(args->at(0));
3348  VisitForStackValue(args->at(1));
3349 
3350  StringAddStub stub(NO_STRING_ADD_FLAGS);
3351  __ CallStub(&stub);
3352  context()->Plug(r0);
3353 }
3354 
3355 
3356 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3357  ZoneList<Expression*>* args = expr->arguments();
3358  ASSERT_EQ(2, args->length());
3359  VisitForStackValue(args->at(0));
3360  VisitForStackValue(args->at(1));
3361 
3362  StringCompareStub stub;
3363  __ CallStub(&stub);
3364  context()->Plug(r0);
3365 }
3366 
3367 
3368 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3369  // Load the argument on the stack and call the stub.
3370  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3371  TranscendentalCacheStub::TAGGED);
3372  ZoneList<Expression*>* args = expr->arguments();
3373  ASSERT(args->length() == 1);
3374  VisitForStackValue(args->at(0));
3375  __ CallStub(&stub);
3376  context()->Plug(r0);
3377 }
3378 
3379 
3380 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3381  // Load the argument on the stack and call the stub.
3382  TranscendentalCacheStub stub(TranscendentalCache::COS,
3383  TranscendentalCacheStub::TAGGED);
3384  ZoneList<Expression*>* args = expr->arguments();
3385  ASSERT(args->length() == 1);
3386  VisitForStackValue(args->at(0));
3387  __ CallStub(&stub);
3388  context()->Plug(r0);
3389 }
3390 
3391 
3392 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3393  // Load the argument on the stack and call the stub.
3394  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3395  TranscendentalCacheStub::TAGGED);
3396  ZoneList<Expression*>* args = expr->arguments();
3397  ASSERT(args->length() == 1);
3398  VisitForStackValue(args->at(0));
3399  __ CallStub(&stub);
3400  context()->Plug(r0);
3401 }
3402 
3403 
3404 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3405  // Load the argument on the stack and call the stub.
3406  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3407  TranscendentalCacheStub::TAGGED);
3408  ZoneList<Expression*>* args = expr->arguments();
3409  ASSERT(args->length() == 1);
3410  VisitForStackValue(args->at(0));
3411  __ CallStub(&stub);
3412  context()->Plug(r0);
3413 }
3414 
3415 
3416 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3417  // Load the argument on the stack and call the runtime function.
3418  ZoneList<Expression*>* args = expr->arguments();
3419  ASSERT(args->length() == 1);
3420  VisitForStackValue(args->at(0));
3421  __ CallRuntime(Runtime::kMath_sqrt, 1);
3422  context()->Plug(r0);
3423 }
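// Math.sin/cos/tan/log above share TranscendentalCacheStub: the stub first
// probes a per-isolate cache of recently computed transcendental results and
// only then falls back to the C library. TAGGED mode means the argument is
// passed as a tagged value on the stack (a smi or heap number) rather than in
// a VFP register. Math.sqrt has no such cache here and goes straight to the
// Runtime::kMath_sqrt C++ function.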
3424 
3425 
3426 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3427  ZoneList<Expression*>* args = expr->arguments();
3428  ASSERT(args->length() >= 2);
3429 
3430  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3431  for (int i = 0; i < arg_count + 1; i++) {
3432  VisitForStackValue(args->at(i));
3433  }
3434  VisitForAccumulatorValue(args->last()); // Function.
3435 
3436  // Check for proxy.
3437  Label proxy, done;
3438  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
3439  __ b(eq, &proxy);
3440 
3441  // InvokeFunction requires the function in r1. Move it in there.
3442  __ mov(r1, result_register());
3443  ParameterCount count(arg_count);
3444  __ InvokeFunction(r1, count, CALL_FUNCTION,
3445  NullCallWrapper(), CALL_AS_METHOD);
3446  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3447  __ jmp(&done);
3448 
3449  __ bind(&proxy);
3450  __ push(r0);
3451  __ CallRuntime(Runtime::kCall, args->length());
3452  __ bind(&done);
3453 
3454  context()->Plug(r0);
3455 }
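// %_CallFunction(receiver, arg1, ..., argN, fn) calls fn with the given
// receiver and arguments. The receiver and arguments are pushed, fn is left in
// the accumulator, and a direct InvokeFunction is emitted; only when the
// callee turns out to be a JSFunctionProxy does the code fall back to the
// generic Runtime::kCall path. A hypothetical use from a JS builtin (shape
// only, not from this file):
//
//   %_CallFunction(receiver, x, y, handler);  // ~ handler.call(receiver, x, y)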
3456 
3457 
3458 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3459  RegExpConstructResultStub stub;
3460  ZoneList<Expression*>* args = expr->arguments();
3461  ASSERT(args->length() == 3);
3462  VisitForStackValue(args->at(0));
3463  VisitForStackValue(args->at(1));
3464  VisitForStackValue(args->at(2));
3465  __ CallStub(&stub);
3466  context()->Plug(r0);
3467 }
3468 
3469 
3470 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3471  ZoneList<Expression*>* args = expr->arguments();
3472  ASSERT_EQ(2, args->length());
3473  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3474  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3475 
3476  Handle<FixedArray> jsfunction_result_caches(
3477  isolate()->global_context()->jsfunction_result_caches());
3478  if (jsfunction_result_caches->length() <= cache_id) {
3479  __ Abort("Attempt to use undefined cache.");
3480  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3481  context()->Plug(r0);
3482  return;
3483  }
3484 
3485  VisitForAccumulatorValue(args->at(1));
3486 
3487  Register key = r0;
3488  Register cache = r1;
3489  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3490  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3491  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3492  __ ldr(cache,
3493  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3494 
3495 
3496  Label done, not_found;
3497  // tmp now holds finger offset as a smi.
3498  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3499  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3500  // r2 now holds finger offset as a smi.
3501  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3502  // r3 now points to the start of fixed array elements.
3503  __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
3504  // Note side effect of PreIndex: r3 now points to the key of the pair.
3505  __ cmp(key, r2);
3506  __ b(ne, &not_found);
3507 
3508  __ ldr(r0, MemOperand(r3, kPointerSize));
3509  __ b(&done);
3510 
3511  __ bind(&not_found);
3512  // Call runtime to perform the lookup.
3513  __ Push(cache, key);
3514  __ CallRuntime(Runtime::kGetFromCache, 2);
3515 
3516  __ bind(&done);
3517  context()->Plug(r0);
3518 }
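// %_GetFromCache looks a key up in one of the JSFunctionResultCaches hanging
// off the global context (the cache id must be a literal). The inlined code
// only inspects the entry the cache's "finger" currently points at; any miss
// is handled by Runtime::kGetFromCache, which performs the full search and
// updates the cache.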
3519 
3520 
3521 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3522  ZoneList<Expression*>* args = expr->arguments();
3523  ASSERT_EQ(2, args->length());
3524 
3525  Register right = r0;
3526  Register left = r1;
3527  Register tmp = r2;
3528  Register tmp2 = r3;
3529 
3530  VisitForStackValue(args->at(0));
3531  VisitForAccumulatorValue(args->at(1));
3532  __ pop(left);
3533 
3534  Label done, fail, ok;
3535  __ cmp(left, Operand(right));
3536  __ b(eq, &ok);
3537  // Fail if either is a non-HeapObject.
3538  __ and_(tmp, left, Operand(right));
3539  __ JumpIfSmi(tmp, &fail);
3540  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3541  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3542  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
3543  __ b(ne, &fail);
3544  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3545  __ cmp(tmp, Operand(tmp2));
3546  __ b(ne, &fail);
3547  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3548  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3549  __ cmp(tmp, tmp2);
3550  __ b(eq, &ok);
3551  __ bind(&fail);
3552  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3553  __ jmp(&done);
3554  __ bind(&ok);
3555  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3556  __ bind(&done);
3557 
3558  context()->Plug(r0);
3559 }
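// %_IsRegExpEquivalent: two values are considered equivalent regexps if they
// are the same reference, or if both are JSRegExp objects with the same map
// and the same compiled data array; everything else (including smis) yields
// false.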
3560 
3561 
3562 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3563  ZoneList<Expression*>* args = expr->arguments();
3564  VisitForAccumulatorValue(args->at(0));
3565 
3566  Label materialize_true, materialize_false;
3567  Label* if_true = NULL;
3568  Label* if_false = NULL;
3569  Label* fall_through = NULL;
3570  context()->PrepareTest(&materialize_true, &materialize_false,
3571  &if_true, &if_false, &fall_through);
3572 
3575  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3576  Split(eq, if_true, if_false, fall_through);
3577 
3578  context()->Plug(if_true, if_false);
3579 }
3580 
3581 
3582 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3583  ZoneList<Expression*>* args = expr->arguments();
3584  ASSERT(args->length() == 1);
3585  VisitForAccumulatorValue(args->at(0));
3586 
3587  if (FLAG_debug_code) {
3588  __ AbortIfNotString(r0);
3589  }
3590 
3591  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3592  __ IndexFromHash(r0, r0);
3593 
3594  context()->Plug(r0);
3595 }
3596 
3597 
3598 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3599  Label bailout, done, one_char_separator, long_separator,
3600  non_trivial_array, not_size_one_array, loop,
3601  empty_separator_loop, one_char_separator_loop,
3602  one_char_separator_loop_entry, long_separator_loop;
3603  ZoneList<Expression*>* args = expr->arguments();
3604  ASSERT(args->length() == 2);
3605  VisitForStackValue(args->at(1));
3606  VisitForAccumulatorValue(args->at(0));
3607 
3608  // All aliases of the same register have disjoint lifetimes.
3609  Register array = r0;
3610  Register elements = no_reg; // Will be r0.
3611  Register result = no_reg; // Will be r0.
3612  Register separator = r1;
3613  Register array_length = r2;
3614  Register result_pos = no_reg; // Will be r2
3615  Register string_length = r3;
3616  Register string = r4;
3617  Register element = r5;
3618  Register elements_end = r6;
3619  Register scratch1 = r7;
3620  Register scratch2 = r9;
3621 
3622  // Separator operand is on the stack.
3623  __ pop(separator);
3624 
3625  // Check that the array is a JSArray.
3626  __ JumpIfSmi(array, &bailout);
3627  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3628  __ b(ne, &bailout);
3629 
3630  // Check that the array has fast elements.
3631  __ CheckFastElements(scratch1, scratch2, &bailout);
3632 
3633  // If the array has length zero, return the empty string.
3634  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3635  __ SmiUntag(array_length, SetCC);
3636  __ b(ne, &non_trivial_array);
3637  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
3638  __ b(&done);
3639 
3640  __ bind(&non_trivial_array);
3641 
3642  // Get the FixedArray containing array's elements.
3643  elements = array;
3644  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3645  array = no_reg; // End of array's live range.
3646 
3647  // Check that all array elements are sequential ASCII strings, and
3648  // accumulate the sum of their lengths, as a smi-encoded value.
3649  __ mov(string_length, Operand(0));
3650  __ add(element,
3651  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3652  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3653  // Loop condition: while (element < elements_end).
3654  // Live values in registers:
3655  // elements: Fixed array of strings.
3656  // array_length: Length of the fixed array of strings (not smi)
3657  // separator: Separator string
3658  // string_length: Accumulated sum of string lengths (smi).
3659  // element: Current array element.
3660  // elements_end: Array end.
3661  if (FLAG_debug_code) {
3662  __ cmp(array_length, Operand(0));
3663  __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
3664  }
3665  __ bind(&loop);
3666  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3667  __ JumpIfSmi(string, &bailout);
3668  __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3669  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3670  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3671  __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3672  __ add(string_length, string_length, Operand(scratch1), SetCC);
3673  __ b(vs, &bailout);
3674  __ cmp(element, elements_end);
3675  __ b(lt, &loop);
3676 
3677  // If array_length is 1, return elements[0], a string.
3678  __ cmp(array_length, Operand(1));
3679  __ b(ne, &not_size_one_array);
3680  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3681  __ b(&done);
3682 
3683  __ bind(&not_size_one_array);
3684 
3685  // Live values in registers:
3686  // separator: Separator string
3687  // array_length: Length of the array.
3688  // string_length: Sum of string lengths (smi).
3689  // elements: FixedArray of strings.
3690 
3691  // Check that the separator is a flat ASCII string.
3692  __ JumpIfSmi(separator, &bailout);
3693  __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3694  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3695  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3696 
3697  // Add (separator length times array_length) - separator length to the
3698  // string_length to get the length of the result string. array_length is not
3699  // a smi but the other values are, so the result is a smi.
3700  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3701  __ sub(string_length, string_length, Operand(scratch1));
3702  __ smull(scratch2, ip, array_length, scratch1);
3703  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3704  // zero.
3705  __ cmp(ip, Operand(0));
3706  __ b(ne, &bailout);
3707  __ tst(scratch2, Operand(0x80000000));
3708  __ b(ne, &bailout);
3709  __ add(string_length, string_length, Operand(scratch2), SetCC);
3710  __ b(vs, &bailout);
3711  __ SmiUntag(string_length);
3712 
3713  // Get first element in the array to free up the elements register to be used
3714  // for the result.
3715  __ add(element,
3716  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3717  result = elements; // End of live range for elements.
3718  elements = no_reg;
3719  // Live values in registers:
3720  // element: First array element
3721  // separator: Separator string
3722  // string_length: Length of result string (not smi)
3723  // array_length: Length of the array.
3724  __ AllocateAsciiString(result,
3725  string_length,
3726  scratch1,
3727  scratch2,
3728  elements_end,
3729  &bailout);
3730  // Prepare for looping. Set up elements_end to point past the end of the
3731  // array, and result_pos to the position in the result string where the
3732  // first character will be written.
3733  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3734  result_pos = array_length; // End of live range for array_length.
3735  array_length = no_reg;
3736  __ add(result_pos,
3737  result,
3738  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3739 
3740  // Check the length of the separator.
3741  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3742  __ cmp(scratch1, Operand(Smi::FromInt(1)));
3743  __ b(eq, &one_char_separator);
3744  __ b(gt, &long_separator);
3745 
3746  // Empty separator case
3747  __ bind(&empty_separator_loop);
3748  // Live values in registers:
3749  // result_pos: the position to which we are currently copying characters.
3750  // element: Current array element.
3751  // elements_end: Array end.
3752 
3753  // Copy next array element to the result.
3754  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3755  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3756  __ SmiUntag(string_length);
3757  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3758  __ CopyBytes(string, result_pos, string_length, scratch1);
3759  __ cmp(element, elements_end);
3760  __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3761  ASSERT(result.is(r0));
3762  __ b(&done);
3763 
3764  // One-character separator case
3765  __ bind(&one_char_separator);
3766  // Replace separator with its ASCII character value.
3767  __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3768  // Jump into the loop after the code that copies the separator, so the first
3769  // element is not preceded by a separator
3770  __ jmp(&one_char_separator_loop_entry);
3771 
3772  __ bind(&one_char_separator_loop);
3773  // Live values in registers:
3774  // result_pos: the position to which we are currently copying characters.
3775  // element: Current array element.
3776  // elements_end: Array end.
3777  // separator: Single separator ASCII char (in lower byte).
3778 
3779  // Copy the separator character to the result.
3780  __ strb(separator, MemOperand(result_pos, 1, PostIndex));
3781 
3782  // Copy next array element to the result.
3783  __ bind(&one_char_separator_loop_entry);
3784  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3785  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3786  __ SmiUntag(string_length);
3787  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3788  __ CopyBytes(string, result_pos, string_length, scratch1);
3789  __ cmp(element, elements_end);
3790  __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
3791  ASSERT(result.is(r0));
3792  __ b(&done);
3793 
3794  // Long separator case (separator is more than one character). Entry is at the
3795  // label long_separator below.
3796  __ bind(&long_separator_loop);
3797  // Live values in registers:
3798  // result_pos: the position to which we are currently copying characters.
3799  // element: Current array element.
3800  // elements_end: Array end.
3801  // separator: Separator string.
3802 
3803  // Copy the separator to the result.
3804  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
3805  __ SmiUntag(string_length);
3806  __ add(string,
3807  separator,
3808  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3809  __ CopyBytes(string, result_pos, string_length, scratch1);
3810 
3811  __ bind(&long_separator);
3812  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3813  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3814  __ SmiUntag(string_length);
3815  __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3816  __ CopyBytes(string, result_pos, string_length, scratch1);
3817  __ cmp(element, elements_end);
3818  __ b(lt, &long_separator_loop); // End while (element < elements_end).
3819  ASSERT(result.is(r0));
3820  __ b(&done);
3821 
3822  __ bind(&bailout);
3823  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3824  __ bind(&done);
3825  context()->Plug(r0);
3826 }
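// Fast path for joining an array of flat ASCII strings with a flat ASCII
// separator. The first loop validates every element and accumulates the total
// length; the final length is roughly
//
//   sum(length(element_i)) + (array_length - 1) * length(separator)
//
// computed in smi arithmetic with explicit overflow checks (the smull high
// word and the sign bit must be zero). Three specialized copy loops handle the
// empty, one-character and longer separator cases. Any non-string element,
// non-ASCII content or length overflow jumps to the bailout, which returns
// undefined so the calling builtin can fall back to the generic join.
// Illustrative: ["a", "b", "c"].join("-") stays on this path, while
// ["a", {}, "c"].join("-") bails out.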
3827 
3828 
3829 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3830  Handle<String> name = expr->name();
3831  if (name->length() > 0 && name->Get(0) == '_') {
3832  Comment cmnt(masm_, "[ InlineRuntimeCall");
3833  EmitInlineRuntimeCall(expr);
3834  return;
3835  }
3836 
3837  Comment cmnt(masm_, "[ CallRuntime");
3838  ZoneList<Expression*>* args = expr->arguments();
3839 
3840  if (expr->is_jsruntime()) {
3841  // Prepare for calling JS runtime function.
3842  __ ldr(r0, GlobalObjectOperand());
3843  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
3844  __ push(r0);
3845  }
3846 
3847  // Push the arguments ("left-to-right").
3848  int arg_count = args->length();
3849  for (int i = 0; i < arg_count; i++) {
3850  VisitForStackValue(args->at(i));
3851  }
3852 
3853  if (expr->is_jsruntime()) {
3854  // Call the JS runtime function.
3855  __ mov(r2, Operand(expr->name()));
3856  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3857  Handle<Code> ic =
3858  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3859  CallIC(ic, mode, expr->id());
3860  // Restore context register.
3861  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3862  } else {
3863  // Call the C runtime function.
3864  __ CallRuntime(expr->function(), arg_count);
3865  }
3866  context()->Plug(r0);
3867 }
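// Runtime calls come in two flavours: names starting with '_' (e.g. %_IsSmi)
// are expanded inline by EmitInlineRuntimeCall via the Emit* helpers above,
// while ordinary %-calls either go through a call IC on the builtins object
// (expr->is_jsruntime()) or directly into the C++ runtime via CallRuntime.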
3868 
3869 
3870 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3871  switch (expr->op()) {
3872  case Token::DELETE: {
3873  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3874  Property* property = expr->expression()->AsProperty();
3875  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3876 
3877  if (property != NULL) {
3878  VisitForStackValue(property->obj());
3879  VisitForStackValue(property->key());
3880  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3881  ? kNonStrictMode : kStrictMode;
3882  __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
3883  __ push(r1);
3884  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3885  context()->Plug(r0);
3886  } else if (proxy != NULL) {
3887  Variable* var = proxy->var();
3888  // Delete of an unqualified identifier is disallowed in strict mode
3889  // but "delete this" is allowed.
3890  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3891  if (var->IsUnallocated()) {
3892  __ ldr(r2, GlobalObjectOperand());
3893  __ mov(r1, Operand(var->name()));
3894  __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3895  __ Push(r2, r1, r0);
3896  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3897  context()->Plug(r0);
3898  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3899  // Result of deleting non-global, non-dynamic variables is false.
3900  // The subexpression does not have side effects.
3901  context()->Plug(var->is_this());
3902  } else {
3903  // Non-global variable. Call the runtime to try to delete from the
3904  // context where the variable was introduced.
3905  __ push(context_register());
3906  __ mov(r2, Operand(var->name()));
3907  __ push(r2);
3908  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3909  context()->Plug(r0);
3910  }
3911  } else {
3912  // Result of deleting non-property, non-variable reference is true.
3913  // The subexpression may have side effects.
3914  VisitForEffect(expr->expression());
3915  context()->Plug(true);
3916  }
3917  break;
3918  }
3919 
3920  case Token::VOID: {
3921  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3922  VisitForEffect(expr->expression());
3923  context()->Plug(Heap::kUndefinedValueRootIndex);
3924  break;
3925  }
3926 
3927  case Token::NOT: {
3928  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3929  if (context()->IsEffect()) {
3930  // Unary NOT has no side effects so it's only necessary to visit the
3931  // subexpression. Match the optimizing compiler by not branching.
3932  VisitForEffect(expr->expression());
3933  } else if (context()->IsTest()) {
3934  const TestContext* test = TestContext::cast(context());
3935  // The labels are swapped for the recursive call.
3936  VisitForControl(expr->expression(),
3937  test->false_label(),
3938  test->true_label(),
3939  test->fall_through());
3940  context()->Plug(test->true_label(), test->false_label());
3941  } else {
3942  // We handle value contexts explicitly rather than simply visiting
3943  // for control and plugging the control flow into the context,
3944  // because we need to prepare a pair of extra administrative AST ids
3945  // for the optimizing compiler.
3946  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3947  Label materialize_true, materialize_false, done;
3948  VisitForControl(expr->expression(),
3949  &materialize_false,
3950  &materialize_true,
3951  &materialize_true);
3952  __ bind(&materialize_true);
3953  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3954  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3955  if (context()->IsStackValue()) __ push(r0);
3956  __ jmp(&done);
3957  __ bind(&materialize_false);
3958  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3959  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3960  if (context()->IsStackValue()) __ push(r0);
3961  __ bind(&done);
3962  }
3963  break;
3964  }
3965 
3966  case Token::TYPEOF: {
3967  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3968  { StackValueContext context(this);
3969  VisitForTypeofValue(expr->expression());
3970  }
3971  __ CallRuntime(Runtime::kTypeof, 1);
3972  context()->Plug(r0);
3973  break;
3974  }
3975 
3976  case Token::ADD: {
3977  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3978  VisitForAccumulatorValue(expr->expression());
3979  Label no_conversion;
3980  __ JumpIfSmi(result_register(), &no_conversion);
3981  ToNumberStub convert_stub;
3982  __ CallStub(&convert_stub);
3983  __ bind(&no_conversion);
3984  context()->Plug(result_register());
3985  break;
3986  }
3987 
3988  case Token::SUB:
3989  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3990  break;
3991 
3992  case Token::BIT_NOT:
3993  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3994  break;
3995 
3996  default:
3997  UNREACHABLE();
3998  }
3999 }
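// Summary of the cases above: 'delete' distinguishes property deletion (the
// DELETE builtin), unqualified globals (classic mode only) and stack/context
// slots (result is false, except for 'delete this'); 'void' evaluates for
// effect and plugs undefined; '!' is compiled as control flow with the
// true/false labels swapped; 'typeof' forces its operand through
// VisitForTypeofValue so unresolved names do not throw; unary '+' is just a
// ToNumber conversion; '-' and '~' share the generic UnaryOpStub path below.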
4000 
4001 
4002 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
4003  const char* comment) {
4004  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
4005  Comment cmt(masm_, comment);
4006  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
4007  UnaryOverwriteMode overwrite =
4008  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
4009  UnaryOpStub stub(expr->op(), overwrite);
4010  // UnaryOpStub expects the argument to be in the
4011  // accumulator register r0.
4012  VisitForAccumulatorValue(expr->expression());
4013  SetSourcePosition(expr->position());
4014  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
4015  context()->Plug(r0);
4016 }
4017 
4018 
4019 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4020  Comment cmnt(masm_, "[ CountOperation");
4021  SetSourcePosition(expr->position());
4022 
4023  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4024  // as the left-hand side.
4025  if (!expr->expression()->IsValidLeftHandSide()) {
4026  VisitForEffect(expr->expression());
4027  return;
4028  }
4029 
4030  // Expression can only be a property, a global or a (parameter or local)
4031  // slot.
4032  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4033  LhsKind assign_type = VARIABLE;
4034  Property* prop = expr->expression()->AsProperty();
4035  // In case of a property we use the uninitialized expression context
4036  // of the key to detect a named property.
4037  if (prop != NULL) {
4038  assign_type =
4039  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4040  }
4041 
4042  // Evaluate expression and get value.
4043  if (assign_type == VARIABLE) {
4044  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4045  AccumulatorValueContext context(this);
4046  EmitVariableLoad(expr->expression()->AsVariableProxy());
4047  } else {
4048  // Reserve space for result of postfix operation.
4049  if (expr->is_postfix() && !context()->IsEffect()) {
4050  __ mov(ip, Operand(Smi::FromInt(0)));
4051  __ push(ip);
4052  }
4053  if (assign_type == NAMED_PROPERTY) {
4054  // Put the object both on the stack and in the accumulator.
4055  VisitForAccumulatorValue(prop->obj());
4056  __ push(r0);
4057  EmitNamedPropertyLoad(prop);
4058  } else {
4059  VisitForStackValue(prop->obj());
4060  VisitForAccumulatorValue(prop->key());
4061  __ ldr(r1, MemOperand(sp, 0));
4062  __ push(r0);
4063  EmitKeyedPropertyLoad(prop);
4064  }
4065  }
4066 
4067  // We need a second deoptimization point after loading the value
4068  // in case evaluating the property load may have a side effect.
4069  if (assign_type == VARIABLE) {
4070  PrepareForBailout(expr->expression(), TOS_REG);
4071  } else {
4072  PrepareForBailoutForId(expr->CountId(), TOS_REG);
4073  }
4074 
4075  // Call ToNumber only if operand is not a smi.
4076  Label no_conversion;
4077  __ JumpIfSmi(r0, &no_conversion);
4078  ToNumberStub convert_stub;
4079  __ CallStub(&convert_stub);
4080  __ bind(&no_conversion);
4081 
4082  // Save result for postfix expressions.
4083  if (expr->is_postfix()) {
4084  if (!context()->IsEffect()) {
4085  // Save the result on the stack. If we have a named or keyed property
4086  // we store the result under the receiver that is currently on top
4087  // of the stack.
4088  switch (assign_type) {
4089  case VARIABLE:
4090  __ push(r0);
4091  break;
4092  case NAMED_PROPERTY:
4093  __ str(r0, MemOperand(sp, kPointerSize));
4094  break;
4095  case KEYED_PROPERTY:
4096  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4097  break;
4098  }
4099  }
4100  }
4101 
4102 
4103  // Inline smi case if we are in a loop.
4104  Label stub_call, done;
4105  JumpPatchSite patch_site(masm_);
4106 
4107  int count_value = expr->op() == Token::INC ? 1 : -1;
4108  if (ShouldInlineSmiCase(expr->op())) {
4109  __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4110  __ b(vs, &stub_call);
4111  // We could eliminate this smi check if we split the code at
4112  // the first smi check before calling ToNumber.
4113  patch_site.EmitJumpIfSmi(r0, &done);
4114 
4115  __ bind(&stub_call);
4116  // Call stub. Undo operation first.
4117  __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4118  }
4119  __ mov(r1, Operand(Smi::FromInt(count_value)));
4120 
4121  // Record position before stub call.
4122  SetSourcePosition(expr->position());
4123 
4124  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4125  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4126  patch_site.EmitPatchInfo();
4127  __ bind(&done);
4128 
4129  // Store the value returned in r0.
4130  switch (assign_type) {
4131  case VARIABLE:
4132  if (expr->is_postfix()) {
4133  { EffectContext context(this);
4134  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4135  Token::ASSIGN);
4136  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4137  context.Plug(r0);
4138  }
4139  // For all contexts except EffectContext we have the result on
4140  // top of the stack.
4141  if (!context()->IsEffect()) {
4142  context()->PlugTOS();
4143  }
4144  } else {
4145  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4146  Token::ASSIGN);
4147  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4148  context()->Plug(r0);
4149  }
4150  break;
4151  case NAMED_PROPERTY: {
4152  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
4153  __ pop(r1);
4154  Handle<Code> ic = is_classic_mode()
4155  ? isolate()->builtins()->StoreIC_Initialize()
4156  : isolate()->builtins()->StoreIC_Initialize_Strict();
4157  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4158  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4159  if (expr->is_postfix()) {
4160  if (!context()->IsEffect()) {
4161  context()->PlugTOS();
4162  }
4163  } else {
4164  context()->Plug(r0);
4165  }
4166  break;
4167  }
4168  case KEYED_PROPERTY: {
4169  __ pop(r1); // Key.
4170  __ pop(r2); // Receiver.
4171  Handle<Code> ic = is_classic_mode()
4172  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4173  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4174  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4175  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4176  if (expr->is_postfix()) {
4177  if (!context()->IsEffect()) {
4178  context()->PlugTOS();
4179  }
4180  } else {
4181  context()->Plug(r0);
4182  }
4183  break;
4184  }
4185  }
4186 }
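// For ++/-- the common case is inlined: the operand is run through
// ToNumberStub only if it is not already a smi, then Smi::FromInt(+1/-1) is
// added with SetCC so that signed overflow (vs) branches to the generic
// BinaryOpStub after first undoing the add. The JumpPatchSite marks the
// inlined smi check so the IC machinery can patch it later. Postfix forms
// reserve an extra stack slot up front so the original value can be produced
// after the store back to the variable or property.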
4187 
4188 
4189 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4190  ASSERT(!context()->IsEffect());
4191  ASSERT(!context()->IsTest());
4192  VariableProxy* proxy = expr->AsVariableProxy();
4193  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4194  Comment cmnt(masm_, "Global variable");
4195  __ ldr(r0, GlobalObjectOperand());
4196  __ mov(r2, Operand(proxy->name()));
4197  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4198  // Use a regular load, not a contextual load, to avoid a reference
4199  // error.
4200  CallIC(ic);
4201  PrepareForBailout(expr, TOS_REG);
4202  context()->Plug(r0);
4203  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4204  Label done, slow;
4205 
4206  // Generate code for loading from variables potentially shadowed
4207  // by eval-introduced variables.
4208  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4209 
4210  __ bind(&slow);
4211  __ mov(r0, Operand(proxy->name()));
4212  __ Push(cp, r0);
4213  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4214  PrepareForBailout(expr, TOS_REG);
4215  __ bind(&done);
4216 
4217  context()->Plug(r0);
4218  } else {
4219  // This expression cannot throw a reference error at the top level.
4220  VisitInDuplicateContext(expr);
4221  }
4222 }
4223 
4224 
4225 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4226  Expression* sub_expr,
4227  Handle<String> check) {
4228  Label materialize_true, materialize_false;
4229  Label* if_true = NULL;
4230  Label* if_false = NULL;
4231  Label* fall_through = NULL;
4232  context()->PrepareTest(&materialize_true, &materialize_false,
4233  &if_true, &if_false, &fall_through);
4234 
4235  { AccumulatorValueContext context(this);
4236  VisitForTypeofValue(sub_expr);
4237  }
4238  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4239 
4240  if (check->Equals(isolate()->heap()->number_symbol())) {
4241  __ JumpIfSmi(r0, if_true);
4242  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4243  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4244  __ cmp(r0, ip);
4245  Split(eq, if_true, if_false, fall_through);
4246  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4247  __ JumpIfSmi(r0, if_false);
4248  // Check for undetectable objects => false.
4249  __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4250  __ b(ge, if_false);
4251  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4252  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4253  Split(eq, if_true, if_false, fall_through);
4254  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4255  __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4256  __ b(eq, if_true);
4257  __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4258  Split(eq, if_true, if_false, fall_through);
4259  } else if (FLAG_harmony_typeof &&
4260  check->Equals(isolate()->heap()->null_symbol())) {
4261  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4262  Split(eq, if_true, if_false, fall_through);
4263  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4264  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4265  __ b(eq, if_true);
4266  __ JumpIfSmi(r0, if_false);
4267  // Check for undetectable objects => true.
4268  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4269  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4270  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4271  Split(ne, if_true, if_false, fall_through);
4272 
4273  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4274  __ JumpIfSmi(r0, if_false);
4275  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4276  __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4277  __ b(eq, if_true);
4278  __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4279  Split(eq, if_true, if_false, fall_through);
4280  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4281  __ JumpIfSmi(r0, if_false);
4282  if (!FLAG_harmony_typeof) {
4283  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4284  __ b(eq, if_true);
4285  }
4286  // Check for JS objects => true.
4287  __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4288  __ b(lt, if_false);
4289  __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4290  __ b(gt, if_false);
4291  // Check for undetectable objects => false.
4292  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4293  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4294  Split(eq, if_true, if_false, fall_through);
4295  } else {
4296  if (if_false != fall_through) __ jmp(if_false);
4297  }
4298  context()->Plug(if_true, if_false);
4299 }
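// Specialization for comparisons of the form `typeof x == "literal"`: rather
// than materializing the typeof string and calling the compare IC, the value
// in r0 is classified directly against the requested type name. Note the
// undetectable-object tests: such objects (the document.all emulation) must
// report "undefined", so they are excluded from "string" and "object" and
// accepted for "undefined".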
4300 
4301 
4302 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4303  Comment cmnt(masm_, "[ CompareOperation");
4304  SetSourcePosition(expr->position());
4305 
4306  // First we try a fast inlined version of the compare when one of
4307  // the operands is a literal.
4308  if (TryLiteralCompare(expr)) return;
4309 
4310  // Always perform the comparison for its control flow. Pack the result
4311  // into the expression's context after the comparison is performed.
4312  Label materialize_true, materialize_false;
4313  Label* if_true = NULL;
4314  Label* if_false = NULL;
4315  Label* fall_through = NULL;
4316  context()->PrepareTest(&materialize_true, &materialize_false,
4317  &if_true, &if_false, &fall_through);
4318 
4319  Token::Value op = expr->op();
4320  VisitForStackValue(expr->left());
4321  switch (op) {
4322  case Token::IN:
4323  VisitForStackValue(expr->right());
4324  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4325  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4326  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4327  __ cmp(r0, ip);
4328  Split(eq, if_true, if_false, fall_through);
4329  break;
4330 
4331  case Token::INSTANCEOF: {
4332  VisitForStackValue(expr->right());
4333  InstanceofStub stub(InstanceofStub::kNoFlags);
4334  __ CallStub(&stub);
4335  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4336  // The stub returns 0 for true.
4337  __ tst(r0, r0);
4338  Split(eq, if_true, if_false, fall_through);
4339  break;
4340  }
4341 
4342  default: {
4343  VisitForAccumulatorValue(expr->right());
4344  Condition cond = eq;
4345  switch (op) {
4346  case Token::EQ_STRICT:
4347  case Token::EQ:
4348  cond = eq;
4349  break;
4350  case Token::LT:
4351  cond = lt;
4352  break;
4353  case Token::GT:
4354  cond = gt;
4355  break;
4356  case Token::LTE:
4357  cond = le;
4358  break;
4359  case Token::GTE:
4360  cond = ge;
4361  break;
4362  case Token::IN:
4363  case Token::INSTANCEOF:
4364  default:
4365  UNREACHABLE();
4366  }
4367  __ pop(r1);
4368 
4369  bool inline_smi_code = ShouldInlineSmiCase(op);
4370  JumpPatchSite patch_site(masm_);
4371  if (inline_smi_code) {
4372  Label slow_case;
4373  __ orr(r2, r0, Operand(r1));
4374  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4375  __ cmp(r1, r0);
4376  Split(cond, if_true, if_false, NULL);
4377  __ bind(&slow_case);
4378  }
4379 
4380  // Record position and call the compare IC.
4381  SetSourcePosition(expr->position());
4382  Handle<Code> ic = CompareIC::GetUninitialized(op);
4383  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4384  patch_site.EmitPatchInfo();
4385  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4386  __ cmp(r0, Operand(0));
4387  Split(cond, if_true, if_false, fall_through);
4388  }
4389  }
4390 
4391  // Convert the result of the comparison into one expected for this
4392  // expression's context.
4393  context()->Plug(if_true, if_false);
4394 }
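// Generic comparisons first try the literal specializations (typeof and
// null/undefined), then handle 'in' via the IN builtin and 'instanceof' via
// InstanceofStub, and finally fall back to the CompareIC. When smi code is
// inlined, both operands are OR'ed together and fed through the patchable smi
// check, so the pure smi case compares and branches without entering the IC
// at all.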
4395 
4396 
4397 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4398  Expression* sub_expr,
4399  NilValue nil) {
4400  Label materialize_true, materialize_false;
4401  Label* if_true = NULL;
4402  Label* if_false = NULL;
4403  Label* fall_through = NULL;
4404  context()->PrepareTest(&materialize_true, &materialize_false,
4405  &if_true, &if_false, &fall_through);
4406 
4407  VisitForAccumulatorValue(sub_expr);
4408  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4409  Heap::RootListIndex nil_value = nil == kNullValue ?
4410  Heap::kNullValueRootIndex :
4411  Heap::kUndefinedValueRootIndex;
4412  __ LoadRoot(r1, nil_value);
4413  __ cmp(r0, r1);
4414  if (expr->op() == Token::EQ_STRICT) {
4415  Split(eq, if_true, if_false, fall_through);
4416  } else {
4417  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4418  Heap::kUndefinedValueRootIndex :
4419  Heap::kNullValueRootIndex;
4420  __ b(eq, if_true);
4421  __ LoadRoot(r1, other_nil_value);
4422  __ cmp(r0, r1);
4423  __ b(eq, if_true);
4424  __ JumpIfSmi(r0, if_false);
4425  // It can be an undetectable object.
4426  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4427  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
4428  __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
4429  __ cmp(r1, Operand(1 << Map::kIsUndetectable));
4430  Split(eq, if_true, if_false, fall_through);
4431  }
4432  context()->Plug(if_true, if_false);
4433 }
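// Comparison against a literal null or undefined: '===' accepts only the
// exact nil value, while '==' also accepts the other nil value and
// undetectable objects, matching the JS rule that `x == null` holds for both
// null and undefined.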
4434 
4435 
4436 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4437  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4438  context()->Plug(r0);
4439 }
4440 
4441 
4442 Register FullCodeGenerator::result_register() {
4443  return r0;
4444 }
4445 
4446 
4447 Register FullCodeGenerator::context_register() {
4448  return cp;
4449 }
4450 
4451 
4452 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4453  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4454  __ str(value, MemOperand(fp, frame_offset));
4455 }
4456 
4457 
4458 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4459  __ ldr(dst, ContextOperand(cp, context_index));
4460 }
4461 
4462 
4463 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4464  Scope* declaration_scope = scope()->DeclarationScope();
4465  if (declaration_scope->is_global_scope() ||
4466  declaration_scope->is_module_scope()) {
4467  // Contexts nested in the global context have a canonical empty function
4468  // as their closure, not the anonymous closure containing the global
4469  // code. Pass a smi sentinel and let the runtime look up the empty
4470  // function.
4471  __ mov(ip, Operand(Smi::FromInt(0)));
4472  } else if (declaration_scope->is_eval_scope()) {
4473  // Contexts created by a call to eval have the same closure as the
4474  // context calling eval, not the anonymous closure containing the eval
4475  // code. Fetch it from the context.
4476  __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4477  } else {
4478  ASSERT(declaration_scope->is_function_scope());
4479  __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4480  }
4481  __ push(ip);
4482 }
4483 
4484 
4485 // ----------------------------------------------------------------------------
4486 // Non-local control flow support.
4487 
4488 void FullCodeGenerator::EnterFinallyBlock() {
4489  ASSERT(!result_register().is(r1));
4490  // Store result register while executing finally block.
4491  __ push(result_register());
4492  // Cook return address in link register to stack (smi encoded Code* delta)
4493  __ sub(r1, lr, Operand(masm_->CodeObject()));
4494  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4495  STATIC_ASSERT(kSmiTag == 0);
4496  __ add(r1, r1, Operand(r1)); // Convert to smi.
4497 
4498  // Store the cooked return address while executing the finally block.
4499  __ push(r1);
4500 
4501  // Store pending message while executing finally block.
4502  ExternalReference pending_message_obj =
4503  ExternalReference::address_of_pending_message_obj(isolate());
4504  __ mov(ip, Operand(pending_message_obj));
4505  __ ldr(r1, MemOperand(ip));
4506  __ push(r1);
4507 
4508  ExternalReference has_pending_message =
4509  ExternalReference::address_of_has_pending_message(isolate());
4510  __ mov(ip, Operand(has_pending_message));
4511  __ ldr(r1, MemOperand(ip));
4512  __ push(r1);
4513 
4514  ExternalReference pending_message_script =
4515  ExternalReference::address_of_pending_message_script(isolate());
4516  __ mov(ip, Operand(pending_message_script));
4517  __ ldr(r1, MemOperand(ip));
4518  __ push(r1);
4519 }
4520 
4521 
4522 void FullCodeGenerator::ExitFinallyBlock() {
4523  ASSERT(!result_register().is(r1));
4524  // Restore pending message from stack.
4525  __ pop(r1);
4526  ExternalReference pending_message_script =
4527  ExternalReference::address_of_pending_message_script(isolate());
4528  __ mov(ip, Operand(pending_message_script));
4529  __ str(r1, MemOperand(ip));
4530 
4531  __ pop(r1);
4532  ExternalReference has_pending_message =
4533  ExternalReference::address_of_has_pending_message(isolate());
4534  __ mov(ip, Operand(has_pending_message));
4535  __ str(r1, MemOperand(ip));
4536 
4537  __ pop(r1);
4538  ExternalReference pending_message_obj =
4539  ExternalReference::address_of_pending_message_obj(isolate());
4540  __ mov(ip, Operand(pending_message_obj));
4541  __ str(r1, MemOperand(ip));
4542 
4543  // Pop the cooked return address from the stack.
4544  __ pop(r1);
4545 
4546  // Restore the result register, then uncook the return address and return.
4547  __ pop(result_register());
4549  __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
4550  __ add(pc, r1, Operand(masm_->CodeObject()));
4551 }
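// EnterFinallyBlock/ExitFinallyBlock save and restore everything a finally
// clause could clobber: the result register, the pending message state and
// the return address. The return address is "cooked" into a smi-tagged offset
// relative to the code object before being pushed, so a GC that moves the
// code while the finally body runs cannot leave a stale raw code pointer on
// the stack; ExitFinallyBlock reverses the transformation and jumps through
// pc.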
4552 
4553 
4554 #undef __
4555 
4556 #define __ ACCESS_MASM(masm())
4557 
4558 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4559  int* stack_depth,
4560  int* context_length) {
4561  // The macros used here must preserve the result register.
4562 
4563  // Because the handler block contains the context of the finally
4564  // code, we can restore it directly from there for the finally code
4565  // rather than iteratively unwinding contexts via their previous
4566  // links.
4567  __ Drop(*stack_depth); // Down to the handler block.
4568  if (*context_length > 0) {
4569  // Restore the context to its dedicated register and the stack.
4570  __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4571  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4572  }
4573  __ PopTryHandler();
4574  __ bl(finally_entry_);
4575 
4576  *stack_depth = 0;
4577  *context_length = 0;
4578  return previous_;
4579 }
4580 
4581 
4582 #undef __
4583 
4584 } } // namespace v8::internal
4585 
4586 #endif // V8_TARGET_ARCH_ARM
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
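These flags are normally supplied on the command line (for example --prof, --trace_gc, or --noprof_auto to negate a boolean flag). As an illustration only, not code from full-codegen-arm.cc and with the flags picked arbitrarily from the list above, an embedder building against this V8 version could also hand flags to the engine programmatically through v8::V8::SetFlagsFromCommandLine and v8::V8::SetFlagsFromString:

#include <v8.h>
#include <cstring>

int main(int argc, char* argv[]) {
  // Forward --flag style arguments (e.g. --prof, --trace_gc) to V8 and strip
  // them from argv so the embedder only sees its own options.
  v8::V8::SetFlagsFromCommandLine(&argc, argv, true);

  // Boolean flags defined with DEFINE_bool accept --name and --noname forms;
  // here --prof is enabled while --prof_auto is explicitly turned off.
  const char flags[] = "--prof --noprof_auto --trace_gc";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(strlen(flags)));
  return 0;
}

Flags have to be set before V8 starts generating code, since several of them (for example --print_code or --always_full_compiler) change what the full code generator emits.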
static const int kDataOffset
Definition: objects.h:6432
static const int kGlobalReceiverOffset
Definition: objects.h:6085
int SizeOfCodeGeneratedSince(Label *label)
const Register r6
T Max(T a, T b)
Definition: utils.h:222
Scope * outer_scope() const
Definition: scopes.h:347
Flag flags[]
Definition: flags.cc:1467
int int32_t
Definition: unicode.cc:47
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5052
static bool IsSupported(CpuFeature f)
static bool enabled()
Definition: serialize.h:480
static const int kSize
Definition: objects.h:6433
#define ASSERT(condition)
Definition: checks.h:270
friend class BlockConstPoolScope
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInObjectFieldCount
Definition: objects.h:6487
const char * comment() const
Definition: flags.cc:1362
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3902
#define POINTER_SIZE_ALIGN(value)
Definition: v8globals.h:401
static const int kMaximumSlots
Definition: code-stubs.h:343
MemOperand GlobalObjectOperand()
const Register r2
static const int kInstanceClassNameOffset
Definition: objects.h:5609
static const int kGlobalContextOffset
Definition: objects.h:6084
Variable * parameter(int index) const
Definition: scopes.h:330
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
static const int kFunctionEntryId
Definition: ast.h:198
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
Definition: scopes.cc:689
static const int kHashFieldOffset
Definition: objects.h:7099
#define IN
const Register sp
static const int kLiteralsOffset
Definition: objects.h:5987
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kLengthOffset
Definition: objects.h:7098
static const int kValueOffset
Definition: objects.h:1307
Variable * arguments() const
Definition: scopes.h:338
static const int kForInSlowCaseMarker
Definition: objects.h:4149
NilValue
Definition: v8.h:141
const Register ip
const Register r9
const int kPointerSize
Definition: globals.h:234
static const int kForInFastCaseMarker
Definition: objects.h:4148
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:5011
const DwVfpRegister d7
const int kHeapObjectTag
Definition: v8.h:3848
void Jump(Register target, Condition cond=al)
#define __
static const int kCacheStampOffset
Definition: objects.h:6280
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:690
const Register pc
static const int kPropertiesOffset
Definition: objects.h:2113
void ldm(BlockAddrMode am, Register base, RegList dst, Condition cond=al)
static const int kHeaderSize
Definition: objects.h:7282
void CheckConstPool(bool force_emit, bool require_jump)
const Register r0
static const int kElementsOffset
Definition: objects.h:2114
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7154
#define BASE_EMBEDDED
Definition: allocation.h:68
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
static int OffsetOfElementAt(int index)
Definition: objects.h:2291
static const int kLengthOffset
Definition: objects.h:8111
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:2233
static const int kEnumerationIndexOffset
Definition: objects.h:2622
const Register lr
static const int kMapOffset
Definition: objects.h:1219
static const int kValueOffset
Definition: objects.h:6272
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:2627
const Register r1
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:536
static const int kLengthOffset
Definition: objects.h:2232
MemOperand FieldMemOperand(Register object, int offset)
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:168
const int kSmiShiftSize
Definition: v8.h:3899
const int kSmiTagSize
Definition: v8.h:3854
kPropertyAccessorsOffset, kNamedPropertyHandlerOffset, kInstanceTemplateOffset, kAccessCheckInfoOffset, kEvalFrominstructionsOffsetOffset, kThisPropertyAssignmentsOffset, flag
Definition: objects-inl.h:3682
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
NULL (tooltip text: run-together help strings from the flag definitions above)
Definition: flags.cc:274
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:708
static const int kJSReturnSequenceInstructions
static const int kConstructorOffset
Definition: objects.h:4954
const int kSmiTag
Definition: v8.h:3853
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
static const int kIsUndetectable
Definition: objects.h:5005
static bool ShouldGenerateLog(Expression *type)
Definition: codegen.cc:153
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
#define FACTORY
Definition: isolate.h:1409
static const int kPrototypeOffset
Definition: objects.h:4953
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const Register fp
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1669
static const int kMaxValue
Definition: objects.h:1006
static const int kBitField2Offset
Definition: objects.h:4995
#define VOID
static Handle< Code > GetUninitialized(Token::Value op)
Definition: ic.cc:2544
void check(i::Vector< const char > string)
FlagType type() const
Definition: flags.cc:1358
const Register r5
static const int kFirstIndex
Definition: objects.h:2611
static const int kInstanceTypeOffset
Definition: objects.h:4992
TypeofState
Definition: codegen.h:70
const Register r4
const Register r7
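Several of the constants indexed above, kSmiTag (v8.h:3853), kSmiTagSize (v8.h:3854), kSmiShiftSize (v8.h:3899) and kHeapObjectTag (v8.h:3848), describe V8's pointer-tagging scheme for small integers (smis) and heap object pointers. The standalone sketch below is illustrative only: it is not code from this file, and the constant values are assumptions matching the 32-bit layout used on ARM (smis keep the low bit clear; heap object pointers keep a 1 in the low bit).

#include <cstdint>
#include <cstdio>

// Assumed 32-bit values: a smi stores a 31-bit integer shifted left by
// kSmiTagSize with the low bit equal to kSmiTag (0), while heap object
// pointers carry kHeapObjectTag (1) in their low bits.
static const int kSmiTag = 0;
static const int kSmiTagSize = 1;
static const int kHeapObjectTag = 1;

static int32_t TagSmi(int32_t value) {
  return (value << kSmiTagSize) | kSmiTag;  // 42 becomes 84 (0x54)
}

static bool IsSmi(int32_t tagged) {
  return (tagged & ((1 << kSmiTagSize) - 1)) == kSmiTag;  // test the low tag bit
}

int main() {
  int32_t tagged = TagSmi(42);
  std::printf("42 tagged as a smi: 0x%x, IsSmi = %d\n", tagged, IsSmi(tagged));
  std::printf("heap object pointers keep tag %d in their low bits\n", kHeapObjectTag);
  return 0;
}

Keeping kSmiTag at 0 is a deliberate design choice: tagged smis can be added and compared directly without untagging, which is what makes smi fast paths in generated code cheap.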