v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
full-codegen-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_MIPS)
31 
32 // Note on Mips implementation:
33 //
34 // The result_register() for mips is the 'v0' register, which is defined
35 // by the ABI to contain function return values. However, the first
36 // parameter to a function is defined to be 'a0'. So there are many
37 // places where we have to move a previous result in v0 to a0 for the
38 // next call: mov(a0, v0). This is not needed on the other architectures.
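// Illustrative sketch of that pattern (added for exposition, not part of the
// original file): a preceding call leaves its result in v0, and the value has
// to be shuffled into a0 before it can serve as the next call's first
// argument, e.g.
//   __ CallStub(&some_stub);   // hypothetical stub; result is now in v0
//   __ mov(a0, v0);            // move result into the first-argument register
//   __ CallStub(&next_stub);   // hypothetical stub that reads its input from a0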
39 
40 #include "code-stubs.h"
41 #include "codegen.h"
42 #include "compiler.h"
43 #include "debug.h"
44 #include "full-codegen.h"
45 #include "isolate-inl.h"
46 #include "parser.h"
47 #include "scopes.h"
48 #include "stub-cache.h"
49 
50 #include "mips/code-stubs-mips.h"
52 
53 namespace v8 {
54 namespace internal {
55 
56 #define __ ACCESS_MASM(masm_)
57 
58 
59 // A patch site is a location in the code which it is possible to patch. This
60 // class has a number of methods to emit the code which is patchable and the
61 // method EmitPatchInfo to record a marker back to the patchable code. This
62 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
63 // (raw 16 bit immediate value is used) is the delta from the pc to the first
64 // instruction of the patchable code.
65 // The marker instruction is effectively a NOP (dest is zero_reg) and will
66 // never be emitted by normal code.
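// Worked example of the marker encoding (added for exposition, assuming
// kImm16Mask == 0xffff as used in EmitPatchInfo below): for a delta of, say,
// 20 instructions, the register code is 20 / kImm16Mask == 0 and the
// immediate is 20 % kImm16Mask == 20, so the emitted marker is
//   andi zero_reg, <register with code 0>, 20
// and the patcher recovers the delta as code * 0xffff + immediate.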
67 class JumpPatchSite BASE_EMBEDDED {
68  public:
69  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
70 #ifdef DEBUG
71  info_emitted_ = false;
72 #endif
73  }
74 
75  ~JumpPatchSite() {
76  ASSERT(patch_site_.is_bound() == info_emitted_);
77  }
78 
79  // When initially emitting this, ensure that a jump is always generated to skip
80  // the inlined smi code.
81  void EmitJumpIfNotSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
84  __ bind(&patch_site_);
85  __ andi(at, reg, 0);
86  // Always taken before patched.
87  __ Branch(target, eq, at, Operand(zero_reg));
88  }
89 
90  // When initially emitting this, ensure that a jump is never generated to skip
91  // the inlined smi code.
92  void EmitJumpIfSmi(Register reg, Label* target) {
93  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
94  ASSERT(!patch_site_.is_bound() && !info_emitted_);
95  __ bind(&patch_site_);
96  __ andi(at, reg, 0);
97  // Never taken before patched.
98  __ Branch(target, ne, at, Operand(zero_reg));
99  }
100 
101  void EmitPatchInfo() {
102  if (patch_site_.is_bound()) {
103  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
104  Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
105  __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
106 #ifdef DEBUG
107  info_emitted_ = true;
108 #endif
109  } else {
110  __ nop(); // Signals no inlined code.
111  }
112  }
113 
114  private:
115  MacroAssembler* masm_;
116  Label patch_site_;
117 #ifdef DEBUG
118  bool info_emitted_;
119 #endif
120 };
121 
122 
123 // Generate code for a JS function. On entry to the function the receiver
124 // and arguments have been pushed on the stack left to right. The actual
125 // argument count matches the formal parameter count expected by the
126 // function.
127 //
128 // The live registers are:
129 // o a1: the JS function object being called (i.e. ourselves)
130 // o cp: our context
131 // o fp: our caller's frame pointer
132 // o sp: stack pointer
133 // o ra: return address
134 //
135 // The function builds a JS frame. Please see JavaScriptFrameConstants in
136 // frames-mips.h for its layout.
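// Sketch of the frame laid out by the code below (added for exposition,
// derived from the Push/Addu sequence in this function; frames-mips.h has the
// authoritative layout):
//
//   higher addresses
//     ... caller's arguments ...
//     saved ra (return address)
//     saved caller fp            <-- fp (after Addu(fp, sp, 2 * kPointerSize))
//     context (cp)
//     JS function (a1)
//     locals, filled with undefined
//   lower addresses              <-- sp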
137 void FullCodeGenerator::Generate() {
138  CompilationInfo* info = info_;
139  handler_table_ =
140  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
141  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
142  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
143  SetFunctionPosition(function());
144  Comment cmnt(masm_, "[ function compiled by full code generator");
145 
147 
148 #ifdef DEBUG
149  if (strlen(FLAG_stop_at) > 0 &&
150  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
151  __ stop("stop-at");
152  }
153 #endif
154 
155  // Strict mode functions and builtins need to replace the receiver
156  // with undefined when called as functions (without an explicit
157  // receiver object). t1 is zero for method calls and non-zero for
158  // function calls.
159  if (!info->is_classic_mode() || info->is_native()) {
160  Label ok;
161  __ Branch(&ok, eq, t1, Operand(zero_reg));
162  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
163  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
164  __ sw(a2, MemOperand(sp, receiver_offset));
165  __ bind(&ok);
166  }
167 
168  // Open a frame scope to indicate that there is a frame on the stack. The
169  // MANUAL indicates that the scope shouldn't actually generate code to set up
170  // the frame (that is done below).
171  FrameScope frame_scope(masm_, StackFrame::MANUAL);
172 
173  int locals_count = info->scope()->num_stack_slots();
174 
175  __ Push(ra, fp, cp, a1);
176  if (locals_count > 0) {
177  // Load undefined value here, so the value is ready for the loop
178  // below.
179  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
180  }
181  // Adjust fp to point to caller's fp.
182  __ Addu(fp, sp, Operand(2 * kPointerSize));
183 
184  { Comment cmnt(masm_, "[ Allocate locals");
185  for (int i = 0; i < locals_count; i++) {
186  __ push(at);
187  }
188  }
189 
190  bool function_in_register = true;
191 
192  // Possibly allocate a local context.
193  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194  if (heap_slots > 0) {
195  Comment cmnt(masm_, "[ Allocate context");
196  // Argument to NewContext is the function, which is still in a1.
197  __ push(a1);
198  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
199  __ Push(info->scope()->GetScopeInfo());
200  __ CallRuntime(Runtime::kNewGlobalContext, 2);
201  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
202  FastNewContextStub stub(heap_slots);
203  __ CallStub(&stub);
204  } else {
205  __ CallRuntime(Runtime::kNewFunctionContext, 1);
206  }
207  function_in_register = false;
208  // Context is returned in both v0 and cp. It replaces the context
209  // passed to us. It's saved in the stack and kept live in cp.
211  // Copy any necessary parameters into the context.
212  int num_parameters = info->scope()->num_parameters();
213  for (int i = 0; i < num_parameters; i++) {
214  Variable* var = scope()->parameter(i);
215  if (var->IsContextSlot()) {
216  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
217  (num_parameters - 1 - i) * kPointerSize;
218  // Load parameter from stack.
219  __ lw(a0, MemOperand(fp, parameter_offset));
220  // Store it in the context.
221  MemOperand target = ContextOperand(cp, var->index());
222  __ sw(a0, target);
223 
224  // Update the write barrier.
225  __ RecordWriteContextSlot(
226  cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
227  }
228  }
229  }
230 
231  Variable* arguments = scope()->arguments();
232  if (arguments != NULL) {
233  // Function uses arguments object.
234  Comment cmnt(masm_, "[ Allocate arguments object");
235  if (!function_in_register) {
236  // Load this again, if it's used by the local context below.
238  } else {
239  __ mov(a3, a1);
240  }
241  // Receiver is just before the parameters on the caller's stack.
242  int num_parameters = info->scope()->num_parameters();
243  int offset = num_parameters * kPointerSize;
244  __ Addu(a2, fp,
245  Operand(StandardFrameConstants::kCallerSPOffset + offset));
246  __ li(a1, Operand(Smi::FromInt(num_parameters)));
247  __ Push(a3, a2, a1);
248 
249  // Arguments to ArgumentsAccessStub:
250  // function, receiver address, parameter count.
251  // The stub will rewrite the receiver and parameter count if the previous
252  // stack frame was an arguments adapter frame.
254  if (!is_classic_mode()) {
256  } else if (function()->has_duplicate_parameters()) {
258  } else {
260  }
261  ArgumentsAccessStub stub(type);
262  __ CallStub(&stub);
263 
264  SetVar(arguments, v0, a1, a2);
265  }
266 
267  if (FLAG_trace) {
268  __ CallRuntime(Runtime::kTraceEnter, 0);
269  }
270 
271  // Visit the declarations and body unless there is an illegal
272  // redeclaration.
273  if (scope()->HasIllegalRedeclaration()) {
274  Comment cmnt(masm_, "[ Declarations");
275  scope()->VisitIllegalRedeclaration(this);
276 
277  } else {
278  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
279  { Comment cmnt(masm_, "[ Declarations");
280  // For named function expressions, declare the function name as a
281  // constant.
282  if (scope()->is_function_scope() && scope()->function() != NULL) {
283  VariableDeclaration* function = scope()->function();
284  ASSERT(function->proxy()->var()->mode() == CONST ||
285  function->proxy()->var()->mode() == CONST_HARMONY);
286  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
287  VisitVariableDeclaration(function);
288  }
289  VisitDeclarations(scope()->declarations());
290  }
291 
292  { Comment cmnt(masm_, "[ Stack check");
293  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
294  Label ok;
295  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
296  __ Branch(&ok, hs, sp, Operand(t0));
297  StackCheckStub stub;
298  __ CallStub(&stub);
299  __ bind(&ok);
300  }
301 
302  { Comment cmnt(masm_, "[ Body");
303  ASSERT(loop_depth() == 0);
304  VisitStatements(function()->body());
305  ASSERT(loop_depth() == 0);
306  }
307  }
308 
309  // Always emit a 'return undefined' in case control fell off the end of
310  // the body.
311  { Comment cmnt(masm_, "[ return <undefined>;");
312  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
313  }
314  EmitReturnSequence();
315 }
316 
317 
318 void FullCodeGenerator::ClearAccumulator() {
319  ASSERT(Smi::FromInt(0) == 0);
320  __ mov(v0, zero_reg);
321 }
322 
323 
324 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
325  __ li(a2, Operand(profiling_counter_));
327  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
329 }
330 
331 
332 void FullCodeGenerator::EmitProfilingCounterReset() {
333  int reset_value = FLAG_interrupt_budget;
334  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
335  // Self-optimization is a one-off thing: if it fails, don't try again.
336  reset_value = Smi::kMaxValue;
337  }
338  if (isolate()->IsDebuggerActive()) {
339  // Detect debug break requests as soon as possible.
340  reset_value = FLAG_interrupt_budget >> 4;
341  }
342  __ li(a2, Operand(profiling_counter_));
343  __ li(a3, Operand(Smi::FromInt(reset_value)));
345 }
346 
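// Summary of the counter scheme (added for exposition; see EmitStackCheck()
// and EmitReturnSequence() below): profiling_counter_ holds a Smi budget
// initialised from FLAG_interrupt_budget. Back edges (and, depending on
// flags, function returns) decrement it by a weight derived from the
// generated-code distance, and once it drops below zero an InterruptStub (or,
// for self-optimization, a runtime call) is invoked and the counter is reset
// via EmitProfilingCounterReset().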
347 
348 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
349  Label* back_edge_target) {
350  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
351  // to make sure it is constant. Branch may emit a skip-or-jump sequence
352  // instead of the normal Branch. It seems that the "skip" part of that
353  // sequence is about as long as this Branch would be so it is safe to ignore
354  // that.
355  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
356  Comment cmnt(masm_, "[ Stack check");
357  Label ok;
358  if (FLAG_count_based_interrupts) {
359  int weight = 1;
360  if (FLAG_weighted_back_edges) {
361  ASSERT(back_edge_target->is_bound());
362  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
363  weight = Min(kMaxBackEdgeWeight,
364  Max(1, distance / kBackEdgeDistanceUnit));
365  }
366  EmitProfilingCounterDecrement(weight);
367  __ slt(at, a3, zero_reg);
368  __ beq(at, zero_reg, &ok);
369  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
370  InterruptStub stub;
371  __ CallStub(&stub);
372  } else {
373  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
374  __ sltu(at, sp, t0);
375  __ beq(at, zero_reg, &ok);
376  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
377  StackCheckStub stub;
378  __ CallStub(&stub);
379  }
380  // Record a mapping of this PC offset to the OSR id. This is used to find
381  // the AST id from the unoptimized code in order to use it as a key into
382  // the deoptimization input data found in the optimized code.
383  RecordStackCheck(stmt->OsrEntryId());
384  if (FLAG_count_based_interrupts) {
385  EmitProfilingCounterReset();
386  }
387 
388  __ bind(&ok);
389  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
390  // Record a mapping of the OSR id to this PC. This is used if the OSR
391  // entry becomes the target of a bailout. We don't expect it to be, but
392  // we want it to work if it is.
393  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
394 }
395 
396 
397 void FullCodeGenerator::EmitReturnSequence() {
398  Comment cmnt(masm_, "[ Return sequence");
399  if (return_label_.is_bound()) {
400  __ Branch(&return_label_);
401  } else {
402  __ bind(&return_label_);
403  if (FLAG_trace) {
404  // Push the return value on the stack as the parameter.
405  // Runtime::TraceExit returns its parameter in v0.
406  __ push(v0);
407  __ CallRuntime(Runtime::kTraceExit, 1);
408  }
409  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
410  // Pretend that the exit is a backwards jump to the entry.
411  int weight = 1;
412  if (info_->ShouldSelfOptimize()) {
413  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
414  } else if (FLAG_weighted_back_edges) {
415  int distance = masm_->pc_offset();
416  weight = Min(kMaxBackEdgeWeight,
417  Max(1, distance / kBackEdgeDistanceUnit));
418  }
419  EmitProfilingCounterDecrement(weight);
420  Label ok;
421  __ Branch(&ok, ge, a3, Operand(zero_reg));
422  __ push(v0);
423  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
425  __ push(a2);
426  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
427  } else {
428  InterruptStub stub;
429  __ CallStub(&stub);
430  }
431  __ pop(v0);
432  EmitProfilingCounterReset();
433  __ bind(&ok);
434  }
435 
436 #ifdef DEBUG
437  // Add a label for checking the size of the code used for returning.
438  Label check_exit_codesize;
439  masm_->bind(&check_exit_codesize);
440 #endif
441  // Make sure that the constant pool is not emitted inside of the return
442  // sequence.
443  { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
444  // Here we use masm_-> instead of the __ macro to keep the code coverage
445  // tool from instrumenting this sequence, since we rely on its code size here.
446  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
447  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
448  __ RecordJSReturn();
449  masm_->mov(sp, fp);
450  masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
451  masm_->Addu(sp, sp, Operand(sp_delta));
452  masm_->Jump(ra);
453  }
454 
455 #ifdef DEBUG
456  // Check that the size of the code used for returning is large enough
457  // for the debugger's requirements.
459  masm_->InstructionsGeneratedSince(&check_exit_codesize));
460 #endif
461  }
462 }
463 
464 
465 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
466  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
467 }
468 
469 
470 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
471  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
472  codegen()->GetVar(result_register(), var);
473 }
474 
475 
476 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
477  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
478  codegen()->GetVar(result_register(), var);
479  __ push(result_register());
480 }
481 
482 
483 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
484  // For simplicity we always test the accumulator register.
485  codegen()->GetVar(result_register(), var);
486  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
487  codegen()->DoTest(this);
488 }
489 
490 
491 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
492 }
493 
494 
495 void FullCodeGenerator::AccumulatorValueContext::Plug(
496  Heap::RootListIndex index) const {
497  __ LoadRoot(result_register(), index);
498 }
499 
500 
501 void FullCodeGenerator::StackValueContext::Plug(
502  Heap::RootListIndex index) const {
503  __ LoadRoot(result_register(), index);
504  __ push(result_register());
505 }
506 
507 
508 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
509  codegen()->PrepareForBailoutBeforeSplit(condition(),
510  true,
511  true_label_,
512  false_label_);
513  if (index == Heap::kUndefinedValueRootIndex ||
514  index == Heap::kNullValueRootIndex ||
515  index == Heap::kFalseValueRootIndex) {
516  if (false_label_ != fall_through_) __ Branch(false_label_);
517  } else if (index == Heap::kTrueValueRootIndex) {
518  if (true_label_ != fall_through_) __ Branch(true_label_);
519  } else {
520  __ LoadRoot(result_register(), index);
521  codegen()->DoTest(this);
522  }
523 }
524 
525 
526 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
527 }
528 
529 
530 void FullCodeGenerator::AccumulatorValueContext::Plug(
531  Handle<Object> lit) const {
532  __ li(result_register(), Operand(lit));
533 }
534 
535 
536 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
537  // Immediates cannot be pushed directly.
538  __ li(result_register(), Operand(lit));
539  __ push(result_register());
540 }
541 
542 
543 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
544  codegen()->PrepareForBailoutBeforeSplit(condition(),
545  true,
546  true_label_,
547  false_label_);
548  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
549  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
550  if (false_label_ != fall_through_) __ Branch(false_label_);
551  } else if (lit->IsTrue() || lit->IsJSObject()) {
552  if (true_label_ != fall_through_) __ Branch(true_label_);
553  } else if (lit->IsString()) {
554  if (String::cast(*lit)->length() == 0) {
555  if (false_label_ != fall_through_) __ Branch(false_label_);
556  } else {
557  if (true_label_ != fall_through_) __ Branch(true_label_);
558  }
559  } else if (lit->IsSmi()) {
560  if (Smi::cast(*lit)->value() == 0) {
561  if (false_label_ != fall_through_) __ Branch(false_label_);
562  } else {
563  if (true_label_ != fall_through_) __ Branch(true_label_);
564  }
565  } else {
566  // For simplicity we always test the accumulator register.
567  __ li(result_register(), Operand(lit));
568  codegen()->DoTest(this);
569  }
570 }
571 
572 
573 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
574  Register reg) const {
575  ASSERT(count > 0);
576  __ Drop(count);
577 }
578 
579 
580 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
581  int count,
582  Register reg) const {
583  ASSERT(count > 0);
584  __ Drop(count);
585  __ Move(result_register(), reg);
586 }
587 
588 
589 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
590  Register reg) const {
591  ASSERT(count > 0);
592  if (count > 1) __ Drop(count - 1);
593  __ sw(reg, MemOperand(sp, 0));
594 }
595 
596 
597 void FullCodeGenerator::TestContext::DropAndPlug(int count,
598  Register reg) const {
599  ASSERT(count > 0);
600  // For simplicity we always test the accumulator register.
601  __ Drop(count);
602  __ Move(result_register(), reg);
603  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
604  codegen()->DoTest(this);
605 }
606 
607 
608 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
609  Label* materialize_false) const {
610  ASSERT(materialize_true == materialize_false);
611  __ bind(materialize_true);
612 }
613 
614 
615 void FullCodeGenerator::AccumulatorValueContext::Plug(
616  Label* materialize_true,
617  Label* materialize_false) const {
618  Label done;
619  __ bind(materialize_true);
620  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
621  __ Branch(&done);
622  __ bind(materialize_false);
623  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
624  __ bind(&done);
625 }
626 
627 
628 void FullCodeGenerator::StackValueContext::Plug(
629  Label* materialize_true,
630  Label* materialize_false) const {
631  Label done;
632  __ bind(materialize_true);
633  __ LoadRoot(at, Heap::kTrueValueRootIndex);
634  __ push(at);
635  __ Branch(&done);
636  __ bind(materialize_false);
637  __ LoadRoot(at, Heap::kFalseValueRootIndex);
638  __ push(at);
639  __ bind(&done);
640 }
641 
642 
643 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
644  Label* materialize_false) const {
645  ASSERT(materialize_true == true_label_);
646  ASSERT(materialize_false == false_label_);
647 }
648 
649 
650 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
651 }
652 
653 
654 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
655  Heap::RootListIndex value_root_index =
656  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
657  __ LoadRoot(result_register(), value_root_index);
658 }
659 
660 
661 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
662  Heap::RootListIndex value_root_index =
663  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
664  __ LoadRoot(at, value_root_index);
665  __ push(at);
666 }
667 
668 
669 void FullCodeGenerator::TestContext::Plug(bool flag) const {
670  codegen()->PrepareForBailoutBeforeSplit(condition(),
671  true,
672  true_label_,
673  false_label_);
674  if (flag) {
675  if (true_label_ != fall_through_) __ Branch(true_label_);
676  } else {
677  if (false_label_ != fall_through_) __ Branch(false_label_);
678  }
679 }
680 
681 
682 void FullCodeGenerator::DoTest(Expression* condition,
683  Label* if_true,
684  Label* if_false,
685  Label* fall_through) {
687  ToBooleanStub stub(result_register());
688  __ CallStub(&stub);
689  __ mov(at, zero_reg);
690  } else {
691  // Call the runtime to find the boolean value of the source and then
692  // translate it into control flow to the pair of labels.
693  __ push(result_register());
694  __ CallRuntime(Runtime::kToBool, 1);
695  __ LoadRoot(at, Heap::kFalseValueRootIndex);
696  }
697  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
698 }
699 
700 
701 void FullCodeGenerator::Split(Condition cc,
702  Register lhs,
703  const Operand& rhs,
704  Label* if_true,
705  Label* if_false,
706  Label* fall_through) {
707  if (if_false == fall_through) {
708  __ Branch(if_true, cc, lhs, rhs);
709  } else if (if_true == fall_through) {
710  __ Branch(if_false, NegateCondition(cc), lhs, rhs);
711  } else {
712  __ Branch(if_true, cc, lhs, rhs);
713  __ Branch(if_false);
714  }
715 }
716 
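// Usage note (added for exposition): callers pick the fall-through label so
// that at most one branch is emitted. For instance, DoTest() above ends with
//   Split(ne, v0, Operand(at), if_true, if_false, fall_through);
// which emits a single branch to if_true when if_false is the fall-through,
// a single negated branch to if_false when if_true is the fall-through, and
// both a conditional and an unconditional branch otherwise.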
717 
718 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
719  ASSERT(var->IsStackAllocated());
720  // Offset is negative because higher indexes are at lower addresses.
721  int offset = -var->index() * kPointerSize;
722  // Adjust by a (parameter or local) base offset.
723  if (var->IsParameter()) {
724  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
725  } else {
727  }
728  return MemOperand(fp, offset);
729 }
730 
731 
732 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
733  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
734  if (var->IsContextSlot()) {
735  int context_chain_length = scope()->ContextChainLength(var->scope());
736  __ LoadContext(scratch, context_chain_length);
737  return ContextOperand(scratch, var->index());
738  } else {
739  return StackOperand(var);
740  }
741 }
742 
743 
744 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
745  // Use destination as scratch.
746  MemOperand location = VarOperand(var, dest);
747  __ lw(dest, location);
748 }
749 
750 
751 void FullCodeGenerator::SetVar(Variable* var,
752  Register src,
753  Register scratch0,
754  Register scratch1) {
755  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
756  ASSERT(!scratch0.is(src));
757  ASSERT(!scratch0.is(scratch1));
758  ASSERT(!scratch1.is(src));
759  MemOperand location = VarOperand(var, scratch0);
760  __ sw(src, location);
761  // Emit the write barrier code if the location is in the heap.
762  if (var->IsContextSlot()) {
763  __ RecordWriteContextSlot(scratch0,
764  location.offset(),
765  src,
766  scratch1,
769  }
770 }
771 
772 
773 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
774  bool should_normalize,
775  Label* if_true,
776  Label* if_false) {
777  // Only prepare for bailouts before splits if we're in a test
778  // context. Otherwise, we let the Visit function deal with the
779  // preparation to avoid preparing with the same AST id twice.
780  if (!context()->IsTest() || !info_->IsOptimizable()) return;
781 
782  Label skip;
783  if (should_normalize) __ Branch(&skip);
784  PrepareForBailout(expr, TOS_REG);
785  if (should_normalize) {
786  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
787  Split(eq, a0, Operand(t0), if_true, if_false, NULL);
788  __ bind(&skip);
789  }
790 }
791 
792 
793 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
794  // The variable in the declaration always resides in the current function
795  // context.
796  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
797  if (generate_debug_code_) {
798  // Check that we're not inside a with or catch context.
800  __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
801  __ Check(ne, "Declaration in with context.",
802  a1, Operand(t0));
803  __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
804  __ Check(ne, "Declaration in catch context.",
805  a1, Operand(t0));
806  }
807 }
808 
809 
810 void FullCodeGenerator::VisitVariableDeclaration(
811  VariableDeclaration* declaration) {
812  // If it was not possible to allocate the variable at compile time, we
813  // need to "declare" it at runtime to make sure it actually exists in the
814  // local context.
815  VariableProxy* proxy = declaration->proxy();
816  VariableMode mode = declaration->mode();
817  Variable* variable = proxy->var();
818  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
819  switch (variable->location()) {
821  globals_->Add(variable->name(), zone());
822  globals_->Add(variable->binding_needs_init()
823  ? isolate()->factory()->the_hole_value()
824  : isolate()->factory()->undefined_value(),
825  zone());
826  break;
827 
828  case Variable::PARAMETER:
829  case Variable::LOCAL:
830  if (hole_init) {
831  Comment cmnt(masm_, "[ VariableDeclaration");
832  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
833  __ sw(t0, StackOperand(variable));
834  }
835  break;
836 
837  case Variable::CONTEXT:
838  if (hole_init) {
839  Comment cmnt(masm_, "[ VariableDeclaration");
840  EmitDebugCheckDeclarationContext(variable);
841  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
842  __ sw(at, ContextOperand(cp, variable->index()));
843  // No write barrier since the_hole_value is in old space.
844  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
845  }
846  break;
847 
848  case Variable::LOOKUP: {
849  Comment cmnt(masm_, "[ VariableDeclaration");
850  __ li(a2, Operand(variable->name()));
851  // Declaration nodes are always introduced in one of four modes.
853  PropertyAttributes attr =
855  __ li(a1, Operand(Smi::FromInt(attr)));
856  // Push initial value, if any.
857  // Note: For variables we must not push an initial value (such as
858  // 'undefined') because we may have a (legal) redeclaration and we
859  // must not destroy the current value.
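// (Illustration, added for exposition: in 'eval("var x = 1; var x;")' the
// second declaration must leave x equal to 1 rather than resetting it to
// undefined; the Smi 0 pushed below tells the runtime that no initial value
// should be stored.)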
860  if (hole_init) {
861  __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
862  __ Push(cp, a2, a1, a0);
863  } else {
864  ASSERT(Smi::FromInt(0) == 0);
865  __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
866  __ Push(cp, a2, a1, a0);
867  }
868  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
869  break;
870  }
871  }
872 }
873 
874 
875 void FullCodeGenerator::VisitFunctionDeclaration(
876  FunctionDeclaration* declaration) {
877  VariableProxy* proxy = declaration->proxy();
878  Variable* variable = proxy->var();
879  switch (variable->location()) {
880  case Variable::UNALLOCATED: {
881  globals_->Add(variable->name(), zone());
882  Handle<SharedFunctionInfo> function =
883  Compiler::BuildFunctionInfo(declaration->fun(), script());
884  // Check for stack-overflow exception.
885  if (function.is_null()) return SetStackOverflow();
886  globals_->Add(function, zone());
887  break;
888  }
889 
890  case Variable::PARAMETER:
891  case Variable::LOCAL: {
892  Comment cmnt(masm_, "[ FunctionDeclaration");
893  VisitForAccumulatorValue(declaration->fun());
894  __ sw(result_register(), StackOperand(variable));
895  break;
896  }
897 
898  case Variable::CONTEXT: {
899  Comment cmnt(masm_, "[ FunctionDeclaration");
900  EmitDebugCheckDeclarationContext(variable);
901  VisitForAccumulatorValue(declaration->fun());
902  __ sw(result_register(), ContextOperand(cp, variable->index()));
903  int offset = Context::SlotOffset(variable->index());
904  // We know that we have written a function, which is not a smi.
905  __ RecordWriteContextSlot(cp,
906  offset,
907  result_register(),
908  a2,
913  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
914  break;
915  }
916 
917  case Variable::LOOKUP: {
918  Comment cmnt(masm_, "[ FunctionDeclaration");
919  __ li(a2, Operand(variable->name()));
920  __ li(a1, Operand(Smi::FromInt(NONE)));
921  __ Push(cp, a2, a1);
922  // Push initial value for function declaration.
923  VisitForStackValue(declaration->fun());
924  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
925  break;
926  }
927  }
928 }
929 
930 
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932  VariableProxy* proxy = declaration->proxy();
933  Variable* variable = proxy->var();
934  Handle<JSModule> instance = declaration->module()->interface()->Instance();
935  ASSERT(!instance.is_null());
936 
937  switch (variable->location()) {
938  case Variable::UNALLOCATED: {
939  Comment cmnt(masm_, "[ ModuleDeclaration");
940  globals_->Add(variable->name(), zone());
941  globals_->Add(instance, zone());
942  Visit(declaration->module());
943  break;
944  }
945 
946  case Variable::CONTEXT: {
947  Comment cmnt(masm_, "[ ModuleDeclaration");
948  EmitDebugCheckDeclarationContext(variable);
949  __ li(a1, Operand(instance));
950  __ sw(a1, ContextOperand(cp, variable->index()));
951  Visit(declaration->module());
952  break;
953  }
954 
955  case Variable::PARAMETER:
956  case Variable::LOCAL:
957  case Variable::LOOKUP:
958  UNREACHABLE();
959  }
960 }
961 
962 
963 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
964  VariableProxy* proxy = declaration->proxy();
965  Variable* variable = proxy->var();
966  switch (variable->location()) {
968  // TODO(rossberg)
969  break;
970 
971  case Variable::CONTEXT: {
972  Comment cmnt(masm_, "[ ImportDeclaration");
973  EmitDebugCheckDeclarationContext(variable);
974  // TODO(rossberg)
975  break;
976  }
977 
978  case Variable::PARAMETER:
979  case Variable::LOCAL:
980  case Variable::LOOKUP:
981  UNREACHABLE();
982  }
983 }
984 
985 
986 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
987  // TODO(rossberg)
988 }
989 
990 
991 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
992  // Call the runtime to declare the globals.
993  // The context is the first argument.
994  __ li(a1, Operand(pairs));
995  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
996  __ Push(cp, a1, a0);
997  __ CallRuntime(Runtime::kDeclareGlobals, 3);
998  // Return value is ignored.
999 }
1000 
1001 
1002 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1003  Comment cmnt(masm_, "[ SwitchStatement");
1004  Breakable nested_statement(this, stmt);
1005  SetStatementPosition(stmt);
1006 
1007  // Keep the switch value on the stack until a case matches.
1008  VisitForStackValue(stmt->tag());
1009  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1010 
1011  ZoneList<CaseClause*>* clauses = stmt->cases();
1012  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1013 
1014  Label next_test; // Recycled for each test.
1015  // Compile all the tests with branches to their bodies.
1016  for (int i = 0; i < clauses->length(); i++) {
1017  CaseClause* clause = clauses->at(i);
1018  clause->body_target()->Unuse();
1019 
1020  // The default is not a test, but remember it as final fall through.
1021  if (clause->is_default()) {
1022  default_clause = clause;
1023  continue;
1024  }
1025 
1026  Comment cmnt(masm_, "[ Case comparison");
1027  __ bind(&next_test);
1028  next_test.Unuse();
1029 
1030  // Compile the label expression.
1031  VisitForAccumulatorValue(clause->label());
1032  __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1033 
1034  // Perform the comparison as if via '==='.
1035  __ lw(a1, MemOperand(sp, 0)); // Switch value.
1036  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1037  JumpPatchSite patch_site(masm_);
1038  if (inline_smi_code) {
1039  Label slow_case;
1040  __ or_(a2, a1, a0);
1041  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1042 
1043  __ Branch(&next_test, ne, a1, Operand(a0));
1044  __ Drop(1); // Switch value is no longer needed.
1045  __ Branch(clause->body_target());
1046 
1047  __ bind(&slow_case);
1048  }
1049 
1050  // Record position before stub call for type feedback.
1051  SetSourcePosition(clause->position());
1052  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1053  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1054  patch_site.EmitPatchInfo();
1055 
1056  __ Branch(&next_test, ne, v0, Operand(zero_reg));
1057  __ Drop(1); // Switch value is no longer needed.
1058  __ Branch(clause->body_target());
1059  }
1060 
1061  // Discard the test value and jump to the default if present, otherwise to
1062  // the end of the statement.
1063  __ bind(&next_test);
1064  __ Drop(1); // Switch value is no longer needed.
1065  if (default_clause == NULL) {
1066  __ Branch(nested_statement.break_label());
1067  } else {
1068  __ Branch(default_clause->body_target());
1069  }
1070 
1071  // Compile all the case bodies.
1072  for (int i = 0; i < clauses->length(); i++) {
1073  Comment cmnt(masm_, "[ Case body");
1074  CaseClause* clause = clauses->at(i);
1075  __ bind(clause->body_target());
1076  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1077  VisitStatements(clause->statements());
1078  }
1079 
1080  __ bind(nested_statement.break_label());
1081  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1082 }
1083 
1084 
1085 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1086  Comment cmnt(masm_, "[ ForInStatement");
1087  SetStatementPosition(stmt);
1088 
1089  Label loop, exit;
1090  ForIn loop_statement(this, stmt);
1091  increment_loop_depth();
1092 
1093  // Get the object to enumerate over. Both SpiderMonkey and JSC
1094  // ignore null and undefined in contrast to the specification; see
1095  // ECMA-262 section 12.6.4.
1096  VisitForAccumulatorValue(stmt->enumerable());
1097  __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1098  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1099  __ Branch(&exit, eq, a0, Operand(at));
1100  Register null_value = t1;
1101  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1102  __ Branch(&exit, eq, a0, Operand(null_value));
1103  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1104  __ mov(a0, v0);
1105  // Convert the object to a JS object.
1106  Label convert, done_convert;
1107  __ JumpIfSmi(a0, &convert);
1108  __ GetObjectType(a0, a1, a1);
1109  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1110  __ bind(&convert);
1111  __ push(a0);
1112  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1113  __ mov(a0, v0);
1114  __ bind(&done_convert);
1115  __ push(a0);
1116 
1117  // Check for proxies.
1118  Label call_runtime;
1120  __ GetObjectType(a0, a1, a1);
1121  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1122 
1123  // Check cache validity in generated code. This is a fast case for
1124  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1125  // guarantee cache validity, call the runtime system to check cache
1126  // validity or get the property names in a fixed array.
1127  __ CheckEnumCache(null_value, &call_runtime);
1128 
1129  // The enum cache is valid. Load the map of the object being
1130  // iterated over and use the cache for the iteration.
1131  Label use_cache;
1133  __ Branch(&use_cache);
1134 
1135  // Get the set of properties to enumerate.
1136  __ bind(&call_runtime);
1137  __ push(a0); // Duplicate the enumerable object on the stack.
1138  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1139 
1140  // If we got a map from the runtime call, we can do a fast
1141  // modification check. Otherwise, we got a fixed array, and we have
1142  // to do a slow check.
1143  Label fixed_array;
1145  __ LoadRoot(at, Heap::kMetaMapRootIndex);
1146  __ Branch(&fixed_array, ne, a2, Operand(at));
1147 
1148  // We got a map in register v0. Get the enumeration cache from it.
1149  Label no_descriptors;
1150  __ bind(&use_cache);
1151 
1152  __ EnumLength(a1, v0);
1153  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1154 
1155  __ LoadInstanceDescriptors(v0, a2);
1158 
1159  // Set up the four remaining stack slots.
1160  __ push(v0); // Map.
1161  __ li(a0, Operand(Smi::FromInt(0)));
1162  // Push enumeration cache, enumeration cache length (as smi) and zero.
1163  __ Push(a2, a1, a0);
1164  __ jmp(&loop);
1165 
1166  __ bind(&no_descriptors);
1167  __ Drop(1);
1168  __ jmp(&exit);
1169 
1170  // We got a fixed array in register v0. Iterate through that.
1171  Label non_proxy;
1172  __ bind(&fixed_array);
1173 
1174  Handle<JSGlobalPropertyCell> cell =
1175  isolate()->factory()->NewJSGlobalPropertyCell(
1176  Handle<Object>(
1178  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1179  __ LoadHeapObject(a1, cell);
1182 
1183  __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1184  __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1186  __ GetObjectType(a2, a3, a3);
1187  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1188  __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1189  __ bind(&non_proxy);
1190  __ Push(a1, v0); // Smi and array
1192  __ li(a0, Operand(Smi::FromInt(0)));
1193  __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1194 
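// Note on the loop's stack layout (added for exposition, derived from the
// loads and the final Drop(5) below): from the top of the stack downwards the
// five slots hold the current index, the cache/array length, the FixedArray
// of keys (or enum cache), the expected map (or a Smi flag for the slow and
// proxy cases), and finally the enumerable object itself.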
1195  // Generate code for doing the condition check.
1196  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1197  __ bind(&loop);
1198  // Load the current count to a0, load the length to a1.
1199  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1200  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1201  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1202 
1203  // Get the current entry of the array into register a3.
1204  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1205  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1206  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1207  __ addu(t0, a2, t0); // Array base + scaled (smi) index.
1208  __ lw(a3, MemOperand(t0)); // Current entry.
1209 
1210  // Get the expected map from the stack or a smi in the
1211  // permanent slow case into register a2.
1212  __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1213 
1214  // Check if the expected map still matches that of the enumerable.
1215  // If not, we may have to filter the key.
1216  Label update_each;
1217  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1219  __ Branch(&update_each, eq, t0, Operand(a2));
1220 
1221  // For proxies, no filtering is done.
1222  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1223  ASSERT_EQ(Smi::FromInt(0), 0);
1224  __ Branch(&update_each, eq, a2, Operand(zero_reg));
1225 
1226  // Convert the entry to a string or (smi) 0 if it isn't a property
1227  // any more. If the property has been removed while iterating, we
1228  // just skip it.
1229  __ push(a1); // Enumerable.
1230  __ push(a3); // Current entry.
1231  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1232  __ mov(a3, result_register());
1233  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1234 
1235  // Update the 'each' property or variable from the possibly filtered
1236  // entry in register a3.
1237  __ bind(&update_each);
1238  __ mov(result_register(), a3);
1239  // Perform the assignment as if via '='.
1240  { EffectContext context(this);
1241  EmitAssignment(stmt->each());
1242  }
1243 
1244  // Generate code for the body of the loop.
1245  Visit(stmt->body());
1246 
1247  // Generate code for going to the next element by incrementing
1248  // the index (smi) stored on top of the stack.
1249  __ bind(loop_statement.continue_label());
1250  __ pop(a0);
1251  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1252  __ push(a0);
1253 
1254  EmitStackCheck(stmt, &loop);
1255  __ Branch(&loop);
1256 
1257  // Remove the pointers stored on the stack.
1258  __ bind(loop_statement.break_label());
1259  __ Drop(5);
1260 
1261  // Exit and decrement the loop depth.
1262  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1263  __ bind(&exit);
1264  decrement_loop_depth();
1265 }
1266 
1267 
1268 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1269  bool pretenure) {
1270  // Use the fast case closure allocation code that allocates in new
1271  // space for nested functions that don't need literals cloning. If
1272  // we're running with the --always-opt or the --prepare-always-opt
1273  // flag, we need to use the runtime function so that the new function
1274  // we are creating here gets a chance to have its code optimized and
1275  // doesn't just get a copy of the existing unoptimized code.
1276  if (!FLAG_always_opt &&
1277  !FLAG_prepare_always_opt &&
1278  !pretenure &&
1279  scope()->is_function_scope() &&
1280  info->num_literals() == 0) {
1281  FastNewClosureStub stub(info->language_mode());
1282  __ li(a0, Operand(info));
1283  __ push(a0);
1284  __ CallStub(&stub);
1285  } else {
1286  __ li(a0, Operand(info));
1287  __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1288  : Heap::kFalseValueRootIndex);
1289  __ Push(cp, a0, a1);
1290  __ CallRuntime(Runtime::kNewClosure, 3);
1291  }
1292  context()->Plug(v0);
1293 }
1294 
1295 
1296 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1297  Comment cmnt(masm_, "[ VariableProxy");
1298  EmitVariableLoad(expr);
1299 }
1300 
1301 
1302 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1303  TypeofState typeof_state,
1304  Label* slow) {
1305  Register current = cp;
1306  Register next = a1;
1307  Register temp = a2;
1308 
1309  Scope* s = scope();
1310  while (s != NULL) {
1311  if (s->num_heap_slots() > 0) {
1312  if (s->calls_non_strict_eval()) {
1313  // Check that extension is NULL.
1314  __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1315  __ Branch(slow, ne, temp, Operand(zero_reg));
1316  }
1317  // Load next context in chain.
1318  __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1319  // Walk the rest of the chain without clobbering cp.
1320  current = next;
1321  }
1322  // If no outer scope calls eval, we do not need to check more
1323  // context extensions.
1324  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1325  s = s->outer_scope();
1326  }
1327 
1328  if (s->is_eval_scope()) {
1329  Label loop, fast;
1330  if (!current.is(next)) {
1331  __ Move(next, current);
1332  }
1333  __ bind(&loop);
1334  // Terminate at native context.
1335  __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1336  __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
1337  __ Branch(&fast, eq, temp, Operand(t0));
1338  // Check that extension is NULL.
1339  __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1340  __ Branch(slow, ne, temp, Operand(zero_reg));
1341  // Load next context in chain.
1342  __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1343  __ Branch(&loop);
1344  __ bind(&fast);
1345  }
1346 
1347  __ lw(a0, GlobalObjectOperand());
1348  __ li(a2, Operand(var->name()));
1349  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1350  ? RelocInfo::CODE_TARGET
1351  : RelocInfo::CODE_TARGET_CONTEXT;
1352  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1353  CallIC(ic, mode);
1354 }
1355 
1356 
1357 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1358  Label* slow) {
1359  ASSERT(var->IsContextSlot());
1360  Register context = cp;
1361  Register next = a3;
1362  Register temp = t0;
1363 
1364  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1365  if (s->num_heap_slots() > 0) {
1366  if (s->calls_non_strict_eval()) {
1367  // Check that extension is NULL.
1368  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1369  __ Branch(slow, ne, temp, Operand(zero_reg));
1370  }
1371  __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1372  // Walk the rest of the chain without clobbering cp.
1373  context = next;
1374  }
1375  }
1376  // Check that last extension is NULL.
1377  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1378  __ Branch(slow, ne, temp, Operand(zero_reg));
1379 
1380  // This function is used only for loads, not stores, so it's safe to
1381  // return a cp-based operand (the write barrier cannot be allowed to
1382  // destroy the cp register).
1383  return ContextOperand(context, var->index());
1384 }
1385 
1386 
1387 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1388  TypeofState typeof_state,
1389  Label* slow,
1390  Label* done) {
1391  // Generate fast-case code for variables that might be shadowed by
1392  // eval-introduced variables. Eval is used a lot without
1393  // introducing variables. In those cases, we do not want to
1394  // perform a runtime call for all variables in the scope
1395  // containing the eval.
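// (Illustration, added for exposition: in code such as
//    var x = 1;
//    function f(s) { eval(s); return x; }
//  the 'x' read inside f normally refers to the outer binding, but a call
//  like f("var x = 2") introduces a shadowing binding at runtime, so the
//  fast cases below still have to check for context extensions.)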
1396  if (var->mode() == DYNAMIC_GLOBAL) {
1397  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1398  __ Branch(done);
1399  } else if (var->mode() == DYNAMIC_LOCAL) {
1400  Variable* local = var->local_if_not_shadowed();
1401  __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1402  if (local->mode() == CONST ||
1403  local->mode() == CONST_HARMONY ||
1404  local->mode() == LET) {
1405  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1406  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1407  if (local->mode() == CONST) {
1408  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1409  __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1410  } else { // LET || CONST_HARMONY
1411  __ Branch(done, ne, at, Operand(zero_reg));
1412  __ li(a0, Operand(var->name()));
1413  __ push(a0);
1414  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1415  }
1416  }
1417  __ Branch(done);
1418  }
1419 }
1420 
1421 
1422 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1423  // Record position before possible IC call.
1424  SetSourcePosition(proxy->position());
1425  Variable* var = proxy->var();
1426 
1427  // Three cases: global variables, lookup variables, and all other types of
1428  // variables.
1429  switch (var->location()) {
1430  case Variable::UNALLOCATED: {
1431  Comment cmnt(masm_, "Global variable");
1432  // Use inline caching. Variable name is passed in a2 and the global
1433  // object (receiver) in a0.
1434  __ lw(a0, GlobalObjectOperand());
1435  __ li(a2, Operand(var->name()));
1436  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1437  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1438  context()->Plug(v0);
1439  break;
1440  }
1441 
1442  case Variable::PARAMETER:
1443  case Variable::LOCAL:
1444  case Variable::CONTEXT: {
1445  Comment cmnt(masm_, var->IsContextSlot()
1446  ? "Context variable"
1447  : "Stack variable");
1448  if (var->binding_needs_init()) {
1449  // var->scope() may be NULL when the proxy is located in eval code and
1450  // refers to a potential outside binding. Currently those bindings are
1451  // always looked up dynamically, i.e. in that case
1452  // var->location() == LOOKUP.
1453  // always holds.
1454  ASSERT(var->scope() != NULL);
1455 
1456  // Check if the binding really needs an initialization check. The check
1457  // can be skipped in the following situation: we have a LET or CONST
1458  // binding in harmony mode, both the Variable and the VariableProxy have
1459  // the same declaration scope (i.e. they are both in global code, in the
1460  // same function or in the same eval code) and the VariableProxy is in
1461  // the source physically located after the initializer of the variable.
1462  //
1463  // We cannot skip any initialization checks for CONST in non-harmony
1464  // mode because const variables may be declared but never initialized:
1465  // if (false) { const x; }; var y = x;
1466  //
1467  // The condition on the declaration scopes is a conservative check for
1468  // nested functions that access a binding and are called before the
1469  // binding is initialized:
1470  // function() { f(); let x = 1; function f() { x = 2; } }
1471  //
1472  bool skip_init_check;
1473  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1474  skip_init_check = false;
1475  } else {
1476  // Check that we always have a valid source position.
1477  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1478  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1479  skip_init_check = var->mode() != CONST &&
1480  var->initializer_position() < proxy->position();
1481  }
1482 
1483  if (!skip_init_check) {
1484  // Let and const need a read barrier.
1485  GetVar(v0, var);
1486  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1487  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1488  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1489  // Throw a reference error when using an uninitialized let/const
1490  // binding in harmony mode.
1491  Label done;
1492  __ Branch(&done, ne, at, Operand(zero_reg));
1493  __ li(a0, Operand(var->name()));
1494  __ push(a0);
1495  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1496  __ bind(&done);
1497  } else {
1498  // Uninitialized const bindings outside of harmony mode are unholed.
1499  ASSERT(var->mode() == CONST);
1500  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1501  __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1502  }
1503  context()->Plug(v0);
1504  break;
1505  }
1506  }
1507  context()->Plug(var);
1508  break;
1509  }
1510 
1511  case Variable::LOOKUP: {
1512  Label done, slow;
1513  // Generate code for loading from variables potentially shadowed
1514  // by eval-introduced variables.
1515  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1516  __ bind(&slow);
1517  Comment cmnt(masm_, "Lookup variable");
1518  __ li(a1, Operand(var->name()));
1519  __ Push(cp, a1); // Context and name.
1520  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1521  __ bind(&done);
1522  context()->Plug(v0);
1523  }
1524  }
1525 }
1526 
1527 
1528 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1529  Comment cmnt(masm_, "[ RegExpLiteral");
1530  Label materialized;
1531  // Registers will be used as follows:
1532  // t1 = materialized value (RegExp literal)
1533  // t0 = JS function, literals array
1534  // a3 = literal index
1535  // a2 = RegExp pattern
1536  // a1 = RegExp flags
1537  // a0 = RegExp literal clone
1540  int literal_offset =
1541  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1542  __ lw(t1, FieldMemOperand(t0, literal_offset));
1543  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1544  __ Branch(&materialized, ne, t1, Operand(at));
1545 
1546  // Create regexp literal using runtime function.
1547  // Result will be in v0.
1548  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1549  __ li(a2, Operand(expr->pattern()));
1550  __ li(a1, Operand(expr->flags()));
1551  __ Push(t0, a3, a2, a1);
1552  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1553  __ mov(t1, v0);
1554 
1555  __ bind(&materialized);
1557  Label allocated, runtime_allocate;
1558  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1559  __ jmp(&allocated);
1560 
1561  __ bind(&runtime_allocate);
1562  __ push(t1);
1563  __ li(a0, Operand(Smi::FromInt(size)));
1564  __ push(a0);
1565  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1566  __ pop(t1);
1567 
1568  __ bind(&allocated);
1569 
1570  // After this, registers are used as follows:
1571  // v0: Newly allocated regexp.
1572  // t1: Materialized regexp.
1573  // a2: temp.
1574  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1575  context()->Plug(v0);
1576 }
1577 
1578 
1579 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1580  if (expression == NULL) {
1581  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1582  __ push(a1);
1583  } else {
1584  VisitForStackValue(expression);
1585  }
1586 }
1587 
1588 
1589 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1590  Comment cmnt(masm_, "[ ObjectLiteral");
1591  Handle<FixedArray> constant_properties = expr->constant_properties();
1594  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1595  __ li(a1, Operand(constant_properties));
1596  int flags = expr->fast_elements()
1599  flags |= expr->has_function()
1602  __ li(a0, Operand(Smi::FromInt(flags)));
1603  __ Push(a3, a2, a1, a0);
1604  int properties_count = constant_properties->length() / 2;
1605  if (expr->depth() > 1) {
1606  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1607  } else if (flags != ObjectLiteral::kFastElements ||
1609  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1610  } else {
1611  FastCloneShallowObjectStub stub(properties_count);
1612  __ CallStub(&stub);
1613  }
1614 
1615  // If result_saved is true the result is on top of the stack. If
1616  // result_saved is false the result is in v0.
1617  bool result_saved = false;
1618 
1619  // Mark all computed expressions that are bound to a key that
1620  // is shadowed by a later occurrence of the same key. For the
1621  // marked expressions, no store code is emitted.
1622  expr->CalculateEmitStore(zone());
1623 
1624  AccessorTable accessor_table(zone());
1625  for (int i = 0; i < expr->properties()->length(); i++) {
1626  ObjectLiteral::Property* property = expr->properties()->at(i);
1627  if (property->IsCompileTimeValue()) continue;
1628 
1629  Literal* key = property->key();
1630  Expression* value = property->value();
1631  if (!result_saved) {
1632  __ push(v0); // Save result on stack.
1633  result_saved = true;
1634  }
1635  switch (property->kind()) {
1636  case ObjectLiteral::Property::CONSTANT:
1637  UNREACHABLE();
1638  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1639  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1640  // Fall through.
1641  case ObjectLiteral::Property::COMPUTED:
1642  if (key->handle()->IsSymbol()) {
1643  if (property->emit_store()) {
1644  VisitForAccumulatorValue(value);
1645  __ mov(a0, result_register());
1646  __ li(a2, Operand(key->handle()));
1647  __ lw(a1, MemOperand(sp));
1648  Handle<Code> ic = is_classic_mode()
1649  ? isolate()->builtins()->StoreIC_Initialize()
1650  : isolate()->builtins()->StoreIC_Initialize_Strict();
1651  CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1652  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1653  } else {
1654  VisitForEffect(value);
1655  }
1656  break;
1657  }
1658  // Fall through.
1659  case ObjectLiteral::Property::PROTOTYPE:
1660  // Duplicate receiver on stack.
1661  __ lw(a0, MemOperand(sp));
1662  __ push(a0);
1663  VisitForStackValue(key);
1664  VisitForStackValue(value);
1665  if (property->emit_store()) {
1666  __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1667  __ push(a0);
1668  __ CallRuntime(Runtime::kSetProperty, 4);
1669  } else {
1670  __ Drop(3);
1671  }
1672  break;
1673  case ObjectLiteral::Property::GETTER:
1674  accessor_table.lookup(key)->second->getter = value;
1675  break;
1676  case ObjectLiteral::Property::SETTER:
1677  accessor_table.lookup(key)->second->setter = value;
1678  break;
1679  }
1680  }
1681 
1682  // Emit code to define accessors, using only a single call to the runtime for
1683  // each pair of corresponding getters and setters.
1684  for (AccessorTable::Iterator it = accessor_table.begin();
1685  it != accessor_table.end();
1686  ++it) {
1687  __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1688  __ push(a0);
1689  VisitForStackValue(it->first);
1690  EmitAccessor(it->second->getter);
1691  EmitAccessor(it->second->setter);
1692  __ li(a0, Operand(Smi::FromInt(NONE)));
1693  __ push(a0);
1694  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1695  }
1696 
1697  if (expr->has_function()) {
1698  ASSERT(result_saved);
1699  __ lw(a0, MemOperand(sp));
1700  __ push(a0);
1701  __ CallRuntime(Runtime::kToFastProperties, 1);
1702  }
1703 
1704  if (result_saved) {
1705  context()->PlugTOS();
1706  } else {
1707  context()->Plug(v0);
1708  }
1709 }
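// Roughly, the object-literal path above distinguishes these cases, shown on an
// illustrative source fragment:
//   var o = { a: 1, get x() { return 1; }, set x(v) {} };
// Shallow literals with few enough properties go through
// FastCloneShallowObjectStub, deeper or larger ones through the runtime, and
// getter/setter pairs for the same key are collected in accessor_table so each
// pair costs a single kDefineOrRedefineAccessorProperty call.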
1710 
1711 
1712 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1713  Comment cmnt(masm_, "[ ArrayLiteral");
1714 
1715  ZoneList<Expression*>* subexprs = expr->values();
1716  int length = subexprs->length();
1717 
1718  Handle<FixedArray> constant_elements = expr->constant_elements();
1719  ASSERT_EQ(2, constant_elements->length());
1720  ElementsKind constant_elements_kind =
1721  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1722  bool has_fast_elements =
1723  IsFastObjectElementsKind(constant_elements_kind);
1724  Handle<FixedArrayBase> constant_elements_values(
1725  FixedArrayBase::cast(constant_elements->get(1)));
1726 
1727  __ mov(a0, result_register());
1730  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1731  __ li(a1, Operand(constant_elements));
1732  __ Push(a3, a2, a1);
1733  if (has_fast_elements && constant_elements_values->map() ==
1734  isolate()->heap()->fixed_cow_array_map()) {
1735  FastCloneShallowArrayStub stub(
1736  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1737  __ CallStub(&stub);
1738  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1739  1, a1, a2);
1740  } else if (expr->depth() > 1) {
1741  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1742  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1743  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1744  } else {
1745  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1746  FLAG_smi_only_arrays);
1747  FastCloneShallowArrayStub::Mode mode = has_fast_elements
1748  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1749  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1750  FastCloneShallowArrayStub stub(mode, length);
1751  __ CallStub(&stub);
1752  }
1753 
1754  bool result_saved = false; // Is the result saved to the stack?
1755 
1756  // Emit code to evaluate all the non-constant subexpressions and to store
1757  // them into the newly cloned array.
1758  for (int i = 0; i < length; i++) {
1759  Expression* subexpr = subexprs->at(i);
1760  // If the subexpression is a literal or a simple materialized literal it
1761  // is already set in the cloned array.
1762  if (subexpr->AsLiteral() != NULL ||
1763  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1764  continue;
1765  }
1766 
1767  if (!result_saved) {
1768  __ push(v0);
1769  result_saved = true;
1770  }
1771 
1772  VisitForAccumulatorValue(subexpr);
1773 
1774  if (IsFastObjectElementsKind(constant_elements_kind)) {
1775  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1776  __ lw(t2, MemOperand(sp)); // Copy of array literal.
1778  __ sw(result_register(), FieldMemOperand(a1, offset));
1779  // Update the write barrier for the array store.
1780  __ RecordWriteField(a1, offset, result_register(), a2,
1783  } else {
1784  __ lw(a1, MemOperand(sp)); // Copy of array literal.
1786  __ li(a3, Operand(Smi::FromInt(i)));
1787  __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
1788  __ mov(a0, result_register());
1789  StoreArrayLiteralElementStub stub;
1790  __ CallStub(&stub);
1791  }
1792 
1793  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1794  }
1795  if (result_saved) {
1796  context()->PlugTOS();
1797  } else {
1798  context()->Plug(v0);
1799  }
1800 }
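// For an illustrative array literal such as
//   var a = [1, 2, x];
// the constant backing store is cloned first (copy-on-write stub, shallow-clone
// stub, or runtime call, depending on depth, length and elements kind), and only
// the non-constant subexpressions like x are then evaluated and stored, with a
// write barrier when the clone has fast object elements.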
1801 
1802 
1803 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1804  Comment cmnt(masm_, "[ Assignment");
1805  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1806  // on the left-hand side.
1807  if (!expr->target()->IsValidLeftHandSide()) {
1808  VisitForEffect(expr->target());
1809  return;
1810  }
1811 
1812  // Left-hand side can only be a property, a global or a (parameter or local)
1813  // slot.
1814  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1815  LhsKind assign_type = VARIABLE;
1816  Property* property = expr->target()->AsProperty();
1817  if (property != NULL) {
1818  assign_type = (property->key()->IsPropertyName())
1819  ? NAMED_PROPERTY
1820  : KEYED_PROPERTY;
1821  }
1822 
1823  // Evaluate LHS expression.
1824  switch (assign_type) {
1825  case VARIABLE:
1826  // Nothing to do here.
1827  break;
1828  case NAMED_PROPERTY:
1829  if (expr->is_compound()) {
1830  // We need the receiver both on the stack and in the accumulator.
1831  VisitForAccumulatorValue(property->obj());
1832  __ push(result_register());
1833  } else {
1834  VisitForStackValue(property->obj());
1835  }
1836  break;
1837  case KEYED_PROPERTY:
1838  // We need the key and receiver on both the stack and in v0 and a1.
1839  if (expr->is_compound()) {
1840  VisitForStackValue(property->obj());
1841  VisitForAccumulatorValue(property->key());
1842  __ lw(a1, MemOperand(sp, 0));
1843  __ push(v0);
1844  } else {
1845  VisitForStackValue(property->obj());
1846  VisitForStackValue(property->key());
1847  }
1848  break;
1849  }
1850 
1851  // For compound assignments we need another deoptimization point after the
1852  // variable/property load.
1853  if (expr->is_compound()) {
1854  { AccumulatorValueContext context(this);
1855  switch (assign_type) {
1856  case VARIABLE:
1857  EmitVariableLoad(expr->target()->AsVariableProxy());
1858  PrepareForBailout(expr->target(), TOS_REG);
1859  break;
1860  case NAMED_PROPERTY:
1861  EmitNamedPropertyLoad(property);
1862  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1863  break;
1864  case KEYED_PROPERTY:
1865  EmitKeyedPropertyLoad(property);
1866  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1867  break;
1868  }
1869  }
1870 
1871  Token::Value op = expr->binary_op();
1872  __ push(v0); // Left operand goes on the stack.
1873  VisitForAccumulatorValue(expr->value());
1874 
1875  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1876  ? OVERWRITE_RIGHT
1877  : NO_OVERWRITE;
1878  SetSourcePosition(expr->position() + 1);
1879  AccumulatorValueContext context(this);
1880  if (ShouldInlineSmiCase(op)) {
1881  EmitInlineSmiBinaryOp(expr->binary_operation(),
1882  op,
1883  mode,
1884  expr->target(),
1885  expr->value());
1886  } else {
1887  EmitBinaryOp(expr->binary_operation(), op, mode);
1888  }
1889 
1890  // Deoptimization point in case the binary operation may have side effects.
1891  PrepareForBailout(expr->binary_operation(), TOS_REG);
1892  } else {
1893  VisitForAccumulatorValue(expr->value());
1894  }
1895 
1896  // Record source position before possible IC call.
1897  SetSourcePosition(expr->position());
1898 
1899  // Store the value.
1900  switch (assign_type) {
1901  case VARIABLE:
1902  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1903  expr->op());
1904  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1905  context()->Plug(v0);
1906  break;
1907  case NAMED_PROPERTY:
1908  EmitNamedPropertyAssignment(expr);
1909  break;
1910  case KEYED_PROPERTY:
1911  EmitKeyedPropertyAssignment(expr);
1912  break;
1913  }
1914 }
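// The assignment visitor above classifies the target once (VARIABLE,
// NAMED_PROPERTY or KEYED_PROPERTY) and reuses that classification for both the
// optional compound load and the final store. An illustrative compound case:
//   o.x += 1;
// loads o.x, applies the binary operation (inline smi fast path or
// BinaryOpStub), then stores through the named store IC, with deoptimization
// points recorded after the load, the binary operation and the store.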
1915 
1916 
1917 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1918  SetSourcePosition(prop->position());
1919  Literal* key = prop->key()->AsLiteral();
1920  __ mov(a0, result_register());
1921  __ li(a2, Operand(key->handle()));
1922  // Call load IC. It has arguments receiver and property name in a0 and a2.
1923  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1924  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1925 }
1926 
1927 
1928 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1929  SetSourcePosition(prop->position());
1930  __ mov(a0, result_register());
1931  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
1932  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1933  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1934 }
1935 
1936 
1937 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1938  Token::Value op,
1939  OverwriteMode mode,
1940  Expression* left_expr,
1941  Expression* right_expr) {
1942  Label done, smi_case, stub_call;
1943 
1944  Register scratch1 = a2;
1945  Register scratch2 = a3;
1946 
1947  // Get the arguments.
1948  Register left = a1;
1949  Register right = a0;
1950  __ pop(left);
1951  __ mov(a0, result_register());
1952 
1953  // Perform combined smi check on both operands.
1954  __ Or(scratch1, left, Operand(right));
1955  STATIC_ASSERT(kSmiTag == 0);
1956  JumpPatchSite patch_site(masm_);
1957  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1958 
1959  __ bind(&stub_call);
1960  BinaryOpStub stub(op, mode);
1961  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1962  expr->BinaryOperationFeedbackId());
1963  patch_site.EmitPatchInfo();
1964  __ jmp(&done);
1965 
1966  __ bind(&smi_case);
1967  // Smi case. This code works the same way as the smi-smi case in the type
1968  // recording binary operation stub; see
1969  // BinaryOpStub::GenerateSmiSmiOperation for comments.
1970  switch (op) {
1971  case Token::SAR:
1972  __ Branch(&stub_call);
1973  __ GetLeastBitsFromSmi(scratch1, right, 5);
1974  __ srav(right, left, scratch1);
1975  __ And(v0, right, Operand(~kSmiTagMask));
1976  break;
1977  case Token::SHL: {
1978  __ Branch(&stub_call);
1979  __ SmiUntag(scratch1, left);
1980  __ GetLeastBitsFromSmi(scratch2, right, 5);
1981  __ sllv(scratch1, scratch1, scratch2);
1982  __ Addu(scratch2, scratch1, Operand(0x40000000));
1983  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1984  __ SmiTag(v0, scratch1);
1985  break;
1986  }
1987  case Token::SHR: {
1988  __ Branch(&stub_call);
1989  __ SmiUntag(scratch1, left);
1990  __ GetLeastBitsFromSmi(scratch2, right, 5);
1991  __ srlv(scratch1, scratch1, scratch2);
1992  __ And(scratch2, scratch1, 0xc0000000);
1993  __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1994  __ SmiTag(v0, scratch1);
1995  break;
1996  }
1997  case Token::ADD:
1998  __ AdduAndCheckForOverflow(v0, left, right, scratch1);
1999  __ BranchOnOverflow(&stub_call, scratch1);
2000  break;
2001  case Token::SUB:
2002  __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2003  __ BranchOnOverflow(&stub_call, scratch1);
2004  break;
2005  case Token::MUL: {
2006  __ SmiUntag(scratch1, right);
2007  __ Mult(left, scratch1);
2008  __ mflo(scratch1);
2009  __ mfhi(scratch2);
2010  __ sra(scratch1, scratch1, 31);
2011  __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2012  __ mflo(v0);
2013  __ Branch(&done, ne, v0, Operand(zero_reg));
2014  __ Addu(scratch2, right, left);
2015  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2016  ASSERT(Smi::FromInt(0) == 0);
2017  __ mov(v0, zero_reg);
2018  break;
2019  }
2020  case Token::BIT_OR:
2021  __ Or(v0, left, Operand(right));
2022  break;
2023  case Token::BIT_AND:
2024  __ And(v0, left, Operand(right));
2025  break;
2026  case Token::BIT_XOR:
2027  __ Xor(v0, left, Operand(right));
2028  break;
2029  default:
2030  UNREACHABLE();
2031  }
2032 
2033  __ bind(&done);
2034  context()->Plug(v0);
2035 }
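// Rough sketch of the combined smi check used above: with kSmiTag == 0, a smi
// has a clear tag bit, so
//   ((left | right) & kSmiTagMask) == 0
// holds exactly when both operands are smis. A single Or plus EmitJumpIfSmi on
// the scratch register therefore guards the whole inline fast path, and the
// patch site lets the IC later repatch that jump when type feedback changes.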
2036 
2037 
2038 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2039  Token::Value op,
2040  OverwriteMode mode) {
2041  __ mov(a0, result_register());
2042  __ pop(a1);
2043  BinaryOpStub stub(op, mode);
2044  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2045  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
2046  expr->BinaryOperationFeedbackId());
2047  patch_site.EmitPatchInfo();
2048  context()->Plug(v0);
2049 }
2050 
2051 
2052 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2053  // Invalid left-hand sides are rewritten to have a 'throw
2054  // ReferenceError' on the left-hand side.
2055  if (!expr->IsValidLeftHandSide()) {
2056  VisitForEffect(expr);
2057  return;
2058  }
2059 
2060  // Left-hand side can only be a property, a global or a (parameter or local)
2061  // slot.
2062  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2063  LhsKind assign_type = VARIABLE;
2064  Property* prop = expr->AsProperty();
2065  if (prop != NULL) {
2066  assign_type = (prop->key()->IsPropertyName())
2067  ? NAMED_PROPERTY
2068  : KEYED_PROPERTY;
2069  }
2070 
2071  switch (assign_type) {
2072  case VARIABLE: {
2073  Variable* var = expr->AsVariableProxy()->var();
2074  EffectContext context(this);
2075  EmitVariableAssignment(var, Token::ASSIGN);
2076  break;
2077  }
2078  case NAMED_PROPERTY: {
2079  __ push(result_register()); // Preserve value.
2080  VisitForAccumulatorValue(prop->obj());
2081  __ mov(a1, result_register());
2082  __ pop(a0); // Restore value.
2083  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2084  Handle<Code> ic = is_classic_mode()
2085  ? isolate()->builtins()->StoreIC_Initialize()
2086  : isolate()->builtins()->StoreIC_Initialize_Strict();
2087  CallIC(ic);
2088  break;
2089  }
2090  case KEYED_PROPERTY: {
2091  __ push(result_register()); // Preserve value.
2092  VisitForStackValue(prop->obj());
2093  VisitForAccumulatorValue(prop->key());
2094  __ mov(a1, result_register());
2095  __ pop(a2);
2096  __ pop(a0); // Restore value.
2097  Handle<Code> ic = is_classic_mode()
2098  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2099  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2100  CallIC(ic);
2101  break;
2102  }
2103  }
2104  context()->Plug(v0);
2105 }
2106 
2107 
2108 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2109  Token::Value op) {
2110  if (var->IsUnallocated()) {
2111  // Global var, const, or let.
2112  __ mov(a0, result_register());
2113  __ li(a2, Operand(var->name()));
2114  __ lw(a1, GlobalObjectOperand());
2115  Handle<Code> ic = is_classic_mode()
2116  ? isolate()->builtins()->StoreIC_Initialize()
2117  : isolate()->builtins()->StoreIC_Initialize_Strict();
2118  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2119 
2120  } else if (op == Token::INIT_CONST) {
2121  // Const initializers need a write barrier.
2122  ASSERT(!var->IsParameter()); // No const parameters.
2123  if (var->IsStackLocal()) {
2124  Label skip;
2125  __ lw(a1, StackOperand(var));
2126  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2127  __ Branch(&skip, ne, a1, Operand(t0));
2128  __ sw(result_register(), StackOperand(var));
2129  __ bind(&skip);
2130  } else {
2131  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2132  // Like var declarations, const declarations are hoisted to function
2133  // scope. However, unlike var initializers, const initializers are
2134  // able to drill a hole to that function context, even from inside a
2135  // 'with' context. We thus bypass the normal static scope lookup for
2136  // var->IsContextSlot().
2137  __ push(v0);
2138  __ li(a0, Operand(var->name()));
2139  __ Push(cp, a0); // Context and name.
2140  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2141  }
2142 
2143  } else if (var->mode() == LET && op != Token::INIT_LET) {
2144  // Non-initializing assignment to let variable needs a write barrier.
2145  if (var->IsLookupSlot()) {
2146  __ push(v0); // Value.
2147  __ li(a1, Operand(var->name()));
2148  __ li(a0, Operand(Smi::FromInt(language_mode())));
2149  __ Push(cp, a1, a0); // Context, name, strict mode.
2150  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2151  } else {
2152  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2153  Label assign;
2154  MemOperand location = VarOperand(var, a1);
2155  __ lw(a3, location);
2156  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2157  __ Branch(&assign, ne, a3, Operand(t0));
2158  __ li(a3, Operand(var->name()));
2159  __ push(a3);
2160  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2161  // Perform the assignment.
2162  __ bind(&assign);
2163  __ sw(result_register(), location);
2164  if (var->IsContextSlot()) {
2165  // RecordWrite may destroy all its register arguments.
2166  __ mov(a3, result_register());
2167  int offset = Context::SlotOffset(var->index());
2168  __ RecordWriteContextSlot(
2169  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2170  }
2171  }
2172 
2173  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2174  // Assignment to var or initializing assignment to let/const
2175  // in harmony mode.
2176  if (var->IsStackAllocated() || var->IsContextSlot()) {
2177  MemOperand location = VarOperand(var, a1);
2178  if (generate_debug_code_ && op == Token::INIT_LET) {
2179  // Check for an uninitialized let binding.
2180  __ lw(a2, location);
2181  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2182  __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
2183  }
2184  // Perform the assignment.
2185  __ sw(v0, location);
2186  if (var->IsContextSlot()) {
2187  __ mov(a3, v0);
2188  int offset = Context::SlotOffset(var->index());
2189  __ RecordWriteContextSlot(
2190  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2191  }
2192  } else {
2193  ASSERT(var->IsLookupSlot());
2194  __ push(v0); // Value.
2195  __ li(a1, Operand(var->name()));
2196  __ li(a0, Operand(Smi::FromInt(language_mode())));
2197  __ Push(cp, a1, a0); // Context, name, strict mode.
2198  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2199  }
2200  }
2201  // Non-initializing assignments to consts are ignored.
2202 }
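// Illustrative cases for the variable-assignment helper above:
//   x = v;          // unallocated global: store IC against the global object
//   let y; y = v;   // non-initializing let: hole check, then store
//   const c = v;    // const initializer: written only if the slot still holds
//                   // the hole
// Context-slot stores are followed by RecordWriteContextSlot (the write
// barrier); lookup slots always go through the runtime.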
2203 
2204 
2205 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2206  // Assignment to a property, using a named store IC.
2207  Property* prop = expr->target()->AsProperty();
2208  ASSERT(prop != NULL);
2209  ASSERT(prop->key()->AsLiteral() != NULL);
2210 
2211  // Record source code position before IC call.
2212  SetSourcePosition(expr->position());
2213  __ mov(a0, result_register()); // Load the value.
2214  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2215  __ pop(a1);
2216 
2217  Handle<Code> ic = is_classic_mode()
2218  ? isolate()->builtins()->StoreIC_Initialize()
2219  : isolate()->builtins()->StoreIC_Initialize_Strict();
2220  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2221 
2222  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2223  context()->Plug(v0);
2224 }
2225 
2226 
2227 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2228  // Assignment to a property, using a keyed store IC.
2229 
2230  // Record source code position before IC call.
2231  SetSourcePosition(expr->position());
2232  // Call keyed store IC.
2233  // The arguments are:
2234  // - a0 is the value,
2235  // - a1 is the key,
2236  // - a2 is the receiver.
2237  __ mov(a0, result_register());
2238  __ pop(a1); // Key.
2239  __ pop(a2);
2240 
2241  Handle<Code> ic = is_classic_mode()
2242  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2243  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2244  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2245 
2246  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2247  context()->Plug(v0);
2248 }
2249 
2250 
2251 void FullCodeGenerator::VisitProperty(Property* expr) {
2252  Comment cmnt(masm_, "[ Property");
2253  Expression* key = expr->key();
2254 
2255  if (key->IsPropertyName()) {
2256  VisitForAccumulatorValue(expr->obj());
2257  EmitNamedPropertyLoad(expr);
2258  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2259  context()->Plug(v0);
2260  } else {
2261  VisitForStackValue(expr->obj());
2262  VisitForAccumulatorValue(expr->key());
2263  __ pop(a1);
2264  EmitKeyedPropertyLoad(expr);
2265  context()->Plug(v0);
2266  }
2267 }
2268 
2269 
2270 void FullCodeGenerator::CallIC(Handle<Code> code,
2271  RelocInfo::Mode rmode,
2272  TypeFeedbackId id) {
2273  ic_total_count_++;
2274  __ Call(code, rmode, id);
2275 }
2276 
2277 
2278 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2279  Handle<Object> name,
2280  RelocInfo::Mode mode) {
2281  // Code common for calls using the IC.
2282  ZoneList<Expression*>* args = expr->arguments();
2283  int arg_count = args->length();
2284  { PreservePositionScope scope(masm()->positions_recorder());
2285  for (int i = 0; i < arg_count; i++) {
2286  VisitForStackValue(args->at(i));
2287  }
2288  __ li(a2, Operand(name));
2289  }
2290  // Record source position for debugger.
2291  SetSourcePosition(expr->position());
2292  // Call the IC initialization code.
2293  Handle<Code> ic =
2294  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2295  CallIC(ic, mode, expr->CallFeedbackId());
2296  RecordJSReturnSite(expr);
2297  // Restore context register.
2298  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2299  context()->Plug(v0);
2300 }
2301 
2302 
2303 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2304  Expression* key) {
2305  // Load the key.
2306  VisitForAccumulatorValue(key);
2307 
2308  // Swap the name of the function and the receiver on the stack to follow
2309  // the calling convention for call ICs.
2310  __ pop(a1);
2311  __ push(v0);
2312  __ push(a1);
2313 
2314  // Code common for calls using the IC.
2315  ZoneList<Expression*>* args = expr->arguments();
2316  int arg_count = args->length();
2317  { PreservePositionScope scope(masm()->positions_recorder());
2318  for (int i = 0; i < arg_count; i++) {
2319  VisitForStackValue(args->at(i));
2320  }
2321  }
2322  // Record source position for debugger.
2323  SetSourcePosition(expr->position());
2324  // Call the IC initialization code.
2325  Handle<Code> ic =
2326  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2327  __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2328  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2329  RecordJSReturnSite(expr);
2330  // Restore context register.
2331  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2332  context()->DropAndPlug(1, v0); // Drop the key still on the stack.
2333 }
2334 
2335 
2336 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2337  // Code common for calls using the call stub.
2338  ZoneList<Expression*>* args = expr->arguments();
2339  int arg_count = args->length();
2340  { PreservePositionScope scope(masm()->positions_recorder());
2341  for (int i = 0; i < arg_count; i++) {
2342  VisitForStackValue(args->at(i));
2343  }
2344  }
2345  // Record source position for debugger.
2346  SetSourcePosition(expr->position());
2347 
2348  // Record call targets.
2349  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2350  Handle<Object> uninitialized =
2351  TypeFeedbackCells::UninitializedSentinel(isolate());
2352  Handle<JSGlobalPropertyCell> cell =
2353  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2354  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2355  __ li(a2, Operand(cell));
2356 
2357  CallFunctionStub stub(arg_count, flags);
2358  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2359  __ CallStub(&stub);
2360  RecordJSReturnSite(expr);
2361  // Restore context register.
2362  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2363  context()->DropAndPlug(1, v0);
2364 }
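// The stub call path above always records call targets: an uninitialized
// JSGlobalPropertyCell is allocated per call site, associated with the call's
// feedback id via RecordTypeFeedbackCell, and passed to CallFunctionStub in a2.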
2365 
2366 
2367 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2368  // Push copy of the first argument or undefined if it doesn't exist.
2369  if (arg_count > 0) {
2370  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2371  } else {
2372  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2373  }
2374  __ push(a1);
2375 
2376  // Push the receiver of the enclosing function.
2377  int receiver_offset = 2 + info_->scope()->num_parameters();
2378  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2379  __ push(a1);
2380  // Push the language mode.
2381  __ li(a1, Operand(Smi::FromInt(language_mode())));
2382  __ push(a1);
2383 
2384  // Push the start position of the scope the call resides in.
2385  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2386  __ push(a1);
2387 
2388  // Do the runtime call.
2389  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2390 }
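// The five arguments to kResolvePossiblyDirectEval are, from deepest stack slot
// to shallowest: the function copy pushed by the caller (VisitCall), a copy of
// the first argument (or undefined), the enclosing function's receiver, the
// language mode as a smi, and the scope's start position.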
2391 
2392 
2393 void FullCodeGenerator::VisitCall(Call* expr) {
2394 #ifdef DEBUG
2395  // We want to verify that RecordJSReturnSite gets called on all paths
2396  // through this function. Avoid early returns.
2397  expr->return_is_recorded_ = false;
2398 #endif
2399 
2400  Comment cmnt(masm_, "[ Call");
2401  Expression* callee = expr->expression();
2402  VariableProxy* proxy = callee->AsVariableProxy();
2403  Property* property = callee->AsProperty();
2404 
2405  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2406  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2407  // resolve the function we need to call and the receiver of the
2408  // call. Then we call the resolved function using the given
2409  // arguments.
2410  ZoneList<Expression*>* args = expr->arguments();
2411  int arg_count = args->length();
2412 
2413  { PreservePositionScope pos_scope(masm()->positions_recorder());
2414  VisitForStackValue(callee);
2415  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2416  __ push(a2); // Reserved receiver slot.
2417 
2418  // Push the arguments.
2419  for (int i = 0; i < arg_count; i++) {
2420  VisitForStackValue(args->at(i));
2421  }
2422 
2423  // Push a copy of the function (found below the arguments) and
2424  // resolve eval.
2425  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2426  __ push(a1);
2427  EmitResolvePossiblyDirectEval(arg_count);
2428 
2429  // The runtime call returns a pair of values in v0 (function) and
2430  // v1 (receiver). Touch up the stack with the right values.
2431  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2432  __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2433  }
2434  // Record source position for debugger.
2435  SetSourcePosition(expr->position());
2436  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2437  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2438  __ CallStub(&stub);
2439  RecordJSReturnSite(expr);
2440  // Restore context register.
2441  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2442  context()->DropAndPlug(1, v0);
2443  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2444  // Push global object as receiver for the call IC.
2445  __ lw(a0, GlobalObjectOperand());
2446  __ push(a0);
2447  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2448  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2449  // Call to a lookup slot (dynamically introduced variable).
2450  Label slow, done;
2451 
2452  { PreservePositionScope scope(masm()->positions_recorder());
2453  // Generate code for loading from variables potentially shadowed
2454  // by eval-introduced variables.
2455  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2456  }
2457 
2458  __ bind(&slow);
2459  // Call the runtime to find the function to call (returned in v0)
2460  // and the object holding it (returned in v1).
2461  __ push(context_register());
2462  __ li(a2, Operand(proxy->name()));
2463  __ push(a2);
2464  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2465  __ Push(v0, v1); // Function, receiver.
2466 
2467  // If fast case code has been generated, emit code to push the
2468  // function and receiver and have the slow path jump around this
2469  // code.
2470  if (done.is_linked()) {
2471  Label call;
2472  __ Branch(&call);
2473  __ bind(&done);
2474  // Push function.
2475  __ push(v0);
2476  // The receiver is implicitly the global receiver. Indicate this
2477  // by passing the hole to the call function stub.
2478  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2479  __ push(a1);
2480  __ bind(&call);
2481  }
2482 
2483  // The receiver is either the global receiver or an object found
2484  // by LoadContextSlot. That object could be the hole if the
2485  // receiver is implicitly the global object.
2486  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2487  } else if (property != NULL) {
2488  { PreservePositionScope scope(masm()->positions_recorder());
2489  VisitForStackValue(property->obj());
2490  }
2491  if (property->key()->IsPropertyName()) {
2492  EmitCallWithIC(expr,
2493  property->key()->AsLiteral()->handle(),
2494  RelocInfo::CODE_TARGET);
2495  } else {
2496  EmitKeyedCallWithIC(expr, property->key());
2497  }
2498  } else {
2499  // Call to an arbitrary expression not handled specially above.
2500  { PreservePositionScope scope(masm()->positions_recorder());
2501  VisitForStackValue(callee);
2502  }
2503  // Load global receiver object.
2504  __ lw(a1, GlobalObjectOperand());
2506  __ push(a1);
2507  // Emit function call.
2508  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2509  }
2510 
2511 #ifdef DEBUG
2512  // RecordJSReturnSite should have been called.
2513  ASSERT(expr->return_is_recorded_);
2514 #endif
2515 }
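// Rough summary of the dispatch above, with illustrative sources:
//   eval(s)        - possibly-direct eval: resolve via the runtime, then stub
//   foo()          - unallocated global: call IC with the global object as
//                    receiver
//   with (o) f()   - lookup slot: fast case if not shadowed, else runtime lookup
//   o.f() / o[k]() - named or keyed call IC
//   (expr)()       - anything else: CallFunctionStub with the global receiver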
2516 
2517 
2518 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2519  Comment cmnt(masm_, "[ CallNew");
2520  // According to ECMA-262, section 11.2.2, page 44, the function
2521  // expression in new calls must be evaluated before the
2522  // arguments.
2523 
2524  // Push the constructor on the stack. If it's not a function, it's used as
2525  // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
2526  // ignored.
2527  VisitForStackValue(expr->expression());
2528 
2529  // Push the arguments ("left-to-right") on the stack.
2530  ZoneList<Expression*>* args = expr->arguments();
2531  int arg_count = args->length();
2532  for (int i = 0; i < arg_count; i++) {
2533  VisitForStackValue(args->at(i));
2534  }
2535 
2536  // Call the construct call builtin that handles allocation and
2537  // constructor invocation.
2538  SetSourcePosition(expr->position());
2539 
2540  // Load function and argument count into a1 and a0.
2541  __ li(a0, Operand(arg_count));
2542  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2543 
2544  // Record call targets in unoptimized code.
2545  Handle<Object> uninitialized =
2546  TypeFeedbackCells::UninitializedSentinel(isolate());
2547  Handle<JSGlobalPropertyCell> cell =
2548  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2549  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2550  __ li(a2, Operand(cell));
2551 
2552  CallConstructStub stub(RECORD_CALL_TARGET);
2553  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2554  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2555  context()->Plug(v0);
2556 }
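// For an illustrative construct call like
//   new C(a, b);
// the constructor and arguments are pushed left to right, a0/a1 receive the
// argument count and function, and a fresh type feedback cell in a2 lets
// CallConstructStub record the call target (RECORD_CALL_TARGET).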
2557 
2558 
2559 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2560  ZoneList<Expression*>* args = expr->arguments();
2561  ASSERT(args->length() == 1);
2562 
2563  VisitForAccumulatorValue(args->at(0));
2564 
2565  Label materialize_true, materialize_false;
2566  Label* if_true = NULL;
2567  Label* if_false = NULL;
2568  Label* fall_through = NULL;
2569  context()->PrepareTest(&materialize_true, &materialize_false,
2570  &if_true, &if_false, &fall_through);
2571 
2572  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2573  __ And(t0, v0, Operand(kSmiTagMask));
2574  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2575 
2576  context()->Plug(if_true, if_false);
2577 }
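// %_IsSmi above reduces to a single mask: with kSmiTag == 0, the value is a smi
// exactly when (v0 & kSmiTagMask) == 0, which is what the And/Split pair tests.
// The non-negative variant below additionally masks bit 31, so it accepts only
// smis whose sign bit is clear.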
2578 
2579 
2580 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2581  ZoneList<Expression*>* args = expr->arguments();
2582  ASSERT(args->length() == 1);
2583 
2584  VisitForAccumulatorValue(args->at(0));
2585 
2586  Label materialize_true, materialize_false;
2587  Label* if_true = NULL;
2588  Label* if_false = NULL;
2589  Label* fall_through = NULL;
2590  context()->PrepareTest(&materialize_true, &materialize_false,
2591  &if_true, &if_false, &fall_through);
2592 
2593  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2594  __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
2595  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2596 
2597  context()->Plug(if_true, if_false);
2598 }
2599 
2600 
2601 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2602  ZoneList<Expression*>* args = expr->arguments();
2603  ASSERT(args->length() == 1);
2604 
2605  VisitForAccumulatorValue(args->at(0));
2606 
2607  Label materialize_true, materialize_false;
2608  Label* if_true = NULL;
2609  Label* if_false = NULL;
2610  Label* fall_through = NULL;
2611  context()->PrepareTest(&materialize_true, &materialize_false,
2612  &if_true, &if_false, &fall_through);
2613 
2614  __ JumpIfSmi(v0, if_false);
2615  __ LoadRoot(at, Heap::kNullValueRootIndex);
2616  __ Branch(if_true, eq, v0, Operand(at));
2618  // Undetectable objects behave like undefined when tested with typeof.
2619  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2620  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2621  __ Branch(if_false, ne, at, Operand(zero_reg));
2623  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2624  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2625  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2626  if_true, if_false, fall_through);
2627 
2628  context()->Plug(if_true, if_false);
2629 }
2630 
2631 
2632 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2633  ZoneList<Expression*>* args = expr->arguments();
2634  ASSERT(args->length() == 1);
2635 
2636  VisitForAccumulatorValue(args->at(0));
2637 
2638  Label materialize_true, materialize_false;
2639  Label* if_true = NULL;
2640  Label* if_false = NULL;
2641  Label* fall_through = NULL;
2642  context()->PrepareTest(&materialize_true, &materialize_false,
2643  &if_true, &if_false, &fall_through);
2644 
2645  __ JumpIfSmi(v0, if_false);
2646  __ GetObjectType(v0, a1, a1);
2647  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2648  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2649  if_true, if_false, fall_through);
2650 
2651  context()->Plug(if_true, if_false);
2652 }
2653 
2654 
2655 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2656  ZoneList<Expression*>* args = expr->arguments();
2657  ASSERT(args->length() == 1);
2658 
2659  VisitForAccumulatorValue(args->at(0));
2660 
2661  Label materialize_true, materialize_false;
2662  Label* if_true = NULL;
2663  Label* if_false = NULL;
2664  Label* fall_through = NULL;
2665  context()->PrepareTest(&materialize_true, &materialize_false,
2666  &if_true, &if_false, &fall_through);
2667 
2668  __ JumpIfSmi(v0, if_false);
2670  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2671  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2672  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2673  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
2674 
2675  context()->Plug(if_true, if_false);
2676 }
2677 
2678 
2679 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2680  CallRuntime* expr) {
2681  ZoneList<Expression*>* args = expr->arguments();
2682  ASSERT(args->length() == 1);
2683 
2684  VisitForAccumulatorValue(args->at(0));
2685 
2686  Label materialize_true, materialize_false;
2687  Label* if_true = NULL;
2688  Label* if_false = NULL;
2689  Label* fall_through = NULL;
2690  context()->PrepareTest(&materialize_true, &materialize_false,
2691  &if_true, &if_false, &fall_through);
2692 
2693  __ AssertNotSmi(v0);
2694 
2697  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2698  __ Branch(if_true, ne, t0, Operand(zero_reg));
2699 
2700  // Check for fast case object. Generate false result for slow case object.
2703  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
2704  __ Branch(if_false, eq, a2, Operand(t0));
2705 
2706  // Look for valueOf symbol in the descriptor array, and indicate false if
2707  // found. Since we omit an enumeration index check, if it is added via a
2708  // transition that shares its descriptor array, this is a false positive.
2709  Label entry, loop, done;
2710 
2711  // Skip loop if no descriptors are valid.
2712  __ NumberOfOwnDescriptors(a3, a1);
2713  __ Branch(&done, eq, a3, Operand(zero_reg));
2714 
2715  __ LoadInstanceDescriptors(a1, t0);
2716  // t0: descriptor array.
2717  // a3: valid entries in the descriptor array.
2718  STATIC_ASSERT(kSmiTag == 0);
2719  STATIC_ASSERT(kSmiTagSize == 1);
2720  STATIC_ASSERT(kPointerSize == 4);
2721  __ li(at, Operand(DescriptorArray::kDescriptorSize));
2722  __ Mul(a3, a3, at);
2723  // Calculate location of the first key name.
2724  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
2725  // Calculate the end of the descriptor array.
2726  __ mov(a2, t0);
2727  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
2728  __ Addu(a2, a2, t1);
2729 
2730  // Loop through all the keys in the descriptor array. If one of these is the
2731  // symbol valueOf, the result is false.
2732  // The use of t2 to store the valueOf symbol assumes that it is not otherwise
2733  // used in the loop below.
2734  __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
2735  __ jmp(&entry);
2736  __ bind(&loop);
2737  __ lw(a3, MemOperand(t0, 0));
2738  __ Branch(if_false, eq, a3, Operand(t2));
2739  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
2740  __ bind(&entry);
2741  __ Branch(&loop, ne, t0, Operand(a2));
2742 
2743  __ bind(&done);
2744  // If a valueOf property is not found on the object, check that its
2745  // prototype is the unmodified String prototype. If not, the result is false.
2747  __ JumpIfSmi(a2, if_false);
2752  __ Branch(if_false, ne, a2, Operand(a3));
2753 
2754  // Set the bit in the map to indicate that it has been checked safe for
2755  // default valueOf, and set the result to true.
2757  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2759  __ jmp(if_true);
2760 
2761  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2762  context()->Plug(if_true, if_false);
2763 }
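// The check above is a heuristic: it first consults (and on success sets) a
// "string wrapper safe for default valueOf" bit in the map, and otherwise scans
// the map's own descriptors for the valueOf symbol and verifies that the
// prototype is the unmodified String prototype.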
2764 
2765 
2766 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2767  ZoneList<Expression*>* args = expr->arguments();
2768  ASSERT(args->length() == 1);
2769 
2770  VisitForAccumulatorValue(args->at(0));
2771 
2772  Label materialize_true, materialize_false;
2773  Label* if_true = NULL;
2774  Label* if_false = NULL;
2775  Label* fall_through = NULL;
2776  context()->PrepareTest(&materialize_true, &materialize_false,
2777  &if_true, &if_false, &fall_through);
2778 
2779  __ JumpIfSmi(v0, if_false);
2780  __ GetObjectType(v0, a1, a2);
2781  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2782  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
2783  __ Branch(if_false);
2784 
2785  context()->Plug(if_true, if_false);
2786 }
2787 
2788 
2789 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2790  ZoneList<Expression*>* args = expr->arguments();
2791  ASSERT(args->length() == 1);
2792 
2793  VisitForAccumulatorValue(args->at(0));
2794 
2795  Label materialize_true, materialize_false;
2796  Label* if_true = NULL;
2797  Label* if_false = NULL;
2798  Label* fall_through = NULL;
2799  context()->PrepareTest(&materialize_true, &materialize_false,
2800  &if_true, &if_false, &fall_through);
2801 
2802  __ JumpIfSmi(v0, if_false);
2803  __ GetObjectType(v0, a1, a1);
2804  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2805  Split(eq, a1, Operand(JS_ARRAY_TYPE),
2806  if_true, if_false, fall_through);
2807 
2808  context()->Plug(if_true, if_false);
2809 }
2810 
2811 
2812 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2813  ZoneList<Expression*>* args = expr->arguments();
2814  ASSERT(args->length() == 1);
2815 
2816  VisitForAccumulatorValue(args->at(0));
2817 
2818  Label materialize_true, materialize_false;
2819  Label* if_true = NULL;
2820  Label* if_false = NULL;
2821  Label* fall_through = NULL;
2822  context()->PrepareTest(&materialize_true, &materialize_false,
2823  &if_true, &if_false, &fall_through);
2824 
2825  __ JumpIfSmi(v0, if_false);
2826  __ GetObjectType(v0, a1, a1);
2827  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2828  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2829 
2830  context()->Plug(if_true, if_false);
2831 }
2832 
2833 
2834 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2835  ASSERT(expr->arguments()->length() == 0);
2836 
2837  Label materialize_true, materialize_false;
2838  Label* if_true = NULL;
2839  Label* if_false = NULL;
2840  Label* fall_through = NULL;
2841  context()->PrepareTest(&materialize_true, &materialize_false,
2842  &if_true, &if_false, &fall_through);
2843 
2844  // Get the frame pointer for the calling frame.
2846 
2847  // Skip the arguments adaptor frame if it exists.
2848  Label check_frame_marker;
2850  __ Branch(&check_frame_marker, ne,
2853 
2854  // Check the marker in the calling frame.
2855  __ bind(&check_frame_marker);
2857  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2858  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
2859  if_true, if_false, fall_through);
2860 
2861  context()->Plug(if_true, if_false);
2862 }
2863 
2864 
2865 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2866  ZoneList<Expression*>* args = expr->arguments();
2867  ASSERT(args->length() == 2);
2868 
2869  // Load the two objects into registers and perform the comparison.
2870  VisitForStackValue(args->at(0));
2871  VisitForAccumulatorValue(args->at(1));
2872 
2873  Label materialize_true, materialize_false;
2874  Label* if_true = NULL;
2875  Label* if_false = NULL;
2876  Label* fall_through = NULL;
2877  context()->PrepareTest(&materialize_true, &materialize_false,
2878  &if_true, &if_false, &fall_through);
2879 
2880  __ pop(a1);
2881  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2882  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
2883 
2884  context()->Plug(if_true, if_false);
2885 }
2886 
2887 
2888 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2889  ZoneList<Expression*>* args = expr->arguments();
2890  ASSERT(args->length() == 1);
2891 
2892  // ArgumentsAccessStub expects the key in a1 and the formal
2893  // parameter count in a0.
2894  VisitForAccumulatorValue(args->at(0));
2895  __ mov(a1, v0);
2896  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2897  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2898  __ CallStub(&stub);
2899  context()->Plug(v0);
2900 }
2901 
2902 
2903 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2904  ASSERT(expr->arguments()->length() == 0);
2905  Label exit;
2906  // Get the number of formal parameters.
2907  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2908 
2909  // Check if the calling frame is an arguments adaptor frame.
2912  __ Branch(&exit, ne, a3,
2914 
2915  // Arguments adaptor case: Read the arguments length from the
2916  // adaptor frame.
2918 
2919  __ bind(&exit);
2920  context()->Plug(v0);
2921 }
2922 
2923 
2924 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2925  ZoneList<Expression*>* args = expr->arguments();
2926  ASSERT(args->length() == 1);
2927  Label done, null, function, non_function_constructor;
2928 
2929  VisitForAccumulatorValue(args->at(0));
2930 
2931  // If the object is a smi, we return null.
2932  __ JumpIfSmi(v0, &null);
2933 
2934  // Check that the object is a JS object but take special care of JS
2935  // functions to make sure they have 'Function' as their class.
2936  // Assume that there are only two callable types, and one of them is at
2937  // either end of the type range for JS object types. Saves extra comparisons.
2939  __ GetObjectType(v0, v0, a1); // Map is now in v0.
2940  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2941 
2944  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2945 
2947  LAST_SPEC_OBJECT_TYPE - 1);
2948  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
2949  // Assume that there is no larger type.
2951 
2952  // Check if the constructor in the map is a JS function.
2954  __ GetObjectType(v0, a1, a1);
2955  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
2956 
2957  // v0 now contains the constructor function. Grab the
2958  // instance class name from there.
2961  __ Branch(&done);
2962 
2963  // Functions have class 'Function'.
2964  __ bind(&function);
2965  __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
2966  __ jmp(&done);
2967 
2968  // Objects with a non-function constructor have class 'Object'.
2969  __ bind(&non_function_constructor);
2970  __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
2971  __ jmp(&done);
2972 
2973  // Non-JS objects have class null.
2974  __ bind(&null);
2975  __ LoadRoot(v0, Heap::kNullValueRootIndex);
2976 
2977  // All done.
2978  __ bind(&done);
2979 
2980  context()->Plug(v0);
2981 }
2982 
2983 
2984 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2985  // Conditionally generate a log call.
2986  // Args:
2987  // 0 (literal string): The type of logging (corresponds to the flags).
2988  // This is used to determine whether or not to generate the log call.
2989  // 1 (string): Format string. Access the string at argument index 2
2990  // with '%2s' (see Logger::LogRuntime for all the formats).
2991  // 2 (array): Arguments to the format string.
2992  ZoneList<Expression*>* args = expr->arguments();
2993  ASSERT_EQ(args->length(), 3);
2994  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2995  VisitForStackValue(args->at(1));
2996  VisitForStackValue(args->at(2));
2997  __ CallRuntime(Runtime::kLog, 2);
2998  }
2999 
3000  // Finally, we're expected to leave a value on the top of the stack.
3001  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3002  context()->Plug(v0);
3003 }
3004 
3005 
3006 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3007  ASSERT(expr->arguments()->length() == 0);
3008  Label slow_allocate_heapnumber;
3009  Label heapnumber_allocated;
3010 
3011  // Save the new heap number in callee-saved register s0, since
3012  // we call out to external C code below.
3013  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3014  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
3015  __ jmp(&heapnumber_allocated);
3016 
3017  __ bind(&slow_allocate_heapnumber);
3018 
3019  // Allocate a heap number.
3020  __ CallRuntime(Runtime::kNumberAlloc, 0);
3021  __ mov(s0, v0); // Save result in s0, so it is preserved through the C function call.
3022 
3023  __ bind(&heapnumber_allocated);
3024 
3025  // Convert 32 random bits in v0 to 0.(32 random bits) in a double
3026  // by computing:
3027  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
3028  if (CpuFeatures::IsSupported(FPU)) {
3029  __ PrepareCallCFunction(1, a0);
3032  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3033 
3034  CpuFeatures::Scope scope(FPU);
3035  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3036  __ li(a1, Operand(0x41300000));
3037  // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
3038  __ Move(f12, v0, a1);
3039  // Move 0x4130000000000000 to FPU.
3040  __ Move(f14, zero_reg, a1);
3041  // Subtract and store the result in the heap number.
3042  __ sub_d(f0, f12, f14);
3044  __ mov(v0, s0);
3045  } else {
3046  __ PrepareCallCFunction(2, a0);
3047  __ mov(a0, s0);
3050  __ CallCFunction(
3051  ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
3052  }
3053 
3054  context()->Plug(v0);
3055 }
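// Roughly, the FPU path above builds the double with bit pattern
// 0x41300000:rrrrrrrr, i.e. (1 + r/2^52) * 2^20 = 2^20 + r/2^32 for the 32
// random bits r, and subtracts 2^20 (0x4130000000000000), leaving r/2^32 in
// [0, 1), which is then stored into the pre-allocated heap number.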
3056 
3057 
3058 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3059  // Load the arguments on the stack and call the stub.
3060  SubStringStub stub;
3061  ZoneList<Expression*>* args = expr->arguments();
3062  ASSERT(args->length() == 3);
3063  VisitForStackValue(args->at(0));
3064  VisitForStackValue(args->at(1));
3065  VisitForStackValue(args->at(2));
3066  __ CallStub(&stub);
3067  context()->Plug(v0);
3068 }
3069 
3070 
3071 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3072  // Load the arguments on the stack and call the stub.
3073  RegExpExecStub stub;
3074  ZoneList<Expression*>* args = expr->arguments();
3075  ASSERT(args->length() == 4);
3076  VisitForStackValue(args->at(0));
3077  VisitForStackValue(args->at(1));
3078  VisitForStackValue(args->at(2));
3079  VisitForStackValue(args->at(3));
3080  __ CallStub(&stub);
3081  context()->Plug(v0);
3082 }
3083 
3084 
3085 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3086  ZoneList<Expression*>* args = expr->arguments();
3087  ASSERT(args->length() == 1);
3088 
3089  VisitForAccumulatorValue(args->at(0)); // Load the object.
3090 
3091  Label done;
3092  // If the object is a smi return the object.
3093  __ JumpIfSmi(v0, &done);
3094  // If the object is not a value type, return the object.
3095  __ GetObjectType(v0, a1, a1);
3096  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3097 
3098  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3099 
3100  __ bind(&done);
3101  context()->Plug(v0);
3102 }
3103 
3104 
3105 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3106  ZoneList<Expression*>* args = expr->arguments();
3107  ASSERT(args->length() == 2);
3108  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3109  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3110 
3111  VisitForAccumulatorValue(args->at(0)); // Load the object.
3112 
3113  Label runtime, done, not_date_object;
3114  Register object = v0;
3115  Register result = v0;
3116  Register scratch0 = t5;
3117  Register scratch1 = a1;
3118 
3119  __ JumpIfSmi(object, &not_date_object);
3120  __ GetObjectType(object, scratch1, scratch1);
3121  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3122 
3123  if (index->value() == 0) {
3124  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3125  __ jmp(&done);
3126  } else {
3127  if (index->value() < JSDate::kFirstUncachedField) {
3128  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3129  __ li(scratch1, Operand(stamp));
3130  __ lw(scratch1, MemOperand(scratch1));
3131  __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3132  __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3133  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3134  kPointerSize * index->value()));
3135  __ jmp(&done);
3136  }
3137  __ bind(&runtime);
3138  __ PrepareCallCFunction(2, scratch1);
3139  __ li(a1, Operand(index));
3140  __ Move(a0, object);
3141  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3142  __ jmp(&done);
3143  }
3144 
3145  __ bind(&not_date_object);
3146  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3147  __ bind(&done);
3148  context()->Plug(v0);
3149 }
3150 
3151 
3152 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3153  // Load the arguments on the stack and call the runtime function.
3154  ZoneList<Expression*>* args = expr->arguments();
3155  ASSERT(args->length() == 2);
3156  VisitForStackValue(args->at(0));
3157  VisitForStackValue(args->at(1));
3158  if (CpuFeatures::IsSupported(FPU)) {
3159  MathPowStub stub(MathPowStub::ON_STACK);
3160  __ CallStub(&stub);
3161  } else {
3162  __ CallRuntime(Runtime::kMath_pow, 2);
3163  }
3164  context()->Plug(v0);
3165 }
3166 
3167 
3168 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3169  ZoneList<Expression*>* args = expr->arguments();
3170  ASSERT(args->length() == 2);
3171 
3172  VisitForStackValue(args->at(0)); // Load the object.
3173  VisitForAccumulatorValue(args->at(1)); // Load the value.
3174  __ pop(a1); // v0 = value. a1 = object.
3175 
3176  Label done;
3177  // If the object is a smi, return the value.
3178  __ JumpIfSmi(a1, &done);
3179 
3180  // If the object is not a value type, return the value.
3181  __ GetObjectType(a1, a2, a2);
3182  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3183 
3184  // Store the value.
3185  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3186  // Update the write barrier. Save the value as it will be
3187  // overwritten by the write barrier code and is needed afterward.
3188  __ mov(a2, v0);
3189  __ RecordWriteField(
3191 
3192  __ bind(&done);
3193  context()->Plug(v0);
3194 }
3195 
3196 
3197 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3198  ZoneList<Expression*>* args = expr->arguments();
3199  ASSERT_EQ(args->length(), 1);
3200 
3201  // Load the argument on the stack and call the stub.
3202  VisitForStackValue(args->at(0));
3203 
3204  NumberToStringStub stub;
3205  __ CallStub(&stub);
3206  context()->Plug(v0);
3207 }
3208 
3209 
3210 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3211  ZoneList<Expression*>* args = expr->arguments();
3212  ASSERT(args->length() == 1);
3213 
3214  VisitForAccumulatorValue(args->at(0));
3215 
3216  Label done;
3217  StringCharFromCodeGenerator generator(v0, a1);
3218  generator.GenerateFast(masm_);
3219  __ jmp(&done);
3220 
3221  NopRuntimeCallHelper call_helper;
3222  generator.GenerateSlow(masm_, call_helper);
3223 
3224  __ bind(&done);
3225  context()->Plug(a1);
3226 }
3227 
3228 
3229 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3230  ZoneList<Expression*>* args = expr->arguments();
3231  ASSERT(args->length() == 2);
3232 
3233  VisitForStackValue(args->at(0));
3234  VisitForAccumulatorValue(args->at(1));
3235  __ mov(a0, result_register());
3236 
3237  Register object = a1;
3238  Register index = a0;
3239  Register result = v0;
3240 
3241  __ pop(object);
3242 
3243  Label need_conversion;
3244  Label index_out_of_range;
3245  Label done;
3246  StringCharCodeAtGenerator generator(object,
3247  index,
3248  result,
3249  &need_conversion,
3250  &need_conversion,
3251  &index_out_of_range,
3252  STRING_INDEX_IS_NUMBER);
3253  generator.GenerateFast(masm_);
3254  __ jmp(&done);
3255 
3256  __ bind(&index_out_of_range);
3257  // When the index is out of range, the spec requires us to return
3258  // NaN.
3259  __ LoadRoot(result, Heap::kNanValueRootIndex);
3260  __ jmp(&done);
3261 
3262  __ bind(&need_conversion);
3263  // Load the undefined value into the result register, which will
3264  // trigger conversion.
3265  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3266  __ jmp(&done);
3267 
3268  NopRuntimeCallHelper call_helper;
3269  generator.GenerateSlow(masm_, call_helper);
3270 
3271  __ bind(&done);
3272  context()->Plug(result);
3273 }
3274 
3275 
3276 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3277  ZoneList<Expression*>* args = expr->arguments();
3278  ASSERT(args->length() == 2);
3279 
3280  VisitForStackValue(args->at(0));
3281  VisitForAccumulatorValue(args->at(1));
3282  __ mov(a0, result_register());
3283 
3284  Register object = a1;
3285  Register index = a0;
3286  Register scratch = a3;
3287  Register result = v0;
3288 
3289  __ pop(object);
3290 
3291  Label need_conversion;
3292  Label index_out_of_range;
3293  Label done;
3294  StringCharAtGenerator generator(object,
3295  index,
3296  scratch,
3297  result,
3298  &need_conversion,
3299  &need_conversion,
3300  &index_out_of_range,
3301  STRING_INDEX_IS_NUMBER);
3302  generator.GenerateFast(masm_);
3303  __ jmp(&done);
3304 
3305  __ bind(&index_out_of_range);
3306  // When the index is out of range, the spec requires us to return
3307  // the empty string.
3308  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3309  __ jmp(&done);
3310 
3311  __ bind(&need_conversion);
3312  // Move smi zero into the result register, which will trigger
3313  // conversion.
3314  __ li(result, Operand(Smi::FromInt(0)));
3315  __ jmp(&done);
3316 
3317  NopRuntimeCallHelper call_helper;
3318  generator.GenerateSlow(masm_, call_helper);
3319 
3320  __ bind(&done);
3321  context()->Plug(result);
3322 }
3323 
3324 
3325 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3326  ZoneList<Expression*>* args = expr->arguments();
3327  ASSERT_EQ(2, args->length());
3328  VisitForStackValue(args->at(0));
3329  VisitForStackValue(args->at(1));
3330 
3331  StringAddStub stub(NO_STRING_ADD_FLAGS);
3332  __ CallStub(&stub);
3333  context()->Plug(v0);
3334 }
3335 
3336 
3337 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3338  ZoneList<Expression*>* args = expr->arguments();
3339  ASSERT_EQ(2, args->length());
3340 
3341  VisitForStackValue(args->at(0));
3342  VisitForStackValue(args->at(1));
3343 
3344  StringCompareStub stub;
3345  __ CallStub(&stub);
3346  context()->Plug(v0);
3347 }
3348 
3349 
3350 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3351  // Load the argument on the stack and call the stub.
3352  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3353  TranscendentalCacheStub::TAGGED);
3354  ZoneList<Expression*>* args = expr->arguments();
3355  ASSERT(args->length() == 1);
3356  VisitForStackValue(args->at(0));
3357  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3358  __ CallStub(&stub);
3359  context()->Plug(v0);
3360 }
3361 
3362 
3363 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3364  // Load the argument on the stack and call the stub.
3365  TranscendentalCacheStub stub(TranscendentalCache::COS,
3366  TranscendentalCacheStub::TAGGED);
3367  ZoneList<Expression*>* args = expr->arguments();
3368  ASSERT(args->length() == 1);
3369  VisitForStackValue(args->at(0));
3370  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3371  __ CallStub(&stub);
3372  context()->Plug(v0);
3373 }
3374 
3375 
3376 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3377  // Load the argument on the stack and call the stub.
3378  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3379  TranscendentalCacheStub::TAGGED);
3380  ZoneList<Expression*>* args = expr->arguments();
3381  ASSERT(args->length() == 1);
3382  VisitForStackValue(args->at(0));
3383  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3384  __ CallStub(&stub);
3385  context()->Plug(v0);
3386 }
3387 
3388 
3389 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3390  // Load the argument on the stack and call the stub.
3391  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3392  TranscendentalCacheStub::TAGGED);
3393  ZoneList<Expression*>* args = expr->arguments();
3394  ASSERT(args->length() == 1);
3395  VisitForStackValue(args->at(0));
3396  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3397  __ CallStub(&stub);
3398  context()->Plug(v0);
3399 }
3400 
3401 
3402 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3403  // Load the argument on the stack and call the runtime function.
3404  ZoneList<Expression*>* args = expr->arguments();
3405  ASSERT(args->length() == 1);
3406  VisitForStackValue(args->at(0));
3407  __ CallRuntime(Runtime::kMath_sqrt, 1);
3408  context()->Plug(v0);
3409 }
3410 
3411 
3412 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3413  ZoneList<Expression*>* args = expr->arguments();
3414  ASSERT(args->length() >= 2);
3415 
3416  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3417  for (int i = 0; i < arg_count + 1; i++) {
3418  VisitForStackValue(args->at(i));
3419  }
3420  VisitForAccumulatorValue(args->last()); // Function.
3421 
3422  Label runtime, done;
3423  // Check for non-function argument (including proxy).
3424  __ JumpIfSmi(v0, &runtime);
3425  __ GetObjectType(v0, a1, a1);
3426  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3427 
3428  // InvokeFunction requires the function in a1. Move it in there.
3429  __ mov(a1, result_register());
3430  ParameterCount count(arg_count);
3431  __ InvokeFunction(a1, count, CALL_FUNCTION,
3432  NullCallWrapper(), CALL_AS_METHOD);
3433  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3434  __ jmp(&done);
3435 
3436  __ bind(&runtime);
3437  __ push(v0);
3438  __ CallRuntime(Runtime::kCall, args->length());
3439  __ bind(&done);
3440 
3441  context()->Plug(v0);
3442 }
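
// Editorial note on the fast path above: the receiver and arguments are
// pushed, the trailing function argument is checked to be a JSFunction (a
// smi or a proxy fails the check), and if the check passes it is invoked
// directly as a method call; otherwise the whole call is handed to the
// generic Runtime::kCall path.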
3443 
3444 
3445 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3446  RegExpConstructResultStub stub;
3447  ZoneList<Expression*>* args = expr->arguments();
3448  ASSERT(args->length() == 3);
3449  VisitForStackValue(args->at(0));
3450  VisitForStackValue(args->at(1));
3451  VisitForStackValue(args->at(2));
3452  __ CallStub(&stub);
3453  context()->Plug(v0);
3454 }
3455 
3456 
3457 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3458  ZoneList<Expression*>* args = expr->arguments();
3459  ASSERT_EQ(2, args->length());
3460 
3461  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3462  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3463 
3464  Handle<FixedArray> jsfunction_result_caches(
3465  isolate()->native_context()->jsfunction_result_caches());
3466  if (jsfunction_result_caches->length() <= cache_id) {
3467  __ Abort("Attempt to use undefined cache.");
3468  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3469  context()->Plug(v0);
3470  return;
3471  }
3472 
3473  VisitForAccumulatorValue(args->at(1));
3474 
3475  Register key = v0;
3476  Register cache = a1;
3477  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3478  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3479  __ lw(cache,
3480  ContextOperand(
3481  cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3482  __ lw(cache,
3483  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3484 
3485 
3486  Label done, not_found;
3487  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3488  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3489  // a2 now holds finger offset as a smi.
3490  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3491  // a3 now points to the start of fixed array elements.
3492  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3493  __ addu(a3, a3, at);
3494  // a3 now points to key of indexed element of cache.
3495  __ lw(a2, MemOperand(a3));
3496  __ Branch(&not_found, ne, key, Operand(a2));
3497 
3498  __ lw(v0, MemOperand(a3, kPointerSize));
3499  __ Branch(&done);
3500 
3501  __ bind(&not_found);
3502  // Call runtime to perform the lookup.
3503  __ Push(cache, key);
3504  __ CallRuntime(Runtime::kGetFromCache, 2);
3505 
3506  __ bind(&done);
3507  context()->Plug(v0);
3508 }
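
// Editorial sketch (not part of the original file; SketchResultCache and
// SketchProbeAtFinger are illustrative names): the fast-path probe that
// EmitGetFromCache emits, in plain C++. The layout assumed here mirrors
// JSFunctionResultCache: a backing array of alternating key/value slots and
// a "finger" index naming the most recently used key slot.
#include <cstddef>
#include <vector>

struct SketchResultCache {
  std::vector<const void*> slots;  // [key0, value0, key1, value1, ...]
  size_t finger;                   // index of the last key slot that hit
};

// Returns the cached value when the key at the finger matches; NULL means
// the Runtime::kGetFromCache slow path must perform the full lookup.
static const void* SketchProbeAtFinger(const SketchResultCache& cache,
                                       const void* key) {
  if (cache.finger + 1 < cache.slots.size() &&
      cache.slots[cache.finger] == key) {
    return cache.slots[cache.finger + 1];
  }
  return NULL;
}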
3509 
3510 
3511 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3512  ZoneList<Expression*>* args = expr->arguments();
3513  ASSERT_EQ(2, args->length());
3514 
3515  Register right = v0;
3516  Register left = a1;
3517  Register tmp = a2;
3518  Register tmp2 = a3;
3519 
3520  VisitForStackValue(args->at(0));
3521  VisitForAccumulatorValue(args->at(1)); // Result (right) in v0.
3522  __ pop(left);
3523 
3524  Label done, fail, ok;
3525  __ Branch(&ok, eq, left, Operand(right));
3526  // Fail if either is a non-HeapObject.
3527  __ And(tmp, left, Operand(right));
3528  __ JumpIfSmi(tmp, &fail);
3529  __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3530  __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3531  __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
3532  __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3533  __ Branch(&fail, ne, tmp, Operand(tmp2));
3534  __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3535  __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3536  __ Branch(&ok, eq, tmp, Operand(tmp2));
3537  __ bind(&fail);
3538  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3539  __ jmp(&done);
3540  __ bind(&ok);
3541  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3542  __ bind(&done);
3543 
3544  context()->Plug(v0);
3545 }
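
// Editorial note on the predicate above: two values are treated as
// equivalent regexps when they are the same object, or when both are
// JS_REGEXP_TYPE heap objects that share the same map and the same data
// array (and therefore the same source pattern and flags); a smi or any
// other kind of object makes the comparison false.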
3546 
3547 
3548 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3549  ZoneList<Expression*>* args = expr->arguments();
3550  VisitForAccumulatorValue(args->at(0));
3551 
3552  Label materialize_true, materialize_false;
3553  Label* if_true = NULL;
3554  Label* if_false = NULL;
3555  Label* fall_through = NULL;
3556  context()->PrepareTest(&materialize_true, &materialize_false,
3557  &if_true, &if_false, &fall_through);
3558 
3559  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3560  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3561 
3562  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3563  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3564 
3565  context()->Plug(if_true, if_false);
3566 }
3567 
3568 
3569 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3570  ZoneList<Expression*>* args = expr->arguments();
3571  ASSERT(args->length() == 1);
3572  VisitForAccumulatorValue(args->at(0));
3573 
3574  __ AssertString(v0);
3575 
3576  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3577  __ IndexFromHash(v0, v0);
3578 
3579  context()->Plug(v0);
3580 }
3581 
3582 
3583 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3584  Label bailout, done, one_char_separator, long_separator,
3585  non_trivial_array, not_size_one_array, loop,
3586  empty_separator_loop, one_char_separator_loop,
3587  one_char_separator_loop_entry, long_separator_loop;
3588  ZoneList<Expression*>* args = expr->arguments();
3589  ASSERT(args->length() == 2);
3590  VisitForStackValue(args->at(1));
3591  VisitForAccumulatorValue(args->at(0));
3592 
3593  // All aliases of the same register have disjoint lifetimes.
3594  Register array = v0;
3595  Register elements = no_reg; // Will be v0.
3596  Register result = no_reg; // Will be v0.
3597  Register separator = a1;
3598  Register array_length = a2;
3599  Register result_pos = no_reg; // Will be a2.
3600  Register string_length = a3;
3601  Register string = t0;
3602  Register element = t1;
3603  Register elements_end = t2;
3604  Register scratch1 = t3;
3605  Register scratch2 = t5;
3606  Register scratch3 = t4;
3607 
3608  // Separator operand is on the stack.
3609  __ pop(separator);
3610 
3611  // Check that the array is a JSArray.
3612  __ JumpIfSmi(array, &bailout);
3613  __ GetObjectType(array, scratch1, scratch2);
3614  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3615 
3616  // Check that the array has fast elements.
3617  __ CheckFastElements(scratch1, scratch2, &bailout);
3618 
3619  // If the array has length zero, return the empty string.
3620  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3621  __ SmiUntag(array_length);
3622  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3623  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
3624  __ Branch(&done);
3625 
3626  __ bind(&non_trivial_array);
3627 
3628  // Get the FixedArray containing array's elements.
3629  elements = array;
3630  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3631  array = no_reg; // End of array's live range.
3632 
3633  // Check that all array elements are sequential ASCII strings, and
3634  // accumulate the sum of their lengths, as a smi-encoded value.
3635  __ mov(string_length, zero_reg);
3636  __ Addu(element,
3637  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3638  __ sll(elements_end, array_length, kPointerSizeLog2);
3639  __ Addu(elements_end, element, elements_end);
3640  // Loop condition: while (element < elements_end).
3641  // Live values in registers:
3642  // elements: Fixed array of strings.
3643  // array_length: Length of the fixed array of strings (not smi)
3644  // separator: Separator string
3645  // string_length: Accumulated sum of string lengths (smi).
3646  // element: Current array element.
3647  // elements_end: Array end.
3648  if (generate_debug_code_) {
3649  __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
3650  array_length, Operand(zero_reg));
3651  }
3652  __ bind(&loop);
3653  __ lw(string, MemOperand(element));
3654  __ Addu(element, element, kPointerSize);
3655  __ JumpIfSmi(string, &bailout);
3656  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3657  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3658  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3659  __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3660  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3661  __ BranchOnOverflow(&bailout, scratch3);
3662  __ Branch(&loop, lt, element, Operand(elements_end));
3663 
3664  // If array_length is 1, return elements[0], a string.
3665  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
3666  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3667  __ Branch(&done);
3668 
3669  __ bind(&not_size_one_array);
3670 
3671  // Live values in registers:
3672  // separator: Separator string
3673  // array_length: Length of the array.
3674  // string_length: Sum of string lengths (smi).
3675  // elements: FixedArray of strings.
3676 
3677  // Check that the separator is a flat ASCII string.
3678  __ JumpIfSmi(separator, &bailout);
3679  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3680  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3681  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3682 
3683  // Add (separator length times array_length) - separator length to the
3684  // string_length to get the length of the result string. array_length is not
3685  // smi but the other values are, so the result is a smi.
3686  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3687  __ Subu(string_length, string_length, Operand(scratch1));
3688  __ Mult(array_length, scratch1);
3689  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3690  // zero.
3691  __ mfhi(scratch2);
3692  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
3693  __ mflo(scratch2);
3694  __ And(scratch3, scratch2, Operand(0x80000000));
3695  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
3696  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
3697  __ BranchOnOverflow(&bailout, scratch3);
3698  __ SmiUntag(string_length);
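
// Worked example of the length computation above: joining 3 strings whose
// lengths sum to 10 characters with a 2-character separator needs
// 10 + 2 * (3 - 1) = 14 characters. The code computes this as
// (10 - 2) + 2 * 3, using the 64-bit multiply result to reject any product
// that does not fit in a non-negative smi and AdduAndCheckForOverflow to
// reject an overflowing final sum.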
3699 
3700  // Get first element in the array to free up the elements register to be used
3701  // for the result.
3702  __ Addu(element,
3703  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3704  result = elements; // End of live range for elements.
3705  elements = no_reg;
3706  // Live values in registers:
3707  // element: First array element
3708  // separator: Separator string
3709  // string_length: Length of result string (not smi)
3710  // array_length: Length of the array.
3711  __ AllocateAsciiString(result,
3712  string_length,
3713  scratch1,
3714  scratch2,
3715  elements_end,
3716  &bailout);
3717  // Prepare for looping. Set up elements_end to end of the array. Set
3718  // result_pos to the position in the result where the first
3719  // character will be written.
3720  __ sll(elements_end, array_length, kPointerSizeLog2);
3721  __ Addu(elements_end, element, elements_end);
3722  result_pos = array_length; // End of live range for array_length.
3723  array_length = no_reg;
3724  __ Addu(result_pos,
3725  result,
3726  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3727 
3728  // Check the length of the separator.
3729  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3730  __ li(at, Operand(Smi::FromInt(1)));
3731  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
3732  __ Branch(&long_separator, gt, scratch1, Operand(at));
3733 
3734  // Empty separator case.
3735  __ bind(&empty_separator_loop);
3736  // Live values in registers:
3737  // result_pos: the position to which we are currently copying characters.
3738  // element: Current array element.
3739  // elements_end: Array end.
3740 
3741  // Copy next array element to the result.
3742  __ lw(string, MemOperand(element));
3743  __ Addu(element, element, kPointerSize);
3744  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3745  __ SmiUntag(string_length);
3746  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3747  __ CopyBytes(string, result_pos, string_length, scratch1);
3748  // End while (element < elements_end).
3749  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
3750  ASSERT(result.is(v0));
3751  __ Branch(&done);
3752 
3753  // One-character separator case.
3754  __ bind(&one_char_separator);
3755  // Replace separator with its ASCII character value.
3756  __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3757  // Jump into the loop after the code that copies the separator, so the first
3758  // element is not preceded by a separator.
3759  __ jmp(&one_char_separator_loop_entry);
3760 
3761  __ bind(&one_char_separator_loop);
3762  // Live values in registers:
3763  // result_pos: the position to which we are currently copying characters.
3764  // element: Current array element.
3765  // elements_end: Array end.
3766  // separator: Single separator ASCII char (in lower byte).
3767 
3768  // Copy the separator character to the result.
3769  __ sb(separator, MemOperand(result_pos));
3770  __ Addu(result_pos, result_pos, 1);
3771 
3772  // Copy next array element to the result.
3773  __ bind(&one_char_separator_loop_entry);
3774  __ lw(string, MemOperand(element));
3775  __ Addu(element, element, kPointerSize);
3776  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3777  __ SmiUntag(string_length);
3778  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3779  __ CopyBytes(string, result_pos, string_length, scratch1);
3780  // End while (element < elements_end).
3781  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
3782  ASSERT(result.is(v0));
3783  __ Branch(&done);
3784 
3785  // Long separator case (separator is more than one character). Entry is at the
3786  // label long_separator below.
3787  __ bind(&long_separator_loop);
3788  // Live values in registers:
3789  // result_pos: the position to which we are currently copying characters.
3790  // element: Current array element.
3791  // elements_end: Array end.
3792  // separator: Separator string.
3793 
3794  // Copy the separator to the result.
3795  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
3796  __ SmiUntag(string_length);
3797  __ Addu(string,
3798  separator,
3799  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3800  __ CopyBytes(string, result_pos, string_length, scratch1);
3801 
3802  __ bind(&long_separator);
3803  __ lw(string, MemOperand(element));
3804  __ Addu(element, element, kPointerSize);
3805  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3806  __ SmiUntag(string_length);
3807  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3808  __ CopyBytes(string, result_pos, string_length, scratch1);
3809  // End while (element < elements_end).
3810  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
3811  ASSERT(result.is(v0));
3812  __ Branch(&done);
3813 
3814  __ bind(&bailout);
3815  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3816  __ bind(&done);
3817  context()->Plug(v0);
3818 }
3819 
3820 
3821 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3822  Handle<String> name = expr->name();
3823  if (name->length() > 0 && name->Get(0) == '_') {
3824  Comment cmnt(masm_, "[ InlineRuntimeCall");
3825  EmitInlineRuntimeCall(expr);
3826  return;
3827  }
3828 
3829  Comment cmnt(masm_, "[ CallRuntime");
3830  ZoneList<Expression*>* args = expr->arguments();
3831 
3832  if (expr->is_jsruntime()) {
3833  // Prepare for calling JS runtime function.
3834  __ lw(a0, GlobalObjectOperand());
3835  __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
3836  __ push(a0);
3837  }
3838 
3839  // Push the arguments ("left-to-right").
3840  int arg_count = args->length();
3841  for (int i = 0; i < arg_count; i++) {
3842  VisitForStackValue(args->at(i));
3843  }
3844 
3845  if (expr->is_jsruntime()) {
3846  // Call the JS runtime function.
3847  __ li(a2, Operand(expr->name()));
3848  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3849  Handle<Code> ic =
3850  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3851  CallIC(ic, mode, expr->CallRuntimeFeedbackId());
3852  // Restore context register.
3853  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3854  } else {
3855  // Call the C runtime function.
3856  __ CallRuntime(expr->function(), arg_count);
3857  }
3858  context()->Plug(v0);
3859 }
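
// Editorial note on the dispatch above: a runtime call whose name starts
// with '_' is expanded inline by EmitInlineRuntimeCall; a JS runtime
// function is looked up on the builtins object and invoked through a call
// IC, after which cp is restored from the frame; everything else is a
// direct call into the C++ runtime via CallRuntime.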
3860 
3861 
3862 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3863  switch (expr->op()) {
3864  case Token::DELETE: {
3865  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3866  Property* property = expr->expression()->AsProperty();
3867  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3868 
3869  if (property != NULL) {
3870  VisitForStackValue(property->obj());
3871  VisitForStackValue(property->key());
3872  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3873  ? kNonStrictMode : kStrictMode;
3874  __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
3875  __ push(a1);
3876  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3877  context()->Plug(v0);
3878  } else if (proxy != NULL) {
3879  Variable* var = proxy->var();
3880  // Delete of an unqualified identifier is disallowed in strict mode
3881  // but "delete this" is allowed.
3882  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3883  if (var->IsUnallocated()) {
3884  __ lw(a2, GlobalObjectOperand());
3885  __ li(a1, Operand(var->name()));
3886  __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3887  __ Push(a2, a1, a0);
3888  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3889  context()->Plug(v0);
3890  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3891  // Result of deleting non-global, non-dynamic variables is false.
3892  // The subexpression does not have side effects.
3893  context()->Plug(var->is_this());
3894  } else {
3895  // Non-global variable. Call the runtime to try to delete from the
3896  // context where the variable was introduced.
3897  __ push(context_register());
3898  __ li(a2, Operand(var->name()));
3899  __ push(a2);
3900  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3901  context()->Plug(v0);
3902  }
3903  } else {
3904  // Result of deleting non-property, non-variable reference is true.
3905  // The subexpression may have side effects.
3906  VisitForEffect(expr->expression());
3907  context()->Plug(true);
3908  }
3909  break;
3910  }
3911 
3912  case Token::VOID: {
3913  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3914  VisitForEffect(expr->expression());
3915  context()->Plug(Heap::kUndefinedValueRootIndex);
3916  break;
3917  }
3918 
3919  case Token::NOT: {
3920  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3921  if (context()->IsEffect()) {
3922  // Unary NOT has no side effects so it's only necessary to visit the
3923  // subexpression. Match the optimizing compiler by not branching.
3924  VisitForEffect(expr->expression());
3925  } else if (context()->IsTest()) {
3926  const TestContext* test = TestContext::cast(context());
3927  // The labels are swapped for the recursive call.
3928  VisitForControl(expr->expression(),
3929  test->false_label(),
3930  test->true_label(),
3931  test->fall_through());
3932  context()->Plug(test->true_label(), test->false_label());
3933  } else {
3934  // We handle value contexts explicitly rather than simply visiting
3935  // for control and plugging the control flow into the context,
3936  // because we need to prepare a pair of extra administrative AST ids
3937  // for the optimizing compiler.
3938  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3939  Label materialize_true, materialize_false, done;
3940  VisitForControl(expr->expression(),
3941  &materialize_false,
3942  &materialize_true,
3943  &materialize_true);
3944  __ bind(&materialize_true);
3945  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3946  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3947  if (context()->IsStackValue()) __ push(v0);
3948  __ jmp(&done);
3949  __ bind(&materialize_false);
3950  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3951  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3952  if (context()->IsStackValue()) __ push(v0);
3953  __ bind(&done);
3954  }
3955  break;
3956  }
3957 
3958  case Token::TYPEOF: {
3959  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3960  { StackValueContext context(this);
3961  VisitForTypeofValue(expr->expression());
3962  }
3963  __ CallRuntime(Runtime::kTypeof, 1);
3964  context()->Plug(v0);
3965  break;
3966  }
3967 
3968  case Token::ADD: {
3969  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3970  VisitForAccumulatorValue(expr->expression());
3971  Label no_conversion;
3972  __ JumpIfSmi(result_register(), &no_conversion);
3973  __ mov(a0, result_register());
3974  ToNumberStub convert_stub;
3975  __ CallStub(&convert_stub);
3976  __ bind(&no_conversion);
3977  context()->Plug(result_register());
3978  break;
3979  }
3980 
3981  case Token::SUB:
3982  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3983  break;
3984 
3985  case Token::BIT_NOT:
3986  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3987  break;
3988 
3989  default:
3990  UNREACHABLE();
3991  }
3992 }
3993 
3994 
3995 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3996  const char* comment) {
3997  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3998  Comment cmt(masm_, comment);
3999  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
4000  UnaryOverwriteMode overwrite =
4001  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
4002  UnaryOpStub stub(expr->op(), overwrite);
4003  // UnaryOpStub expects the argument to be in a0.
4004  VisitForAccumulatorValue(expr->expression());
4005  SetSourcePosition(expr->position());
4006  __ mov(a0, result_register());
4007  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
4008  expr->UnaryOperationFeedbackId());
4009  context()->Plug(v0);
4010 }
4011 
4012 
4013 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4014  Comment cmnt(masm_, "[ CountOperation");
4015  SetSourcePosition(expr->position());
4016 
4017  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4018  // as the left-hand side.
4019  if (!expr->expression()->IsValidLeftHandSide()) {
4020  VisitForEffect(expr->expression());
4021  return;
4022  }
4023 
4024  // Expression can only be a property, a global or a (parameter or local)
4025  // slot.
4026  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4027  LhsKind assign_type = VARIABLE;
4028  Property* prop = expr->expression()->AsProperty();
4029  // In case of a property we use the uninitialized expression context
4030  // of the key to detect a named property.
4031  if (prop != NULL) {
4032  assign_type =
4033  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4034  }
4035 
4036  // Evaluate expression and get value.
4037  if (assign_type == VARIABLE) {
4038  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4039  AccumulatorValueContext context(this);
4040  EmitVariableLoad(expr->expression()->AsVariableProxy());
4041  } else {
4042  // Reserve space for result of postfix operation.
4043  if (expr->is_postfix() && !context()->IsEffect()) {
4044  __ li(at, Operand(Smi::FromInt(0)));
4045  __ push(at);
4046  }
4047  if (assign_type == NAMED_PROPERTY) {
4048  // Put the object both on the stack and in the accumulator.
4049  VisitForAccumulatorValue(prop->obj());
4050  __ push(v0);
4051  EmitNamedPropertyLoad(prop);
4052  } else {
4053  VisitForStackValue(prop->obj());
4054  VisitForAccumulatorValue(prop->key());
4055  __ lw(a1, MemOperand(sp, 0));
4056  __ push(v0);
4057  EmitKeyedPropertyLoad(prop);
4058  }
4059  }
4060 
4061  // We need a second deoptimization point after loading the value
4062  // in case evaluating the property load may have a side effect.
4063  if (assign_type == VARIABLE) {
4064  PrepareForBailout(expr->expression(), TOS_REG);
4065  } else {
4066  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4067  }
4068 
4069  // Call ToNumber only if operand is not a smi.
4070  Label no_conversion;
4071  __ JumpIfSmi(v0, &no_conversion);
4072  __ mov(a0, v0);
4073  ToNumberStub convert_stub;
4074  __ CallStub(&convert_stub);
4075  __ bind(&no_conversion);
4076 
4077  // Save result for postfix expressions.
4078  if (expr->is_postfix()) {
4079  if (!context()->IsEffect()) {
4080  // Save the result on the stack. If we have a named or keyed property
4081  // we store the result under the receiver that is currently on top
4082  // of the stack.
4083  switch (assign_type) {
4084  case VARIABLE:
4085  __ push(v0);
4086  break;
4087  case NAMED_PROPERTY:
4088  __ sw(v0, MemOperand(sp, kPointerSize));
4089  break;
4090  case KEYED_PROPERTY:
4091  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4092  break;
4093  }
4094  }
4095  }
4096  __ mov(a0, result_register());
4097 
4098  // Inline smi case if we are in a loop.
4099  Label stub_call, done;
4100  JumpPatchSite patch_site(masm_);
4101 
4102  int count_value = expr->op() == Token::INC ? 1 : -1;
4103  __ li(a1, Operand(Smi::FromInt(count_value)));
4104 
4105  if (ShouldInlineSmiCase(expr->op())) {
4106  __ AdduAndCheckForOverflow(v0, a0, a1, t0);
4107  __ BranchOnOverflow(&stub_call, t0); // Do stub on overflow.
4108 
4109  // We could eliminate this smi check if we split the code at
4110  // the first smi check before calling ToNumber.
4111  patch_site.EmitJumpIfSmi(v0, &done);
4112  __ bind(&stub_call);
4113  }
4114 
4115  // Record position before stub call.
4116  SetSourcePosition(expr->position());
4117 
4118  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4119  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4120  patch_site.EmitPatchInfo();
4121  __ bind(&done);
4122 
4123  // Store the value returned in v0.
4124  switch (assign_type) {
4125  case VARIABLE:
4126  if (expr->is_postfix()) {
4127  { EffectContext context(this);
4128  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4129  Token::ASSIGN);
4130  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4131  context.Plug(v0);
4132  }
4133  // For all contexts except EffectContext we have the result on
4134  // top of the stack.
4135  if (!context()->IsEffect()) {
4136  context()->PlugTOS();
4137  }
4138  } else {
4139  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4140  Token::ASSIGN);
4141  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4142  context()->Plug(v0);
4143  }
4144  break;
4145  case NAMED_PROPERTY: {
4146  __ mov(a0, result_register()); // Value.
4147  __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name.
4148  __ pop(a1); // Receiver.
4149  Handle<Code> ic = is_classic_mode()
4150  ? isolate()->builtins()->StoreIC_Initialize()
4151  : isolate()->builtins()->StoreIC_Initialize_Strict();
4152  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4153  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4154  if (expr->is_postfix()) {
4155  if (!context()->IsEffect()) {
4156  context()->PlugTOS();
4157  }
4158  } else {
4159  context()->Plug(v0);
4160  }
4161  break;
4162  }
4163  case KEYED_PROPERTY: {
4164  __ mov(a0, result_register()); // Value.
4165  __ pop(a1); // Key.
4166  __ pop(a2); // Receiver.
4167  Handle<Code> ic = is_classic_mode()
4168  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4169  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4170  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4171  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4172  if (expr->is_postfix()) {
4173  if (!context()->IsEffect()) {
4174  context()->PlugTOS();
4175  }
4176  } else {
4177  context()->Plug(v0);
4178  }
4179  break;
4180  }
4181  }
4182 }
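
// Editorial sketch (not part of the original file; SketchSmiAddDelta is an
// illustrative name): the inline smi fast path used by VisitCountOperation.
// Smis keep kSmiTag == 0 in the low bit, so the tagged operand and the
// tagged delta (Smi::FromInt(1) or Smi::FromInt(-1)) can be added directly;
// a non-smi operand or an overflow falls back to the BinaryOpStub call
// recorded by the patch site.
#include <stdint.h>

// Returns true and stores the tagged sum when the operand is a smi and the
// addition does not overflow 32 bits; false means "take the stub call".
static bool SketchSmiAddDelta(int32_t tagged, int32_t tagged_delta,
                              int32_t* result) {
  if ((tagged & 1) != 0) return false;                  // not a smi
  int64_t sum = (int64_t)tagged + (int64_t)tagged_delta;
  if (sum != (int64_t)(int32_t)sum) return false;       // signed overflow
  *result = (int32_t)sum;
  return true;
}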
4183 
4184 
4185 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4186  ASSERT(!context()->IsEffect());
4187  ASSERT(!context()->IsTest());
4188  VariableProxy* proxy = expr->AsVariableProxy();
4189  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4190  Comment cmnt(masm_, "Global variable");
4191  __ lw(a0, GlobalObjectOperand());
4192  __ li(a2, Operand(proxy->name()));
4193  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4194  // Use a regular load, not a contextual load, to avoid a reference
4195  // error.
4196  CallIC(ic);
4197  PrepareForBailout(expr, TOS_REG);
4198  context()->Plug(v0);
4199  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4200  Label done, slow;
4201 
4202  // Generate code for loading from variables potentially shadowed
4203  // by eval-introduced variables.
4204  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4205 
4206  __ bind(&slow);
4207  __ li(a0, Operand(proxy->name()));
4208  __ Push(cp, a0);
4209  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4210  PrepareForBailout(expr, TOS_REG);
4211  __ bind(&done);
4212 
4213  context()->Plug(v0);
4214  } else {
4215  // This expression cannot throw a reference error at the top level.
4216  VisitInDuplicateContext(expr);
4217  }
4218 }
4219 
4220 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4221  Expression* sub_expr,
4222  Handle<String> check) {
4223  Label materialize_true, materialize_false;
4224  Label* if_true = NULL;
4225  Label* if_false = NULL;
4226  Label* fall_through = NULL;
4227  context()->PrepareTest(&materialize_true, &materialize_false,
4228  &if_true, &if_false, &fall_through);
4229 
4230  { AccumulatorValueContext context(this);
4231  VisitForTypeofValue(sub_expr);
4232  }
4233  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4234 
4235  if (check->Equals(isolate()->heap()->number_symbol())) {
4236  __ JumpIfSmi(v0, if_true);
4237  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4238  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4239  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4240  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4241  __ JumpIfSmi(v0, if_false);
4242  // Check for undetectable objects => false.
4243  __ GetObjectType(v0, v0, a1);
4244  __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4245  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4246  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4247  Split(eq, a1, Operand(zero_reg),
4248  if_true, if_false, fall_through);
4249  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4250  __ LoadRoot(at, Heap::kTrueValueRootIndex);
4251  __ Branch(if_true, eq, v0, Operand(at));
4252  __ LoadRoot(at, Heap::kFalseValueRootIndex);
4253  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4254  } else if (FLAG_harmony_typeof &&
4255  check->Equals(isolate()->heap()->null_symbol())) {
4256  __ LoadRoot(at, Heap::kNullValueRootIndex);
4257  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4258  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4259  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4260  __ Branch(if_true, eq, v0, Operand(at));
4261  __ JumpIfSmi(v0, if_false);
4262  // Check for undetectable objects => true.
4263  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4264  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4265  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4266  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4267  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4268  __ JumpIfSmi(v0, if_false);
4269  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4270  __ GetObjectType(v0, v0, a1);
4271  __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4272  Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4273  if_true, if_false, fall_through);
4274  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4275  __ JumpIfSmi(v0, if_false);
4276  if (!FLAG_harmony_typeof) {
4277  __ LoadRoot(at, Heap::kNullValueRootIndex);
4278  __ Branch(if_true, eq, v0, Operand(at));
4279  }
4280  // Check for JS objects => true.
4281  __ GetObjectType(v0, v0, a1);
4282  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4283  __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
4284  __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4285  // Check for undetectable objects => false.
4286  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4287  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4288  Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4289  } else {
4290  if (if_false != fall_through) __ jmp(if_false);
4291  }
4292  context()->Plug(if_true, if_false);
4293 }
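
// Editorial note, summarizing the checks above: each literal typeof string
// is lowered to a direct test. "number": smi or HeapNumber map; "string":
// instance type below FIRST_NONSTRING_TYPE and not undetectable; "boolean":
// identical to the true or false root; "undefined": the undefined root or
// an undetectable heap object; "function": JS_FUNCTION_TYPE or
// JS_FUNCTION_PROXY_TYPE; "object": null (unless --harmony-typeof is on) or
// a non-callable spec object that is not undetectable. Any other literal
// makes the comparison statically false.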
4294 
4295 
4296 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4297  Comment cmnt(masm_, "[ CompareOperation");
4298  SetSourcePosition(expr->position());
4299 
4300  // First we try a fast inlined version of the compare when one of
4301  // the operands is a literal.
4302  if (TryLiteralCompare(expr)) return;
4303 
4304  // Always perform the comparison for its control flow. Pack the result
4305  // into the expression's context after the comparison is performed.
4306  Label materialize_true, materialize_false;
4307  Label* if_true = NULL;
4308  Label* if_false = NULL;
4309  Label* fall_through = NULL;
4310  context()->PrepareTest(&materialize_true, &materialize_false,
4311  &if_true, &if_false, &fall_through);
4312 
4313  Token::Value op = expr->op();
4314  VisitForStackValue(expr->left());
4315  switch (op) {
4316  case Token::IN:
4317  VisitForStackValue(expr->right());
4318  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4319  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4320  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4321  Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4322  break;
4323 
4324  case Token::INSTANCEOF: {
4325  VisitForStackValue(expr->right());
4326  InstanceofStub stub(InstanceofStub::kNoFlags);
4327  __ CallStub(&stub);
4328  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4329  // The stub returns 0 for true.
4330  Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4331  break;
4332  }
4333 
4334  default: {
4335  VisitForAccumulatorValue(expr->right());
4336  Condition cc = eq;
4337  switch (op) {
4338  case Token::EQ_STRICT:
4339  case Token::EQ:
4340  cc = eq;
4341  break;
4342  case Token::LT:
4343  cc = lt;
4344  break;
4345  case Token::GT:
4346  cc = gt;
4347  break;
4348  case Token::LTE:
4349  cc = le;
4350  break;
4351  case Token::GTE:
4352  cc = ge;
4353  break;
4354  case Token::IN:
4355  case Token::INSTANCEOF:
4356  default:
4357  UNREACHABLE();
4358  }
4359  __ mov(a0, result_register());
4360  __ pop(a1);
4361 
4362  bool inline_smi_code = ShouldInlineSmiCase(op);
4363  JumpPatchSite patch_site(masm_);
4364  if (inline_smi_code) {
4365  Label slow_case;
4366  __ Or(a2, a0, Operand(a1));
4367  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4368  Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4369  __ bind(&slow_case);
4370  }
4371  // Record position and call the compare IC.
4372  SetSourcePosition(expr->position());
4373  Handle<Code> ic = CompareIC::GetUninitialized(op);
4374  CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4375  patch_site.EmitPatchInfo();
4376  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4377  Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4378  }
4379  }
4380 
4381  // Convert the result of the comparison into one expected for this
4382  // expression's context.
4383  context()->Plug(if_true, if_false);
4384 }
4385 
4386 
4387 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4388  Expression* sub_expr,
4389  NilValue nil) {
4390  Label materialize_true, materialize_false;
4391  Label* if_true = NULL;
4392  Label* if_false = NULL;
4393  Label* fall_through = NULL;
4394  context()->PrepareTest(&materialize_true, &materialize_false,
4395  &if_true, &if_false, &fall_through);
4396 
4397  VisitForAccumulatorValue(sub_expr);
4398  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4399  Heap::RootListIndex nil_value = nil == kNullValue ?
4400  Heap::kNullValueRootIndex :
4401  Heap::kUndefinedValueRootIndex;
4402  __ mov(a0, result_register());
4403  __ LoadRoot(a1, nil_value);
4404  if (expr->op() == Token::EQ_STRICT) {
4405  Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4406  } else {
4407  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4408  Heap::kUndefinedValueRootIndex :
4409  Heap::kNullValueRootIndex;
4410  __ Branch(if_true, eq, a0, Operand(a1));
4411  __ LoadRoot(a1, other_nil_value);
4412  __ Branch(if_true, eq, a0, Operand(a1));
4413  __ JumpIfSmi(a0, if_false);
4414  // It can be an undetectable object.
4415  __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
4416  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
4417  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4418  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4419  }
4420  context()->Plug(if_true, if_false);
4421 }
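
// Editorial note on the code above: a strict comparison against a nil
// literal (=== null or === undefined) matches only that exact root value,
// while the sloppy form additionally matches the other nil value and any
// undetectable heap object, mirroring the JavaScript rule that x == null
// holds exactly when x is null, undefined, or an undetectable host object.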
4422 
4423 
4424 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4425  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4426  context()->Plug(v0);
4427 }
4428 
4429 
4430 Register FullCodeGenerator::result_register() {
4431  return v0;
4432 }
4433 
4434 
4435 Register FullCodeGenerator::context_register() {
4436  return cp;
4437 }
4438 
4439 
4440 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4441  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4442  __ sw(value, MemOperand(fp, frame_offset));
4443 }
4444 
4445 
4446 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4447  __ lw(dst, ContextOperand(cp, context_index));
4448 }
4449 
4450 
4451 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4452  Scope* declaration_scope = scope()->DeclarationScope();
4453  if (declaration_scope->is_global_scope() ||
4454  declaration_scope->is_module_scope()) {
4455  // Contexts nested in the native context have a canonical empty function
4456  // as their closure, not the anonymous closure containing the global
4457  // code. Pass a smi sentinel and let the runtime look up the empty
4458  // function.
4459  __ li(at, Operand(Smi::FromInt(0)));
4460  } else if (declaration_scope->is_eval_scope()) {
4461  // Contexts created by a call to eval have the same closure as the
4462  // context calling eval, not the anonymous closure containing the eval
4463  // code. Fetch it from the context.
4464  __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4465  } else {
4466  ASSERT(declaration_scope->is_function_scope());
4467  __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4468  }
4469  __ push(at);
4470 }
4471 
4472 
4473 // ----------------------------------------------------------------------------
4474 // Non-local control flow support.
4475 
4476 void FullCodeGenerator::EnterFinallyBlock() {
4477  ASSERT(!result_register().is(a1));
4478  // Store result register while executing finally block.
4479  __ push(result_register());
4480  // Cook return address in link register to stack (smi encoded Code* delta).
4481  __ Subu(a1, ra, Operand(masm_->CodeObject()));
4482  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4483  STATIC_ASSERT(0 == kSmiTag);
4484  __ Addu(a1, a1, Operand(a1)); // Convert to smi.
4485 
4486  // Store result register while executing finally block.
4487  __ push(a1);
4488 
4489  // Store pending message while executing finally block.
4490  ExternalReference pending_message_obj =
4491  ExternalReference::address_of_pending_message_obj(isolate());
4492  __ li(at, Operand(pending_message_obj));
4493  __ lw(a1, MemOperand(at));
4494  __ push(a1);
4495 
4496  ExternalReference has_pending_message =
4497  ExternalReference::address_of_has_pending_message(isolate());
4498  __ li(at, Operand(has_pending_message));
4499  __ lw(a1, MemOperand(at));
4500  __ SmiTag(a1);
4501  __ push(a1);
4502 
4503  ExternalReference pending_message_script =
4504  ExternalReference::address_of_pending_message_script(isolate());
4505  __ li(at, Operand(pending_message_script));
4506  __ lw(a1, MemOperand(at));
4507  __ push(a1);
4508 }
4509 
4510 
4511 void FullCodeGenerator::ExitFinallyBlock() {
4512  ASSERT(!result_register().is(a1));
4513  // Restore pending message from stack.
4514  __ pop(a1);
4515  ExternalReference pending_message_script =
4516  ExternalReference::address_of_pending_message_script(isolate());
4517  __ li(at, Operand(pending_message_script));
4518  __ sw(a1, MemOperand(at));
4519 
4520  __ pop(a1);
4521  __ SmiUntag(a1);
4522  ExternalReference has_pending_message =
4523  ExternalReference::address_of_has_pending_message(isolate());
4524  __ li(at, Operand(has_pending_message));
4525  __ sw(a1, MemOperand(at));
4526 
4527  __ pop(a1);
4528  ExternalReference pending_message_obj =
4529  ExternalReference::address_of_pending_message_obj(isolate());
4530  __ li(at, Operand(pending_message_obj));
4531  __ sw(a1, MemOperand(at));
4532 
4533  // Restore result register from stack.
4534  __ pop(a1);
4535 
4536  // Uncook return address and return.
4537  __ pop(result_register());
4538  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4539  __ sra(a1, a1, 1); // Un-smi-tag value.
4540  __ Addu(at, a1, Operand(masm_->CodeObject()));
4541  __ Jump(at);
4542 }
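
// Editorial sketch (not part of the original file; the Sketch* names are
// illustrative): the return-address "cooking" used by EnterFinallyBlock and
// ExitFinallyBlock above. The raw return address is stored as a smi-encoded
// delta from the code object, so the word on the stack looks like a smi to
// the garbage collector rather than a raw code pointer.
#include <stdint.h>

static intptr_t SketchCookReturnAddress(intptr_t ra, intptr_t code_object) {
  return (ra - code_object) << 1;          // smi-encode the delta
}

static intptr_t SketchUncookReturnAddress(intptr_t cooked,
                                          intptr_t code_object) {
  return (cooked >> 1) + code_object;      // un-smi-tag and rebase
}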
4543 
4544 
4545 #undef __
4546 
4547 #define __ ACCESS_MASM(masm())
4548 
4549 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4550  int* stack_depth,
4551  int* context_length) {
4552  // The macros used here must preserve the result register.
4553 
4554  // Because the handler block contains the context of the finally
4555  // code, we can restore it directly from there for the finally code
4556  // rather than iteratively unwinding contexts via their previous
4557  // links.
4558  __ Drop(*stack_depth); // Down to the handler block.
4559  if (*context_length > 0) {
4560  // Restore the context to its dedicated register and the stack.
4561  __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4562  __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4563  }
4564  __ PopTryHandler();
4565  __ Call(finally_entry_);
4566 
4567  *stack_depth = 0;
4568  *context_length = 0;
4569  return previous_;
4570 }
4571 
4572 
4573 #undef __
4574 
4575 } } // namespace v8::internal
4576 
4577 #endif // V8_TARGET_ARCH_MIPS
static const int kBitFieldOffset
Definition: objects.h:5160
Scope * DeclarationScope()
Definition: scopes.cc:745
int InstructionsGeneratedSince(Label *label)
const intptr_t kSmiTagMask
Definition: v8.h:4016
VariableDeclaration * function() const
Definition: scopes.h:324
static int SlotOffset(int index)
Definition: contexts.h:425
static const int kBuiltinsOffset
Definition: objects.h:6285
static const int kEnumCacheOffset
Definition: objects.h:2632
static String * cast(Object *obj)
void mov(Register rd, Register rt)
const Register cp
const FPURegister f0
static Smi * FromInt(int value)
Definition: objects-inl.h:981
bool IsFastObjectElementsKind(ElementsKind kind)
const int kImm16Mask
static const int kDataOffset
Definition: objects.h:6624
static const int kGlobalReceiverOffset
Definition: objects.h:6288
int SizeOfCodeGeneratedSince(Label *label)
T Max(T a, T b)
Definition: utils.h:222
Scope * outer_scope() const
Definition: scopes.h:348
int int32_t
Definition: unicode.cc:47
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5339
static bool IsSupported(CpuFeature f)
static const int kSize
Definition: objects.h:6625
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:232
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:120
static const int kInObjectFieldCount
Definition: objects.h:6679
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3830
#define POINTER_SIZE_ALIGN(value)
Definition: v8globals.h:387
static const int kMaximumSlots
Definition: code-stubs.h:344
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
Definition: objects.h:5800
bool IsOptimizable() const
Definition: compiler.h:151
Variable * parameter(int index) const
Definition: scopes.h:331
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
Definition: scopes.cc:735
static const int kHashFieldOffset
Definition: objects.h:7319
#define IN
const Register sp
void MultiPop(RegList regs)
static const int kLiteralsOffset
Definition: objects.h:6188
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
Definition: objects.h:7318
static const int kValueOffset
Definition: objects.h:1342
Variable * arguments() const
Definition: scopes.h:339
static const int kForInSlowCaseMarker
Definition: objects.h:4167
static const int kFirstOffset
Definition: objects.h:2633
NilValue
Definition: v8.h:141
static BailoutId Declarations()
Definition: utils.h:1016
const int kPointerSize
Definition: globals.h:220
static const int kForInFastCaseMarker
Definition: objects.h:4166
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:5177
static void MaybeCallEntryHook(MacroAssembler *masm)
const int kHeapObjectTag
Definition: v8.h:4009
void Jump(Register target, Condition cond=al)
#define __
static const int kCacheStampOffset
Definition: objects.h:6476
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:721
static const int kDescriptorSize
Definition: objects.h:2642
static const int kPropertiesOffset
Definition: objects.h:2171
static Register from_code(int code)
int num_parameters() const
Definition: scopes.h:336
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
const SwVfpRegister s0
static const int kHeaderSize
Definition: objects.h:7517
static const int kElementsOffset
Definition: objects.h:2172
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7374
static BailoutId FunctionEntry()
Definition: utils.h:1015
#define BASE_EMBEDDED
Definition: allocation.h:68
friend class BlockTrampolinePoolScope
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:516
Vector< const char > CStrVector(const char *data)
Definition: utils.h:526
static int OffsetOfElementAt(int index)
Definition: objects.h:2356
static const int kLengthOffset
Definition: objects.h:8332
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:2296
static const int kMapOffset
Definition: objects.h:1261
static const int kValueOffset
Definition: objects.h:6468
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:2636
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:545
static const int kLengthOffset
Definition: objects.h:2295
MemOperand FieldMemOperand(Register object, int offset)
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:168
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_bool(enable_unaligned_accesses
const int kSmiShiftSize
Definition: v8.h:4060
const int kSmiTagSize
Definition: v8.h:4015
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:926
static const int kJSReturnSequenceInstructions
static const int kConstructorOffset
Definition: objects.h:5127
const int kSmiTag
Definition: v8.h:4014
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
static const int kIsUndetectable
Definition: objects.h:5171
const FPURegister f12
static bool ShouldGenerateLog(Expression *type)
Definition: codegen.cc:153
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
V8 runtime flag definition
Definition: flags.cc:301
static const int kPrototypeOffset
Definition: objects.h:5126
const Register no_reg
DEFINE_bool(code_comments, ...)
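The flag entries in this index come from V8's command-line flag definition macros; the long help strings are the concatenated flag comments. As an illustration only, here is a hypothetical reduced form of the DEFINE_bool pattern (the real macro in V8's flag-definitions.h is more involved and also registers the flag for command-line parsing) and how generated code such as full-codegen-mips.cc tests the resulting FLAG_ variable.

#include <cstdio>

// Hypothetical reduced form of the flag-definition pattern; DEFINE_BOOL_SKETCH
// is not a V8 macro, it only shows the FLAG_<name> global that results.
#define DEFINE_BOOL_SKETCH(nam, def, cmt) static bool FLAG_##nam = def;

DEFINE_BOOL_SKETCH(code_comments, false, "emit comments in code disassembly")

int main() {
  if (FLAG_code_comments) std::printf("emitting code comments\n");
  return 0;
}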
static const int kValueOffset
Definition: objects.h:6385
bool IsImmutableVariableMode(VariableMode mode)
Definition: v8globals.h:526
const Register fp
static const int kNativeContextOffset
Definition: objects.h:6286
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:6185
V8 runtime flag definition
Definition: flags.cc:495
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1731
const FPURegister f14
static const int kMaxValue
Definition: objects.h:1050
static const int kBitField2Offset
Definition: objects.h:5161
#define VOID
static Handle< Code > GetUninitialized(Token::Value op)
Definition: ic.cc:2565
void check(i::Vector< const char > string)
static const int kInstanceTypeOffset
Definition: objects.h:5158
TypeofState
Definition: codegen.h:70
Scope * scope() const
Definition: compiler.h:67
kPropertyAccessorsOffset, kNamedPropertyHandlerOffset, kInstanceTemplateOffset, kAccessCheckInfoOffset, kEvalFrominstructionsOffsetOffset, kInstanceClassNameOffset, flag
Definition: objects-inl.h:3923