v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine.
full-codegen-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_MIPS)
31 
32 // Note on Mips implementation:
33 //
34 // The result_register() for mips is the 'v0' register, which is defined
35 // by the ABI to contain function return values. However, the first
36 // parameter to a function is defined to be 'a0'. So there are many
37 // places where we have to move a previous result in v0 to a0 for the
38 // next call: mov(a0, v0). This is not needed on the other architectures.
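// An illustrative (not verbatim) sequence: when one emitted call produces a
// value that the next call consumes as its first argument, the generated
// code looks like
//   __ CallStub(&first_stub);   // result ends up in v0 per the ABI
//   __ mov(a0, v0);             // shuffle it into the first-argument register
//   __ CallStub(&second_stub);  // reads its argument from a0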
39 
40 #include "code-stubs.h"
41 #include "codegen.h"
42 #include "compiler.h"
43 #include "debug.h"
44 #include "full-codegen.h"
45 #include "isolate-inl.h"
46 #include "parser.h"
47 #include "scopes.h"
48 #include "stub-cache.h"
49 
50 #include "mips/code-stubs-mips.h"
51 #include "mips/macro-assembler-mips.h"
52 
53 namespace v8 {
54 namespace internal {
55 
56 #define __ ACCESS_MASM(masm_)
57 
58 
59 // A patch site is a location in the code which it is possible to patch. This
60 // class has a number of methods to emit the code which is patchable and the
61 // method EmitPatchInfo to record a marker back to the patchable code. This
62 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
63 // (raw 16 bit immediate value is used) is the delta from the pc to the first
64 // instruction of the patchable code.
65 // The marker instruction is effectively a NOP (dest is zero_reg) and will
66 // never be emitted by normal code.
67 class JumpPatchSite BASE_EMBEDDED {
68  public:
69  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
70 #ifdef DEBUG
71  info_emitted_ = false;
72 #endif
73  }
74 
75  ~JumpPatchSite() {
76  ASSERT(patch_site_.is_bound() == info_emitted_);
77  }
78 
79  // When initially emitting this ensure that a jump is always generated to skip
80  // the inlined smi code.
81  void EmitJumpIfNotSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
84  __ bind(&patch_site_);
85  __ andi(at, reg, 0);
86  // Always taken before patched.
87  __ Branch(target, eq, at, Operand(zero_reg));
88  }
89 
90  // When initially emitting this ensure that a jump is never generated to skip
91  // the inlined smi code.
92  void EmitJumpIfSmi(Register reg, Label* target) {
93  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
94  ASSERT(!patch_site_.is_bound() && !info_emitted_);
95  __ bind(&patch_site_);
96  __ andi(at, reg, 0);
97  // Never taken before patched.
98  __ Branch(target, ne, at, Operand(zero_reg));
99  }
100 
101  void EmitPatchInfo() {
102  if (patch_site_.is_bound()) {
103  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
104  Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
105  __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
106 #ifdef DEBUG
107  info_emitted_ = true;
108 #endif
109  } else {
110  __ nop(); // Signals no inlined code.
111  }
112  }
113 
114  private:
115  MacroAssembler* masm_;
116  Label patch_site_;
117 #ifdef DEBUG
118  bool info_emitted_;
119 #endif
120 };
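// Illustrative decoding of the patch info emitted above: EmitPatchInfo()
// encodes delta_to_patch_site as register_code * kImm16Mask + imm16 in a
// single "andi zero_reg, reg, imm16" marker. For a small delta such as 6
// instructions, 6 / 0xffff == 0 (register code 0, i.e. zero_reg) and
// 6 % 0xffff == 6, so the marker is "andi zero_reg, zero_reg, 0x0006" and
// the patcher recovers the delta as 0 * 0xffff + 6.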
121 
122 
123 // Generate code for a JS function. On entry to the function the receiver
124 // and arguments have been pushed on the stack left to right. The actual
125 // argument count matches the formal parameter count expected by the
126 // function.
127 //
128 // The live registers are:
129 // o a1: the JS function object being called (i.e. ourselves)
130 // o cp: our context
131 // o fp: our caller's frame pointer
132 // o sp: stack pointer
133 // o ra: return address
134 //
135 // The function builds a JS frame. Please see JavaScriptFrameConstants in
136 // frames-mips.h for its layout.
137 void FullCodeGenerator::Generate() {
138  CompilationInfo* info = info_;
139  handler_table_ =
140  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
141  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
142  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
143  SetFunctionPosition(function());
144  Comment cmnt(masm_, "[ function compiled by full code generator");
145 
146 #ifdef DEBUG
147  if (strlen(FLAG_stop_at) > 0 &&
148  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
149  __ stop("stop-at");
150  }
151 #endif
152 
153  // Strict mode functions and builtins need to replace the receiver
154  // with undefined when called as functions (without an explicit
155  // receiver object). t1 is zero for method calls and non-zero for
156  // function calls.
157  if (!info->is_classic_mode() || info->is_native()) {
158  Label ok;
159  __ Branch(&ok, eq, t1, Operand(zero_reg));
160  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
161  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
162  __ sw(a2, MemOperand(sp, receiver_offset));
163  __ bind(&ok);
164  }
165 
166  // Open a frame scope to indicate that there is a frame on the stack. The
167  // MANUAL indicates that the scope shouldn't actually generate code to set up
168  // the frame (that is done below).
169  FrameScope frame_scope(masm_, StackFrame::MANUAL);
170 
171  int locals_count = info->scope()->num_stack_slots();
172 
173  __ Push(ra, fp, cp, a1);
174  if (locals_count > 0) {
175  // Load undefined value here, so the value is ready for the loop
176  // below.
177  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
178  }
179  // Adjust fp to point to caller's fp.
180  __ Addu(fp, sp, Operand(2 * kPointerSize));
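// After the Push above, the stack holds (from higher to lower addresses)
// ra, the caller's fp, cp and the function (a1); fp therefore points at the
// saved caller-fp slot, matching the JavaScriptFrameConstants layout
// described in frames-mips.h.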
181 
182  { Comment cmnt(masm_, "[ Allocate locals");
183  for (int i = 0; i < locals_count; i++) {
184  __ push(at);
185  }
186  }
187 
188  bool function_in_register = true;
189 
190  // Possibly allocate a local context.
191  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
192  if (heap_slots > 0) {
193  Comment cmnt(masm_, "[ Allocate local context");
194  // Argument to NewContext is the function, which is in a1.
195  __ push(a1);
196  if (heap_slots <= FastNewContextStub::kMaximumSlots) {
197  FastNewContextStub stub(heap_slots);
198  __ CallStub(&stub);
199  } else {
200  __ CallRuntime(Runtime::kNewFunctionContext, 1);
201  }
202  function_in_register = false;
203  // Context is returned in both v0 and cp. It replaces the context
204  // passed to us. It's saved in the stack and kept live in cp.
205  __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
206  // Copy any necessary parameters into the context.
207  int num_parameters = info->scope()->num_parameters();
208  for (int i = 0; i < num_parameters; i++) {
209  Variable* var = scope()->parameter(i);
210  if (var->IsContextSlot()) {
211  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
212  (num_parameters - 1 - i) * kPointerSize;
213  // Load parameter from stack.
214  __ lw(a0, MemOperand(fp, parameter_offset));
215  // Store it in the context.
216  MemOperand target = ContextOperand(cp, var->index());
217  __ sw(a0, target);
218 
219  // Update the write barrier.
220  __ RecordWriteContextSlot(
221  cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
222  }
223  }
224  }
225 
226  Variable* arguments = scope()->arguments();
227  if (arguments != NULL) {
228  // Function uses arguments object.
229  Comment cmnt(masm_, "[ Allocate arguments object");
230  if (!function_in_register) {
231  // Load this again, if it's used by the local context below.
232  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
233  } else {
234  __ mov(a3, a1);
235  }
236  // Receiver is just before the parameters on the caller's stack.
237  int num_parameters = info->scope()->num_parameters();
238  int offset = num_parameters * kPointerSize;
239  __ Addu(a2, fp,
240  Operand(StandardFrameConstants::kCallerSPOffset + offset));
241  __ li(a1, Operand(Smi::FromInt(num_parameters)));
242  __ Push(a3, a2, a1);
243 
244  // Arguments to ArgumentsAccessStub:
245  // function, receiver address, parameter count.
246  // The stub will rewrite receiver and parameter count if the previous
247  // stack frame was an arguments adapter frame.
248  ArgumentsAccessStub::Type type;
249  if (!is_classic_mode()) {
250  type = ArgumentsAccessStub::NEW_STRICT;
251  } else if (function()->has_duplicate_parameters()) {
252  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
253  } else {
254  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
255  }
256  ArgumentsAccessStub stub(type);
257  __ CallStub(&stub);
258 
259  SetVar(arguments, v0, a1, a2);
260  }
261 
262  if (FLAG_trace) {
263  __ CallRuntime(Runtime::kTraceEnter, 0);
264  }
265 
266  // Visit the declarations and body unless there is an illegal
267  // redeclaration.
268  if (scope()->HasIllegalRedeclaration()) {
269  Comment cmnt(masm_, "[ Declarations");
270  scope()->VisitIllegalRedeclaration(this);
271 
272  } else {
273  PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
274  { Comment cmnt(masm_, "[ Declarations");
275  // For named function expressions, declare the function name as a
276  // constant.
277  if (scope()->is_function_scope() && scope()->function() != NULL) {
278  VariableDeclaration* function = scope()->function();
279  ASSERT(function->proxy()->var()->mode() == CONST ||
280  function->proxy()->var()->mode() == CONST_HARMONY);
281  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
282  VisitVariableDeclaration(function);
283  }
284  VisitDeclarations(scope()->declarations());
285  }
286 
287  { Comment cmnt(masm_, "[ Stack check");
288  PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
289  Label ok;
290  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
291  __ Branch(&ok, hs, sp, Operand(t0));
292  StackCheckStub stub;
293  __ CallStub(&stub);
294  __ bind(&ok);
295  }
296 
297  { Comment cmnt(masm_, "[ Body");
298  ASSERT(loop_depth() == 0);
299  VisitStatements(function()->body());
300  ASSERT(loop_depth() == 0);
301  }
302  }
303 
304  // Always emit a 'return undefined' in case control fell off the end of
305  // the body.
306  { Comment cmnt(masm_, "[ return <undefined>;");
307  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
308  }
309  EmitReturnSequence();
310 }
311 
312 
313 void FullCodeGenerator::ClearAccumulator() {
314  ASSERT(Smi::FromInt(0) == 0);
315  __ mov(v0, zero_reg);
316 }
317 
318 
319 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
320  __ li(a2, Operand(profiling_counter_));
321  __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
322  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
323  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
324 }
325 
326 
327 void FullCodeGenerator::EmitProfilingCounterReset() {
328  int reset_value = FLAG_interrupt_budget;
329  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
330  // Self-optimization is a one-off thing: if it fails, don't try again.
331  reset_value = Smi::kMaxValue;
332  }
333  if (isolate()->IsDebuggerActive()) {
334  // Detect debug break requests as soon as possible.
335  reset_value = 10;
336  }
337  __ li(a2, Operand(profiling_counter_));
338  __ li(a3, Operand(Smi::FromInt(reset_value)));
339  __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
340 }
341 
342 
343 static const int kMaxBackEdgeWeight = 127;
344 static const int kBackEdgeDistanceDivisor = 142;
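// Illustrative weight computation with these constants: a back edge whose
// target is roughly 1000 bytes of code away gets a weight of
// Min(127, Max(1, 1000 / 142)) == 7, while a very large loop body (say
// 50000 bytes) is clamped to the maximum weight of 127.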
345 
346 
347 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
348  Label* back_edge_target) {
349  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
350  // to make sure it is constant. Branch may emit a skip-or-jump sequence
351  // instead of the normal Branch. It seems that the "skip" part of that
352  // sequence is about as long as this Branch would be so it is safe to ignore
353  // that.
354  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
355  Comment cmnt(masm_, "[ Stack check");
356  Label ok;
357  if (FLAG_count_based_interrupts) {
358  int weight = 1;
359  if (FLAG_weighted_back_edges) {
360  ASSERT(back_edge_target->is_bound());
361  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
362  weight = Min(kMaxBackEdgeWeight,
363  Max(1, distance / kBackEdgeDistanceDivisor));
364  }
365  EmitProfilingCounterDecrement(weight);
366  __ slt(at, a3, zero_reg);
367  __ beq(at, zero_reg, &ok);
368  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
369  InterruptStub stub;
370  __ CallStub(&stub);
371  } else {
372  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
373  __ sltu(at, sp, t0);
374  __ beq(at, zero_reg, &ok);
375  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
376  StackCheckStub stub;
377  __ CallStub(&stub);
378  }
379  // Record a mapping of this PC offset to the OSR id. This is used to find
380  // the AST id from the unoptimized code in order to use it as a key into
381  // the deoptimization input data found in the optimized code.
382  RecordStackCheck(stmt->OsrEntryId());
383  if (FLAG_count_based_interrupts) {
384  EmitProfilingCounterReset();
385  }
386 
387  __ bind(&ok);
388  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
389  // Record a mapping of the OSR id to this PC. This is used if the OSR
390  // entry becomes the target of a bailout. We don't expect it to be, but
391  // we want it to work if it is.
392  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
393 }
394 
395 
396 void FullCodeGenerator::EmitReturnSequence() {
397  Comment cmnt(masm_, "[ Return sequence");
398  if (return_label_.is_bound()) {
399  __ Branch(&return_label_);
400  } else {
401  __ bind(&return_label_);
402  if (FLAG_trace) {
403  // Push the return value on the stack as the parameter.
404  // Runtime::TraceExit returns its parameter in v0.
405  __ push(v0);
406  __ CallRuntime(Runtime::kTraceExit, 1);
407  }
408  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
409  // Pretend that the exit is a backwards jump to the entry.
410  int weight = 1;
411  if (info_->ShouldSelfOptimize()) {
412  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
413  } else if (FLAG_weighted_back_edges) {
414  int distance = masm_->pc_offset();
415  weight = Min(kMaxBackEdgeWeight,
416  Max(1, distance / kBackEdgeDistanceDivisor));
417  }
418  EmitProfilingCounterDecrement(weight);
419  Label ok;
420  __ Branch(&ok, ge, a3, Operand(zero_reg));
421  __ push(v0);
422  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
423  __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
424  __ push(a2);
425  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
426  } else {
427  InterruptStub stub;
428  __ CallStub(&stub);
429  }
430  __ pop(v0);
431  EmitProfilingCounterReset();
432  __ bind(&ok);
433  }
434 
435 #ifdef DEBUG
436  // Add a label for checking the size of the code used for returning.
437  Label check_exit_codesize;
438  masm_->bind(&check_exit_codesize);
439 #endif
440  // Make sure that the constant pool is not emitted inside of the return
441  // sequence.
442  { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
443  // Here we use masm_-> instead of the __ macro to prevent the code coverage
444  // tool from instrumenting it, as we rely on the code size here.
445  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
446  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
447  __ RecordJSReturn();
448  masm_->mov(sp, fp);
449  masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
450  masm_->Addu(sp, sp, Operand(sp_delta));
451  masm_->Jump(ra);
452  }
453 
454 #ifdef DEBUG
455  // Check that the size of the code used for returning is large enough
456  // for the debugger's requirements.
457  ASSERT(Assembler::kJSReturnSequenceInstructions <=
458  masm_->InstructionsGeneratedSince(&check_exit_codesize));
459 #endif
460  }
461 }
462 
463 
464 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
465  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
466 }
467 
468 
469 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
470  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
471  codegen()->GetVar(result_register(), var);
472 }
473 
474 
475 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
476  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
477  codegen()->GetVar(result_register(), var);
478  __ push(result_register());
479 }
480 
481 
482 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
483  // For simplicity we always test the accumulator register.
484  codegen()->GetVar(result_register(), var);
485  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
486  codegen()->DoTest(this);
487 }
488 
489 
490 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
491 }
492 
493 
494 void FullCodeGenerator::AccumulatorValueContext::Plug(
495  Heap::RootListIndex index) const {
496  __ LoadRoot(result_register(), index);
497 }
498 
499 
500 void FullCodeGenerator::StackValueContext::Plug(
501  Heap::RootListIndex index) const {
502  __ LoadRoot(result_register(), index);
503  __ push(result_register());
504 }
505 
506 
507 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
508  codegen()->PrepareForBailoutBeforeSplit(condition(),
509  true,
510  true_label_,
511  false_label_);
512  if (index == Heap::kUndefinedValueRootIndex ||
513  index == Heap::kNullValueRootIndex ||
514  index == Heap::kFalseValueRootIndex) {
515  if (false_label_ != fall_through_) __ Branch(false_label_);
516  } else if (index == Heap::kTrueValueRootIndex) {
517  if (true_label_ != fall_through_) __ Branch(true_label_);
518  } else {
519  __ LoadRoot(result_register(), index);
520  codegen()->DoTest(this);
521  }
522 }
523 
524 
525 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
526 }
527 
528 
529 void FullCodeGenerator::AccumulatorValueContext::Plug(
530  Handle<Object> lit) const {
531  __ li(result_register(), Operand(lit));
532 }
533 
534 
535 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
536  // Immediates cannot be pushed directly.
537  __ li(result_register(), Operand(lit));
538  __ push(result_register());
539 }
540 
541 
542 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
543  codegen()->PrepareForBailoutBeforeSplit(condition(),
544  true,
545  true_label_,
546  false_label_);
547  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
548  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
549  if (false_label_ != fall_through_) __ Branch(false_label_);
550  } else if (lit->IsTrue() || lit->IsJSObject()) {
551  if (true_label_ != fall_through_) __ Branch(true_label_);
552  } else if (lit->IsString()) {
553  if (String::cast(*lit)->length() == 0) {
554  if (false_label_ != fall_through_) __ Branch(false_label_);
555  } else {
556  if (true_label_ != fall_through_) __ Branch(true_label_);
557  }
558  } else if (lit->IsSmi()) {
559  if (Smi::cast(*lit)->value() == 0) {
560  if (false_label_ != fall_through_) __ Branch(false_label_);
561  } else {
562  if (true_label_ != fall_through_) __ Branch(true_label_);
563  }
564  } else {
565  // For simplicity we always test the accumulator register.
566  __ li(result_register(), Operand(lit));
567  codegen()->DoTest(this);
568  }
569 }
570 
571 
572 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
573  Register reg) const {
574  ASSERT(count > 0);
575  __ Drop(count);
576 }
577 
578 
579 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
580  int count,
581  Register reg) const {
582  ASSERT(count > 0);
583  __ Drop(count);
584  __ Move(result_register(), reg);
585 }
586 
587 
588 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
589  Register reg) const {
590  ASSERT(count > 0);
591  if (count > 1) __ Drop(count - 1);
592  __ sw(reg, MemOperand(sp, 0));
593 }
594 
595 
596 void FullCodeGenerator::TestContext::DropAndPlug(int count,
597  Register reg) const {
598  ASSERT(count > 0);
599  // For simplicity we always test the accumulator register.
600  __ Drop(count);
601  __ Move(result_register(), reg);
602  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
603  codegen()->DoTest(this);
604 }
605 
606 
607 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
608  Label* materialize_false) const {
609  ASSERT(materialize_true == materialize_false);
610  __ bind(materialize_true);
611 }
612 
613 
614 void FullCodeGenerator::AccumulatorValueContext::Plug(
615  Label* materialize_true,
616  Label* materialize_false) const {
617  Label done;
618  __ bind(materialize_true);
619  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
620  __ Branch(&done);
621  __ bind(materialize_false);
622  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
623  __ bind(&done);
624 }
625 
626 
627 void FullCodeGenerator::StackValueContext::Plug(
628  Label* materialize_true,
629  Label* materialize_false) const {
630  Label done;
631  __ bind(materialize_true);
632  __ LoadRoot(at, Heap::kTrueValueRootIndex);
633  __ push(at);
634  __ Branch(&done);
635  __ bind(materialize_false);
636  __ LoadRoot(at, Heap::kFalseValueRootIndex);
637  __ push(at);
638  __ bind(&done);
639 }
640 
641 
642 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
643  Label* materialize_false) const {
644  ASSERT(materialize_true == true_label_);
645  ASSERT(materialize_false == false_label_);
646 }
647 
648 
649 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
650 }
651 
652 
653 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
654  Heap::RootListIndex value_root_index =
655  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
656  __ LoadRoot(result_register(), value_root_index);
657 }
658 
659 
660 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
661  Heap::RootListIndex value_root_index =
662  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
663  __ LoadRoot(at, value_root_index);
664  __ push(at);
665 }
666 
667 
668 void FullCodeGenerator::TestContext::Plug(bool flag) const {
669  codegen()->PrepareForBailoutBeforeSplit(condition(),
670  true,
671  true_label_,
672  false_label_);
673  if (flag) {
674  if (true_label_ != fall_through_) __ Branch(true_label_);
675  } else {
676  if (false_label_ != fall_through_) __ Branch(false_label_);
677  }
678 }
679 
680 
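// Converts the value left in the result register to a boolean and splits
// control flow to if_true/if_false (omitting whichever branch falls
// through). A ToBoolean stub is used where possible; otherwise the value is
// passed to Runtime::kToBool and compared against the false root.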
681 void FullCodeGenerator::DoTest(Expression* condition,
682  Label* if_true,
683  Label* if_false,
684  Label* fall_through) {
685  if (CpuFeatures::IsSupported(FPU)) {
686  ToBooleanStub stub(result_register());
687  __ CallStub(&stub);
688  __ mov(at, zero_reg);
689  } else {
690  // Call the runtime to find the boolean value of the source and then
691  // translate it into control flow to the pair of labels.
692  __ push(result_register());
693  __ CallRuntime(Runtime::kToBool, 1);
694  __ LoadRoot(at, Heap::kFalseValueRootIndex);
695  }
696  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
697 }
698 
699 
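// Emits the conditional branch(es) for a test: jump to if_true when cc holds
// for (lhs, rhs) and to if_false otherwise, skipping whichever label is the
// fall-through so at most one branch (plus an unconditional jump) is emitted.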
700 void FullCodeGenerator::Split(Condition cc,
701  Register lhs,
702  const Operand& rhs,
703  Label* if_true,
704  Label* if_false,
705  Label* fall_through) {
706  if (if_false == fall_through) {
707  __ Branch(if_true, cc, lhs, rhs);
708  } else if (if_true == fall_through) {
709  __ Branch(if_false, NegateCondition(cc), lhs, rhs);
710  } else {
711  __ Branch(if_true, cc, lhs, rhs);
712  __ Branch(if_false);
713  }
714 }
715 
716 
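// Computes the fp-relative operand of a stack-allocated variable. The slot
// index is negated (higher indexes live at lower addresses) and then biased
// by the parameter or local base offset within the frame.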
717 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
718  ASSERT(var->IsStackAllocated());
719  // Offset is negative because higher indexes are at lower addresses.
720  int offset = -var->index() * kPointerSize;
721  // Adjust by a (parameter or local) base offset.
722  if (var->IsParameter()) {
723  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
724  } else {
725  offset += JavaScriptFrameConstants::kLocal0Offset;
726  }
727  return MemOperand(fp, offset);
728 }
729 
730 
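// Like StackOperand(), but also handles context-allocated variables: for
// those, the context chain is walked into |scratch| and a context slot
// operand is returned instead.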
731 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
732  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
733  if (var->IsContextSlot()) {
734  int context_chain_length = scope()->ContextChainLength(var->scope());
735  __ LoadContext(scratch, context_chain_length);
736  return ContextOperand(scratch, var->index());
737  } else {
738  return StackOperand(var);
739  }
740 }
741 
742 
743 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
744  // Use destination as scratch.
745  MemOperand location = VarOperand(var, dest);
746  __ lw(dest, location);
747 }
748 
749 
750 void FullCodeGenerator::SetVar(Variable* var,
751  Register src,
752  Register scratch0,
753  Register scratch1) {
754  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
755  ASSERT(!scratch0.is(src));
756  ASSERT(!scratch0.is(scratch1));
757  ASSERT(!scratch1.is(src));
758  MemOperand location = VarOperand(var, scratch0);
759  __ sw(src, location);
760  // Emit the write barrier code if the location is in the heap.
761  if (var->IsContextSlot()) {
762  __ RecordWriteContextSlot(scratch0,
763  location.offset(),
764  src,
765  scratch1,
766  kRAHasBeenSaved,
767  kDontSaveFPRegs);
768  }
769 }
770 
771 
772 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
773  bool should_normalize,
774  Label* if_true,
775  Label* if_false) {
776  // Only prepare for bailouts before splits if we're in a test
777  // context. Otherwise, we let the Visit function deal with the
778  // preparation to avoid preparing with the same AST id twice.
779  if (!context()->IsTest() || !info_->IsOptimizable()) return;
780 
781  Label skip;
782  if (should_normalize) __ Branch(&skip);
783  PrepareForBailout(expr, TOS_REG);
784  if (should_normalize) {
785  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
786  Split(eq, a0, Operand(t0), if_true, if_false, NULL);
787  __ bind(&skip);
788  }
789 }
790 
791 
792 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
793  // The variable in the declaration always resides in the current function
794  // context.
795  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
796  if (FLAG_debug_code) {
797  // Check that we're not inside a with or catch context.
798  __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
799  __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
800  __ Check(ne, "Declaration in with context.",
801  a1, Operand(t0));
802  __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
803  __ Check(ne, "Declaration in catch context.",
804  a1, Operand(t0));
805  }
806 }
807 
808 
809 void FullCodeGenerator::VisitVariableDeclaration(
810  VariableDeclaration* declaration) {
811  // If it was not possible to allocate the variable at compile time, we
812  // need to "declare" it at runtime to make sure it actually exists in the
813  // local context.
814  VariableProxy* proxy = declaration->proxy();
815  VariableMode mode = declaration->mode();
816  Variable* variable = proxy->var();
817  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
818  switch (variable->location()) {
819  case Variable::UNALLOCATED:
820  globals_->Add(variable->name(), zone());
821  globals_->Add(variable->binding_needs_init()
822  ? isolate()->factory()->the_hole_value()
823  : isolate()->factory()->undefined_value(),
824  zone());
825  break;
826 
827  case Variable::PARAMETER:
828  case Variable::LOCAL:
829  if (hole_init) {
830  Comment cmnt(masm_, "[ VariableDeclaration");
831  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
832  __ sw(t0, StackOperand(variable));
833  }
834  break;
835 
836  case Variable::CONTEXT:
837  if (hole_init) {
838  Comment cmnt(masm_, "[ VariableDeclaration");
839  EmitDebugCheckDeclarationContext(variable);
840  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
841  __ sw(at, ContextOperand(cp, variable->index()));
842  // No write barrier since the_hole_value is in old space.
843  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
844  }
845  break;
846 
847  case Variable::LOOKUP: {
848  Comment cmnt(masm_, "[ VariableDeclaration");
849  __ li(a2, Operand(variable->name()));
850  // Declaration nodes are always introduced in one of four modes.
851  ASSERT(mode == VAR || mode == LET ||
852  mode == CONST || mode == CONST_HARMONY);
853  PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
854  ? READ_ONLY : NONE;
855  __ li(a1, Operand(Smi::FromInt(attr)));
856  // Push initial value, if any.
857  // Note: For variables we must not push an initial value (such as
858  // 'undefined') because we may have a (legal) redeclaration and we
859  // must not destroy the current value.
860  if (hole_init) {
861  __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
862  __ Push(cp, a2, a1, a0);
863  } else {
864  ASSERT(Smi::FromInt(0) == 0);
865  __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
866  __ Push(cp, a2, a1, a0);
867  }
868  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
869  break;
870  }
871  }
872 }
873 
874 
875 void FullCodeGenerator::VisitFunctionDeclaration(
876  FunctionDeclaration* declaration) {
877  VariableProxy* proxy = declaration->proxy();
878  Variable* variable = proxy->var();
879  switch (variable->location()) {
880  case Variable::UNALLOCATED: {
881  globals_->Add(variable->name(), zone());
882  Handle<SharedFunctionInfo> function =
883  Compiler::BuildFunctionInfo(declaration->fun(), script());
884  // Check for stack-overflow exception.
885  if (function.is_null()) return SetStackOverflow();
886  globals_->Add(function, zone());
887  break;
888  }
889 
890  case Variable::PARAMETER:
891  case Variable::LOCAL: {
892  Comment cmnt(masm_, "[ FunctionDeclaration");
893  VisitForAccumulatorValue(declaration->fun());
894  __ sw(result_register(), StackOperand(variable));
895  break;
896  }
897 
898  case Variable::CONTEXT: {
899  Comment cmnt(masm_, "[ FunctionDeclaration");
900  EmitDebugCheckDeclarationContext(variable);
901  VisitForAccumulatorValue(declaration->fun());
902  __ sw(result_register(), ContextOperand(cp, variable->index()));
903  int offset = Context::SlotOffset(variable->index());
904  // We know that we have written a function, which is not a smi.
905  __ RecordWriteContextSlot(cp,
906  offset,
907  result_register(),
908  a2,
909  kRAHasBeenSaved,
910  kDontSaveFPRegs,
911  EMIT_REMEMBERED_SET,
912  OMIT_SMI_CHECK);
913  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
914  break;
915  }
916 
917  case Variable::LOOKUP: {
918  Comment cmnt(masm_, "[ FunctionDeclaration");
919  __ li(a2, Operand(variable->name()));
920  __ li(a1, Operand(Smi::FromInt(NONE)));
921  __ Push(cp, a2, a1);
922  // Push initial value for function declaration.
923  VisitForStackValue(declaration->fun());
924  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
925  break;
926  }
927  }
928 }
929 
930 
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932  VariableProxy* proxy = declaration->proxy();
933  Variable* variable = proxy->var();
934  Handle<JSModule> instance = declaration->module()->interface()->Instance();
935  ASSERT(!instance.is_null());
936 
937  switch (variable->location()) {
938  case Variable::UNALLOCATED: {
939  Comment cmnt(masm_, "[ ModuleDeclaration");
940  globals_->Add(variable->name(), zone());
941  globals_->Add(instance, zone());
942  Visit(declaration->module());
943  break;
944  }
945 
946  case Variable::CONTEXT: {
947  Comment cmnt(masm_, "[ ModuleDeclaration");
948  EmitDebugCheckDeclarationContext(variable);
949  __ li(a1, Operand(instance));
950  __ sw(a1, ContextOperand(cp, variable->index()));
951  Visit(declaration->module());
952  break;
953  }
954 
955  case Variable::PARAMETER:
956  case Variable::LOCAL:
957  case Variable::LOOKUP:
958  UNREACHABLE();
959  }
960 }
961 
962 
963 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
964  VariableProxy* proxy = declaration->proxy();
965  Variable* variable = proxy->var();
966  switch (variable->location()) {
967  case Variable::UNALLOCATED:
968  // TODO(rossberg)
969  break;
970 
971  case Variable::CONTEXT: {
972  Comment cmnt(masm_, "[ ImportDeclaration");
973  EmitDebugCheckDeclarationContext(variable);
974  // TODO(rossberg)
975  break;
976  }
977 
978  case Variable::PARAMETER:
979  case Variable::LOCAL:
980  case Variable::LOOKUP:
981  UNREACHABLE();
982  }
983 }
984 
985 
986 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
987  // TODO(rossberg)
988 }
989 
990 
991 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
992  // Call the runtime to declare the globals.
993  // The context is the first argument.
994  __ li(a1, Operand(pairs));
995  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
996  __ Push(cp, a1, a0);
997  __ CallRuntime(Runtime::kDeclareGlobals, 3);
998  // Return value is ignored.
999 }
1000 
1001 
1002 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1003  Comment cmnt(masm_, "[ SwitchStatement");
1004  Breakable nested_statement(this, stmt);
1005  SetStatementPosition(stmt);
1006 
1007  // Keep the switch value on the stack until a case matches.
1008  VisitForStackValue(stmt->tag());
1009  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1010 
1011  ZoneList<CaseClause*>* clauses = stmt->cases();
1012  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1013 
1014  Label next_test; // Recycled for each test.
1015  // Compile all the tests with branches to their bodies.
1016  for (int i = 0; i < clauses->length(); i++) {
1017  CaseClause* clause = clauses->at(i);
1018  clause->body_target()->Unuse();
1019 
1020  // The default is not a test, but remember it as final fall through.
1021  if (clause->is_default()) {
1022  default_clause = clause;
1023  continue;
1024  }
1025 
1026  Comment cmnt(masm_, "[ Case comparison");
1027  __ bind(&next_test);
1028  next_test.Unuse();
1029 
1030  // Compile the label expression.
1031  VisitForAccumulatorValue(clause->label());
1032  __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1033 
1034  // Perform the comparison as if via '==='.
1035  __ lw(a1, MemOperand(sp, 0)); // Switch value.
1036  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1037  JumpPatchSite patch_site(masm_);
1038  if (inline_smi_code) {
1039  Label slow_case;
1040  __ or_(a2, a1, a0);
1041  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1042 
1043  __ Branch(&next_test, ne, a1, Operand(a0));
1044  __ Drop(1); // Switch value is no longer needed.
1045  __ Branch(clause->body_target());
1046 
1047  __ bind(&slow_case);
1048  }
1049 
1050  // Record position before stub call for type feedback.
1051  SetSourcePosition(clause->position());
1052  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1053  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1054  patch_site.EmitPatchInfo();
1055 
1056  __ Branch(&next_test, ne, v0, Operand(zero_reg));
1057  __ Drop(1); // Switch value is no longer needed.
1058  __ Branch(clause->body_target());
1059  }
1060 
1061  // Discard the test value and jump to the default if present, otherwise to
1062  // the end of the statement.
1063  __ bind(&next_test);
1064  __ Drop(1); // Switch value is no longer needed.
1065  if (default_clause == NULL) {
1066  __ Branch(nested_statement.break_label());
1067  } else {
1068  __ Branch(default_clause->body_target());
1069  }
1070 
1071  // Compile all the case bodies.
1072  for (int i = 0; i < clauses->length(); i++) {
1073  Comment cmnt(masm_, "[ Case body");
1074  CaseClause* clause = clauses->at(i);
1075  __ bind(clause->body_target());
1076  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1077  VisitStatements(clause->statements());
1078  }
1079 
1080  __ bind(nested_statement.break_label());
1081  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1082 }
1083 
1084 
1085 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1086  Comment cmnt(masm_, "[ ForInStatement");
1087  SetStatementPosition(stmt);
1088 
1089  Label loop, exit;
1090  ForIn loop_statement(this, stmt);
1091  increment_loop_depth();
1092 
1093  // Get the object to enumerate over. Both SpiderMonkey and JSC
1094  // ignore null and undefined in contrast to the specification; see
1095  // ECMA-262 section 12.6.4.
1096  VisitForAccumulatorValue(stmt->enumerable());
1097  __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1098  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1099  __ Branch(&exit, eq, a0, Operand(at));
1100  Register null_value = t1;
1101  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1102  __ Branch(&exit, eq, a0, Operand(null_value));
1103  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1104  __ mov(a0, v0);
1105  // Convert the object to a JS object.
1106  Label convert, done_convert;
1107  __ JumpIfSmi(a0, &convert);
1108  __ GetObjectType(a0, a1, a1);
1109  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1110  __ bind(&convert);
1111  __ push(a0);
1112  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1113  __ mov(a0, v0);
1114  __ bind(&done_convert);
1115  __ push(a0);
1116 
1117  // Check for proxies.
1118  Label call_runtime;
1119  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1120  __ GetObjectType(a0, a1, a1);
1121  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1122 
1123  // Check cache validity in generated code. This is a fast case for
1124  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1125  // guarantee cache validity, call the runtime system to check cache
1126  // validity or get the property names in a fixed array.
1127  __ CheckEnumCache(null_value, &call_runtime);
1128 
1129  // The enum cache is valid. Load the map of the object being
1130  // iterated over and use the cache for the iteration.
1131  Label use_cache;
1132  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1133  __ Branch(&use_cache);
1134 
1135  // Get the set of properties to enumerate.
1136  __ bind(&call_runtime);
1137  __ push(a0); // Duplicate the enumerable object on the stack.
1138  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1139 
1140  // If we got a map from the runtime call, we can do a fast
1141  // modification check. Otherwise, we got a fixed array, and we have
1142  // to do a slow check.
1143  Label fixed_array;
1144  __ mov(a2, v0);
1145  __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
1146  __ LoadRoot(at, Heap::kMetaMapRootIndex);
1147  __ Branch(&fixed_array, ne, a1, Operand(at));
1148 
1149  // We got a map in register v0. Get the enumeration cache from it.
1150  __ bind(&use_cache);
1151  __ LoadInstanceDescriptors(v0, a1);
1152  __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
1153  __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
1154 
1155  // Set up the four remaining stack slots.
1156  __ push(v0); // Map.
1157  __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
1158  __ li(a0, Operand(Smi::FromInt(0)));
1159  // Push enumeration cache, enumeration cache length (as smi) and zero.
1160  __ Push(a2, a1, a0);
1161  __ jmp(&loop);
1162 
1163  // We got a fixed array in register v0. Iterate through that.
1164  Label non_proxy;
1165  __ bind(&fixed_array);
1166 
1167  Handle<JSGlobalPropertyCell> cell =
1168  isolate()->factory()->NewJSGlobalPropertyCell(
1169  Handle<Object>(
1170  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1171  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1172  __ LoadHeapObject(a1, cell);
1175 
1176  __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1177  __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1178  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1179  __ GetObjectType(a2, a3, a3);
1180  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1181  __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1182  __ bind(&non_proxy);
1183  __ Push(a1, v0); // Smi and array
1184  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1185  __ li(a0, Operand(Smi::FromInt(0)));
1186  __ Push(a1, a0); // Fixed array length (as smi) and initial index.
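// From here on the for-in loop state occupies five stack slots, from the
// top: the current index, the array length, the fixed array of keys, the
// expected map (or a smi in the proxy/slow case), and the enumerable object
// itself. The loads below index these slots directly off sp.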
1187 
1188  // Generate code for doing the condition check.
1189  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1190  __ bind(&loop);
1191  // Load the current count to a0, load the length to a1.
1192  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1193  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1194  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1195 
1196  // Get the current entry of the array into register a3.
1197  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1198  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1199  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1200  __ addu(t0, a2, t0); // Array base + scaled (smi) index.
1201  __ lw(a3, MemOperand(t0)); // Current entry.
1202 
1203  // Get the expected map from the stack or a smi in the
1204  // permanent slow case into register a2.
1205  __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1206 
1207  // Check if the expected map still matches that of the enumerable.
1208  // If not, we may have to filter the key.
1209  Label update_each;
1210  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1211  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1212  __ Branch(&update_each, eq, t0, Operand(a2));
1213 
1214  // For proxies, no filtering is done.
1215  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1216  ASSERT_EQ(Smi::FromInt(0), 0);
1217  __ Branch(&update_each, eq, a2, Operand(zero_reg));
1218 
1219  // Convert the entry to a string or (smi) 0 if it isn't a property
1220  // any more. If the property has been removed while iterating, we
1221  // just skip it.
1222  __ push(a1); // Enumerable.
1223  __ push(a3); // Current entry.
1224  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1225  __ mov(a3, result_register());
1226  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1227 
1228  // Update the 'each' property or variable from the possibly filtered
1229  // entry in register a3.
1230  __ bind(&update_each);
1231  __ mov(result_register(), a3);
1232  // Perform the assignment as if via '='.
1233  { EffectContext context(this);
1234  EmitAssignment(stmt->each());
1235  }
1236 
1237  // Generate code for the body of the loop.
1238  Visit(stmt->body());
1239 
1240  // Generate code for going to the next element by incrementing
1241  // the index (smi) stored on top of the stack.
1242  __ bind(loop_statement.continue_label());
1243  __ pop(a0);
1244  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1245  __ push(a0);
1246 
1247  EmitStackCheck(stmt, &loop);
1248  __ Branch(&loop);
1249 
1250  // Remove the pointers stored on the stack.
1251  __ bind(loop_statement.break_label());
1252  __ Drop(5);
1253 
1254  // Exit and decrement the loop depth.
1255  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1256  __ bind(&exit);
1257  decrement_loop_depth();
1258 }
1259 
1260 
1261 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1262  bool pretenure) {
1263  // Use the fast case closure allocation code that allocates in new
1264  // space for nested functions that don't need literals cloning. If
1265  // we're running with the --always-opt or the --prepare-always-opt
1266  // flag, we need to use the runtime function so that the new function
1267  // we are creating here gets a chance to have its code optimized and
1268  // doesn't just get a copy of the existing unoptimized code.
1269  if (!FLAG_always_opt &&
1270  !FLAG_prepare_always_opt &&
1271  !pretenure &&
1272  scope()->is_function_scope() &&
1273  info->num_literals() == 0) {
1274  FastNewClosureStub stub(info->language_mode());
1275  __ li(a0, Operand(info));
1276  __ push(a0);
1277  __ CallStub(&stub);
1278  } else {
1279  __ li(a0, Operand(info));
1280  __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1281  : Heap::kFalseValueRootIndex);
1282  __ Push(cp, a0, a1);
1283  __ CallRuntime(Runtime::kNewClosure, 3);
1284  }
1285  context()->Plug(v0);
1286 }
1287 
1288 
1289 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1290  Comment cmnt(masm_, "[ VariableProxy");
1291  EmitVariableLoad(expr);
1292 }
1293 
1294 
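// Loads a global variable through the LoadIC, but first walks the context
// chain and bails out to |slow| if any intervening context has an extension
// object (which a non-strict eval could have installed), since such an
// extension could shadow the global.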
1295 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1296  TypeofState typeof_state,
1297  Label* slow) {
1298  Register current = cp;
1299  Register next = a1;
1300  Register temp = a2;
1301 
1302  Scope* s = scope();
1303  while (s != NULL) {
1304  if (s->num_heap_slots() > 0) {
1305  if (s->calls_non_strict_eval()) {
1306  // Check that extension is NULL.
1307  __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1308  __ Branch(slow, ne, temp, Operand(zero_reg));
1309  }
1310  // Load next context in chain.
1311  __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1312  // Walk the rest of the chain without clobbering cp.
1313  current = next;
1314  }
1315  // If no outer scope calls eval, we do not need to check more
1316  // context extensions.
1317  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1318  s = s->outer_scope();
1319  }
1320 
1321  if (s->is_eval_scope()) {
1322  Label loop, fast;
1323  if (!current.is(next)) {
1324  __ Move(next, current);
1325  }
1326  __ bind(&loop);
1327  // Terminate at global context.
1328  __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1329  __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
1330  __ Branch(&fast, eq, temp, Operand(t0));
1331  // Check that extension is NULL.
1332  __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1333  __ Branch(slow, ne, temp, Operand(zero_reg));
1334  // Load next context in chain.
1335  __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1336  __ Branch(&loop);
1337  __ bind(&fast);
1338  }
1339 
1340  __ lw(a0, GlobalObjectOperand());
1341  __ li(a2, Operand(var->name()));
1342  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1343  ? RelocInfo::CODE_TARGET
1344  : RelocInfo::CODE_TARGET_CONTEXT;
1345  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1346  CallIC(ic, mode);
1347 }
1348 
1349 
1350 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1351  Label* slow) {
1352  ASSERT(var->IsContextSlot());
1353  Register context = cp;
1354  Register next = a3;
1355  Register temp = t0;
1356 
1357  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1358  if (s->num_heap_slots() > 0) {
1359  if (s->calls_non_strict_eval()) {
1360  // Check that extension is NULL.
1361  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1362  __ Branch(slow, ne, temp, Operand(zero_reg));
1363  }
1364  __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1365  // Walk the rest of the chain without clobbering cp.
1366  context = next;
1367  }
1368  }
1369  // Check that last extension is NULL.
1370  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1371  __ Branch(slow, ne, temp, Operand(zero_reg));
1372 
1373  // This function is used only for loads, not stores, so it's safe to
1374  // return a cp-based operand (the write barrier cannot be allowed to
1375  // destroy the cp register).
1376  return ContextOperand(context, var->index());
1377 }
1378 
1379 
1380 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1381  TypeofState typeof_state,
1382  Label* slow,
1383  Label* done) {
1384  // Generate fast-case code for variables that might be shadowed by
1385  // eval-introduced variables. Eval is used a lot without
1386  // introducing variables. In those cases, we do not want to
1387  // perform a runtime call for all variables in the scope
1388  // containing the eval.
1389  if (var->mode() == DYNAMIC_GLOBAL) {
1390  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1391  __ Branch(done);
1392  } else if (var->mode() == DYNAMIC_LOCAL) {
1393  Variable* local = var->local_if_not_shadowed();
1394  __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1395  if (local->mode() == CONST ||
1396  local->mode() == CONST_HARMONY ||
1397  local->mode() == LET) {
1398  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1399  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1400  if (local->mode() == CONST) {
1401  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1402  __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1403  } else { // LET || CONST_HARMONY
1404  __ Branch(done, ne, at, Operand(zero_reg));
1405  __ li(a0, Operand(var->name()));
1406  __ push(a0);
1407  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1408  }
1409  }
1410  __ Branch(done);
1411  }
1412 }
1413 
1414 
1415 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1416  // Record position before possible IC call.
1417  SetSourcePosition(proxy->position());
1418  Variable* var = proxy->var();
1419 
1420  // Three cases: global variables, lookup variables, and all other types of
1421  // variables.
1422  switch (var->location()) {
1423  case Variable::UNALLOCATED: {
1424  Comment cmnt(masm_, "Global variable");
1425  // Use inline caching. Variable name is passed in a2 and the global
1426  // object (receiver) in a0.
1427  __ lw(a0, GlobalObjectOperand());
1428  __ li(a2, Operand(var->name()));
1429  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1430  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1431  context()->Plug(v0);
1432  break;
1433  }
1434 
1435  case Variable::PARAMETER:
1436  case Variable::LOCAL:
1437  case Variable::CONTEXT: {
1438  Comment cmnt(masm_, var->IsContextSlot()
1439  ? "Context variable"
1440  : "Stack variable");
1441  if (var->binding_needs_init()) {
1442  // var->scope() may be NULL when the proxy is located in eval code and
1443  // refers to a potential outside binding. Currently those bindings are
1444  // always looked up dynamically, i.e. in that case
1445  // var->location() == LOOKUP.
1446  // always holds.
1447  ASSERT(var->scope() != NULL);
1448 
1449  // Check if the binding really needs an initialization check. The check
1450  // can be skipped in the following situation: we have a LET or CONST
1451  // binding in harmony mode, both the Variable and the VariableProxy have
1452  // the same declaration scope (i.e. they are both in global code, in the
1453  // same function or in the same eval code) and the VariableProxy is in
1454  // the source physically located after the initializer of the variable.
1455  //
1456  // We cannot skip any initialization checks for CONST in non-harmony
1457  // mode because const variables may be declared but never initialized:
1458  // if (false) { const x; }; var y = x;
1459  //
1460  // The condition on the declaration scopes is a conservative check for
1461  // nested functions that access a binding and are called before the
1462  // binding is initialized:
1463  // function() { f(); let x = 1; function f() { x = 2; } }
1464  //
1465  bool skip_init_check;
1466  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1467  skip_init_check = false;
1468  } else {
1469  // Check that we always have valid source position.
1470  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1471  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1472  skip_init_check = var->mode() != CONST &&
1473  var->initializer_position() < proxy->position();
1474  }
1475 
1476  if (!skip_init_check) {
1477  // Let and const need a read barrier.
1478  GetVar(v0, var);
1479  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1480  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1481  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1482  // Throw a reference error when using an uninitialized let/const
1483  // binding in harmony mode.
1484  Label done;
1485  __ Branch(&done, ne, at, Operand(zero_reg));
1486  __ li(a0, Operand(var->name()));
1487  __ push(a0);
1488  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1489  __ bind(&done);
1490  } else {
1491  // Uninitialized const bindings outside of harmony mode are unholed.
1492  ASSERT(var->mode() == CONST);
1493  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1494  __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1495  }
1496  context()->Plug(v0);
1497  break;
1498  }
1499  }
1500  context()->Plug(var);
1501  break;
1502  }
1503 
1504  case Variable::LOOKUP: {
1505  Label done, slow;
1506  // Generate code for loading from variables potentially shadowed
1507  // by eval-introduced variables.
1508  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1509  __ bind(&slow);
1510  Comment cmnt(masm_, "Lookup variable");
1511  __ li(a1, Operand(var->name()));
1512  __ Push(cp, a1); // Context and name.
1513  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1514  __ bind(&done);
1515  context()->Plug(v0);
1516  }
1517  }
1518 }
1519 
1520 
1521 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1522  Comment cmnt(masm_, "[ RegExpLiteral");
1523  Label materialized;
1524  // Registers will be used as follows:
1525  // t1 = materialized value (RegExp literal)
1526  // t0 = JS function, literals array
1527  // a3 = literal index
1528  // a2 = RegExp pattern
1529  // a1 = RegExp flags
1530  // a0 = RegExp literal clone
1531  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1532  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1533  int literal_offset =
1534  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1535  __ lw(t1, FieldMemOperand(t0, literal_offset));
1536  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1537  __ Branch(&materialized, ne, t1, Operand(at));
1538 
1539  // Create regexp literal using runtime function.
1540  // Result will be in v0.
1541  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1542  __ li(a2, Operand(expr->pattern()));
1543  __ li(a1, Operand(expr->flags()));
1544  __ Push(t0, a3, a2, a1);
1545  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1546  __ mov(t1, v0);
1547 
1548  __ bind(&materialized);
1549  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1550  Label allocated, runtime_allocate;
1551  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1552  __ jmp(&allocated);
1553 
1554  __ bind(&runtime_allocate);
1555  __ push(t1);
1556  __ li(a0, Operand(Smi::FromInt(size)));
1557  __ push(a0);
1558  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1559  __ pop(t1);
1560 
1561  __ bind(&allocated);
1562 
1563  // After this, registers are used as follows:
1564  // v0: Newly allocated regexp.
1565  // t1: Materialized regexp.
1566  // a2: temp.
1567  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1568  context()->Plug(v0);
1569 }
1570 
1571 
1572 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1573  if (expression == NULL) {
1574  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1575  __ push(a1);
1576  } else {
1577  VisitForStackValue(expression);
1578  }
1579 }
1580 
1581 
1582 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1583  Comment cmnt(masm_, "[ ObjectLiteral");
1584  Handle<FixedArray> constant_properties = expr->constant_properties();
1585  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1586  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1587  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1588  __ li(a1, Operand(constant_properties));
1589  int flags = expr->fast_elements()
1590  ? ObjectLiteral::kFastElements
1591  : ObjectLiteral::kNoFlags;
1592  flags |= expr->has_function()
1593  ? ObjectLiteral::kHasFunction
1594  : ObjectLiteral::kNoFlags;
1595  __ li(a0, Operand(Smi::FromInt(flags)));
1596  __ Push(a3, a2, a1, a0);
1597  int properties_count = constant_properties->length() / 2;
1598  if (expr->depth() > 1) {
1599  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1600  } else if (flags != ObjectLiteral::kFastElements ||
1601  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1602  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1603  } else {
1604  FastCloneShallowObjectStub stub(properties_count);
1605  __ CallStub(&stub);
1606  }
1607 
1608  // If result_saved is true the result is on top of the stack. If
1609  // result_saved is false the result is in v0.
1610  bool result_saved = false;
1611 
1612  // Mark all computed expressions that are bound to a key that
1613  // is shadowed by a later occurrence of the same key. For the
1614  // marked expressions, no store code is emitted.
1615  expr->CalculateEmitStore(zone());
1616 
1617  AccessorTable accessor_table(isolate()->zone());
1618  for (int i = 0; i < expr->properties()->length(); i++) {
1619  ObjectLiteral::Property* property = expr->properties()->at(i);
1620  if (property->IsCompileTimeValue()) continue;
1621 
1622  Literal* key = property->key();
1623  Expression* value = property->value();
1624  if (!result_saved) {
1625  __ push(v0); // Save result on stack.
1626  result_saved = true;
1627  }
1628  switch (property->kind()) {
1629  case ObjectLiteral::Property::CONSTANT:
1630  UNREACHABLE();
1631  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1632  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1633  // Fall through.
1634  case ObjectLiteral::Property::COMPUTED:
1635  if (key->handle()->IsSymbol()) {
1636  if (property->emit_store()) {
1637  VisitForAccumulatorValue(value);
1638  __ mov(a0, result_register());
1639  __ li(a2, Operand(key->handle()));
1640  __ lw(a1, MemOperand(sp));
1641  Handle<Code> ic = is_classic_mode()
1642  ? isolate()->builtins()->StoreIC_Initialize()
1643  : isolate()->builtins()->StoreIC_Initialize_Strict();
1644  CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1645  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1646  } else {
1647  VisitForEffect(value);
1648  }
1649  break;
1650  }
1651  // Fall through.
1652  case ObjectLiteral::Property::PROTOTYPE:
1653  // Duplicate receiver on stack.
1654  __ lw(a0, MemOperand(sp));
1655  __ push(a0);
1656  VisitForStackValue(key);
1657  VisitForStackValue(value);
1658  if (property->emit_store()) {
1659  __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1660  __ push(a0);
1661  __ CallRuntime(Runtime::kSetProperty, 4);
1662  } else {
1663  __ Drop(3);
1664  }
1665  break;
1666  case ObjectLiteral::Property::GETTER:
1667  accessor_table.lookup(key)->second->getter = value;
1668  break;
1669  case ObjectLiteral::Property::SETTER:
1670  accessor_table.lookup(key)->second->setter = value;
1671  break;
1672  }
1673  }
1674 
1675  // Emit code to define accessors, using only a single call to the runtime for
1676  // each pair of corresponding getters and setters.
1677  for (AccessorTable::Iterator it = accessor_table.begin();
1678  it != accessor_table.end();
1679  ++it) {
1680  __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1681  __ push(a0);
1682  VisitForStackValue(it->first);
1683  EmitAccessor(it->second->getter);
1684  EmitAccessor(it->second->setter);
1685  __ li(a0, Operand(Smi::FromInt(NONE)));
1686  __ push(a0);
1687  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1688  }
1689 
1690  if (expr->has_function()) {
1691  ASSERT(result_saved);
1692  __ lw(a0, MemOperand(sp));
1693  __ push(a0);
1694  __ CallRuntime(Runtime::kToFastProperties, 1);
1695  }
1696 
1697  if (result_saved) {
1698  context()->PlugTOS();
1699  } else {
1700  context()->Plug(v0);
1701  }
1702 }
1703 
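The accessor table built above exists so that a getter and a setter naming the same key are defined with a single Runtime::kDefineOrRedefineAccessorProperty call rather than one call per accessor. A minimal sketch of that grouping idea, with std::map standing in for V8's AccessorTable (the names below are illustrative, not V8 APIs):

    #include <map>
    #include <string>

    struct AccessorPair {
      const void* getter = nullptr;  // stand-in for the getter's FunctionLiteral
      const void* setter = nullptr;  // stand-in for the setter's FunctionLiteral
    };

    // Record one accessor; both accessors for a key land in the same entry, so
    // the definition loop later issues exactly one call per property name.
    void AddAccessor(std::map<std::string, AccessorPair>* table,
                     const std::string& key, const void* fn, bool is_getter) {
      AccessorPair& pair = (*table)[key];
      if (is_getter) {
        pair.getter = fn;
      } else {
        pair.setter = fn;
      }
    }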
1704 
1705 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1706  Comment cmnt(masm_, "[ ArrayLiteral");
1707 
1708  ZoneList<Expression*>* subexprs = expr->values();
1709  int length = subexprs->length();
1710 
1711  Handle<FixedArray> constant_elements = expr->constant_elements();
1712  ASSERT_EQ(2, constant_elements->length());
1713  ElementsKind constant_elements_kind =
1714  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1715  bool has_fast_elements =
1716  IsFastObjectElementsKind(constant_elements_kind);
1717  Handle<FixedArrayBase> constant_elements_values(
1718  FixedArrayBase::cast(constant_elements->get(1)));
1719 
1720  __ mov(a0, result_register());
1721  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1722  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1723  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1724  __ li(a1, Operand(constant_elements));
1725  __ Push(a3, a2, a1);
1726  if (has_fast_elements && constant_elements_values->map() ==
1727  isolate()->heap()->fixed_cow_array_map()) {
1728  FastCloneShallowArrayStub stub(
1729  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1730  __ CallStub(&stub);
1731  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1732  1, a1, a2);
1733  } else if (expr->depth() > 1) {
1734  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1735  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1736  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1737  } else {
1738  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1739  FLAG_smi_only_arrays);
1740  FastCloneShallowArrayStub::Mode mode = has_fast_elements
1741  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1742  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1743  FastCloneShallowArrayStub stub(mode, length);
1744  __ CallStub(&stub);
1745  }
1746 
1747  bool result_saved = false; // Is the result saved to the stack?
1748 
1749  // Emit code to evaluate all the non-constant subexpressions and to store
1750  // them into the newly cloned array.
1751  for (int i = 0; i < length; i++) {
1752  Expression* subexpr = subexprs->at(i);
1753  // If the subexpression is a literal or a simple materialized literal it
1754  // is already set in the cloned array.
1755  if (subexpr->AsLiteral() != NULL ||
1756  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1757  continue;
1758  }
1759 
1760  if (!result_saved) {
1761  __ push(v0);
1762  result_saved = true;
1763  }
1764 
1765  VisitForAccumulatorValue(subexpr);
1766 
1767  if (IsFastObjectElementsKind(constant_elements_kind)) {
1768  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1769  __ lw(t2, MemOperand(sp)); // Copy of array literal.
1770  __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1771  __ sw(result_register(), FieldMemOperand(a1, offset));
1772  // Update the write barrier for the array store.
1773  __ RecordWriteField(a1, offset, result_register(), a2,
1774  kRAHasBeenSaved, kDontSaveFPRegs,
1775  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1776  } else {
1777  __ lw(a1, MemOperand(sp)); // Copy of array literal.
1779  __ li(a3, Operand(Smi::FromInt(i)));
1780  __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
1781  __ mov(a0, result_register());
1782  StoreArrayLiteralElementStub stub;
1783  __ CallStub(&stub);
1784  }
1785 
1786  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1787  }
1788  if (result_saved) {
1789  context()->PlugTOS();
1790  } else {
1791  context()->Plug(v0);
1792  }
1793 }
1794 
1795 
1796 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1797  Comment cmnt(masm_, "[ Assignment");
1798  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1799  // on the left-hand side.
1800  if (!expr->target()->IsValidLeftHandSide()) {
1801  VisitForEffect(expr->target());
1802  return;
1803  }
1804 
1805  // Left-hand side can only be a property, a global or a (parameter or local)
1806  // slot.
1807  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1808  LhsKind assign_type = VARIABLE;
1809  Property* property = expr->target()->AsProperty();
1810  if (property != NULL) {
1811  assign_type = (property->key()->IsPropertyName())
1812  ? NAMED_PROPERTY
1813  : KEYED_PROPERTY;
1814  }
1815 
1816  // Evaluate LHS expression.
1817  switch (assign_type) {
1818  case VARIABLE:
1819  // Nothing to do here.
1820  break;
1821  case NAMED_PROPERTY:
1822  if (expr->is_compound()) {
1823  // We need the receiver both on the stack and in the accumulator.
1824  VisitForAccumulatorValue(property->obj());
1825  __ push(result_register());
1826  } else {
1827  VisitForStackValue(property->obj());
1828  }
1829  break;
1830  case KEYED_PROPERTY:
1831  // We need the key and receiver on both the stack and in v0 and a1.
1832  if (expr->is_compound()) {
1833  VisitForStackValue(property->obj());
1834  VisitForAccumulatorValue(property->key());
1835  __ lw(a1, MemOperand(sp, 0));
1836  __ push(v0);
1837  } else {
1838  VisitForStackValue(property->obj());
1839  VisitForStackValue(property->key());
1840  }
1841  break;
1842  }
1843 
1844  // For compound assignments we need another deoptimization point after the
1845  // variable/property load.
1846  if (expr->is_compound()) {
1847  { AccumulatorValueContext context(this);
1848  switch (assign_type) {
1849  case VARIABLE:
1850  EmitVariableLoad(expr->target()->AsVariableProxy());
1851  PrepareForBailout(expr->target(), TOS_REG);
1852  break;
1853  case NAMED_PROPERTY:
1854  EmitNamedPropertyLoad(property);
1855  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1856  break;
1857  case KEYED_PROPERTY:
1858  EmitKeyedPropertyLoad(property);
1859  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1860  break;
1861  }
1862  }
1863 
1864  Token::Value op = expr->binary_op();
1865  __ push(v0); // Left operand goes on the stack.
1866  VisitForAccumulatorValue(expr->value());
1867 
1868  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1869  ? OVERWRITE_RIGHT
1870  : NO_OVERWRITE;
1871  SetSourcePosition(expr->position() + 1);
1872  AccumulatorValueContext context(this);
1873  if (ShouldInlineSmiCase(op)) {
1874  EmitInlineSmiBinaryOp(expr->binary_operation(),
1875  op,
1876  mode,
1877  expr->target(),
1878  expr->value());
1879  } else {
1880  EmitBinaryOp(expr->binary_operation(), op, mode);
1881  }
1882 
1883  // Deoptimization point in case the binary operation may have side effects.
1884  PrepareForBailout(expr->binary_operation(), TOS_REG);
1885  } else {
1886  VisitForAccumulatorValue(expr->value());
1887  }
1888 
1889  // Record source position before possible IC call.
1890  SetSourcePosition(expr->position());
1891 
1892  // Store the value.
1893  switch (assign_type) {
1894  case VARIABLE:
1895  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1896  expr->op());
1897  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1898  context()->Plug(v0);
1899  break;
1900  case NAMED_PROPERTY:
1901  EmitNamedPropertyAssignment(expr);
1902  break;
1903  case KEYED_PROPERTY:
1904  EmitKeyedPropertyAssignment(expr);
1905  break;
1906  }
1907 }
1908 
1909 
1910 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1911  SetSourcePosition(prop->position());
1912  Literal* key = prop->key()->AsLiteral();
1913  __ mov(a0, result_register());
1914  __ li(a2, Operand(key->handle()));
1915  // Call load IC. It has arguments receiver and property name in a0 and a2.
1916  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1917  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1918 }
1919 
1920 
1921 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1922  SetSourcePosition(prop->position());
1923  __ mov(a0, result_register());
1924  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
1925  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1926  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1927 }
1928 
1929 
1930 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1931  Token::Value op,
1932  OverwriteMode mode,
1933  Expression* left_expr,
1934  Expression* right_expr) {
1935  Label done, smi_case, stub_call;
1936 
1937  Register scratch1 = a2;
1938  Register scratch2 = a3;
1939 
1940  // Get the arguments.
1941  Register left = a1;
1942  Register right = a0;
1943  __ pop(left);
1944  __ mov(a0, result_register());
1945 
1946  // Perform combined smi check on both operands.
1947  __ Or(scratch1, left, Operand(right));
1948  STATIC_ASSERT(kSmiTag == 0);
1949  JumpPatchSite patch_site(masm_);
1950  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1951 
1952  __ bind(&stub_call);
1953  BinaryOpStub stub(op, mode);
1954  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1955  patch_site.EmitPatchInfo();
1956  __ jmp(&done);
1957 
1958  __ bind(&smi_case);
1959  // Smi case. This code works the same way as the smi-smi case in the type
1960  // recording binary operation stub, see
1961  // BinaryOpStub::GenerateSmiSmiOperation for comments.
1962  switch (op) {
1963  case Token::SAR:
1964  __ Branch(&stub_call);
1965  __ GetLeastBitsFromSmi(scratch1, right, 5);
1966  __ srav(right, left, scratch1);
1967  __ And(v0, right, Operand(~kSmiTagMask));
1968  break;
1969  case Token::SHL: {
1970  __ Branch(&stub_call);
1971  __ SmiUntag(scratch1, left);
1972  __ GetLeastBitsFromSmi(scratch2, right, 5);
1973  __ sllv(scratch1, scratch1, scratch2);
1974  __ Addu(scratch2, scratch1, Operand(0x40000000));
1975  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1976  __ SmiTag(v0, scratch1);
1977  break;
1978  }
1979  case Token::SHR: {
1980  __ Branch(&stub_call);
1981  __ SmiUntag(scratch1, left);
1982  __ GetLeastBitsFromSmi(scratch2, right, 5);
1983  __ srlv(scratch1, scratch1, scratch2);
1984  __ And(scratch2, scratch1, 0xc0000000);
1985  __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1986  __ SmiTag(v0, scratch1);
1987  break;
1988  }
1989  case Token::ADD:
1990  __ AdduAndCheckForOverflow(v0, left, right, scratch1);
1991  __ BranchOnOverflow(&stub_call, scratch1);
1992  break;
1993  case Token::SUB:
1994  __ SubuAndCheckForOverflow(v0, left, right, scratch1);
1995  __ BranchOnOverflow(&stub_call, scratch1);
1996  break;
1997  case Token::MUL: {
1998  __ SmiUntag(scratch1, right);
1999  __ Mult(left, scratch1);
2000  __ mflo(scratch1);
2001  __ mfhi(scratch2);
2002  __ sra(scratch1, scratch1, 31);
2003  __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2004  __ mflo(v0);
2005  __ Branch(&done, ne, v0, Operand(zero_reg));
2006  __ Addu(scratch2, right, left);
2007  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2008  ASSERT(Smi::FromInt(0) == 0);
2009  __ mov(v0, zero_reg);
2010  break;
2011  }
2012  case Token::BIT_OR:
2013  __ Or(v0, left, Operand(right));
2014  break;
2015  case Token::BIT_AND:
2016  __ And(v0, left, Operand(right));
2017  break;
2018  case Token::BIT_XOR:
2019  __ Xor(v0, left, Operand(right));
2020  break;
2021  default:
2022  UNREACHABLE();
2023  }
2024 
2025  __ bind(&done);
2026  context()->Plug(v0);
2027 }
2028 
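Three checks above are easy to miss in the assembler noise: the combined smi test (or the two tagged words together and look at the tag bit), the range check in the SHL case (add 0x40000000 and watch the sign), and the MUL overflow test (the hi word must equal the sign extension of the lo word). A stand-alone sketch of the same arithmetic, assuming the 32-bit smi encoding with kSmiTag == 0 and a one-bit tag:

    #include <cstdint>

    // Assumed encoding: smi == value << 1, so the tag bit (bit 0) is clear.
    static inline bool BothSmis(uint32_t a, uint32_t b) {
      return ((a | b) & 1u) == 0;  // OR has a clear tag bit only if both operands do
    }

    // A 32-bit value fits in a smi iff it lies in [-2^30, 2^30): adding 0x40000000
    // must leave the sign bit clear, which is the SHL overflow check above.
    static inline bool FitsInSmi(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value) + 0x40000000u) >= 0;
    }

    // The MUL case keeps the inline result only if the 64-bit product sign-extends
    // from 32 bits, i.e. hi == lo >> 31 arithmetically (the mflo/mfhi/sra test above).
    static inline bool ProductFitsIn32Bits(int32_t left, int32_t right) {
      int64_t product = static_cast<int64_t>(left) * right;
      int32_t lo = static_cast<int32_t>(product);
      int32_t hi = static_cast<int32_t>(product >> 32);
      return hi == (lo >> 31);
    }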
2029 
2030 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2031  Token::Value op,
2032  OverwriteMode mode) {
2033  __ mov(a0, result_register());
2034  __ pop(a1);
2035  BinaryOpStub stub(op, mode);
2036  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2037  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
2038  patch_site.EmitPatchInfo();
2039  context()->Plug(v0);
2040 }
2041 
2042 
2043 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2044  // Invalid left-hand sides are rewritten to have a 'throw
2045  // ReferenceError' on the left-hand side.
2046  if (!expr->IsValidLeftHandSide()) {
2047  VisitForEffect(expr);
2048  return;
2049  }
2050 
2051  // Left-hand side can only be a property, a global or a (parameter or local)
2052  // slot.
2053  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2054  LhsKind assign_type = VARIABLE;
2055  Property* prop = expr->AsProperty();
2056  if (prop != NULL) {
2057  assign_type = (prop->key()->IsPropertyName())
2058  ? NAMED_PROPERTY
2059  : KEYED_PROPERTY;
2060  }
2061 
2062  switch (assign_type) {
2063  case VARIABLE: {
2064  Variable* var = expr->AsVariableProxy()->var();
2065  EffectContext context(this);
2066  EmitVariableAssignment(var, Token::ASSIGN);
2067  break;
2068  }
2069  case NAMED_PROPERTY: {
2070  __ push(result_register()); // Preserve value.
2071  VisitForAccumulatorValue(prop->obj());
2072  __ mov(a1, result_register());
2073  __ pop(a0); // Restore value.
2074  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2075  Handle<Code> ic = is_classic_mode()
2076  ? isolate()->builtins()->StoreIC_Initialize()
2077  : isolate()->builtins()->StoreIC_Initialize_Strict();
2078  CallIC(ic);
2079  break;
2080  }
2081  case KEYED_PROPERTY: {
2082  __ push(result_register()); // Preserve value.
2083  VisitForStackValue(prop->obj());
2084  VisitForAccumulatorValue(prop->key());
2085  __ mov(a1, result_register());
2086  __ pop(a2);
2087  __ pop(a0); // Restore value.
2088  Handle<Code> ic = is_classic_mode()
2089  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2090  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2091  CallIC(ic);
2092  break;
2093  }
2094  }
2095  context()->Plug(v0);
2096 }
2097 
2098 
2099 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2100  Token::Value op) {
2101  if (var->IsUnallocated()) {
2102  // Global var, const, or let.
2103  __ mov(a0, result_register());
2104  __ li(a2, Operand(var->name()));
2105  __ lw(a1, GlobalObjectOperand());
2106  Handle<Code> ic = is_classic_mode()
2107  ? isolate()->builtins()->StoreIC_Initialize()
2108  : isolate()->builtins()->StoreIC_Initialize_Strict();
2109  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2110 
2111  } else if (op == Token::INIT_CONST) {
2112  // Const initializers need a write barrier.
2113  ASSERT(!var->IsParameter()); // No const parameters.
2114  if (var->IsStackLocal()) {
2115  Label skip;
2116  __ lw(a1, StackOperand(var));
2117  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2118  __ Branch(&skip, ne, a1, Operand(t0));
2119  __ sw(result_register(), StackOperand(var));
2120  __ bind(&skip);
2121  } else {
2122  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2123  // Like var declarations, const declarations are hoisted to function
2124  // scope. However, unlike var initializers, const initializers are
2125  // able to drill a hole to that function context, even from inside a
2126  // 'with' context. We thus bypass the normal static scope lookup for
2127  // var->IsContextSlot().
2128  __ push(v0);
2129  __ li(a0, Operand(var->name()));
2130  __ Push(cp, a0); // Context and name.
2131  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2132  }
2133 
2134  } else if (var->mode() == LET && op != Token::INIT_LET) {
2135  // Non-initializing assignment to let variable needs a write barrier.
2136  if (var->IsLookupSlot()) {
2137  __ push(v0); // Value.
2138  __ li(a1, Operand(var->name()));
2139  __ li(a0, Operand(Smi::FromInt(language_mode())));
2140  __ Push(cp, a1, a0); // Context, name, strict mode.
2141  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2142  } else {
2143  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2144  Label assign;
2145  MemOperand location = VarOperand(var, a1);
2146  __ lw(a3, location);
2147  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2148  __ Branch(&assign, ne, a3, Operand(t0));
2149  __ li(a3, Operand(var->name()));
2150  __ push(a3);
2151  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2152  // Perform the assignment.
2153  __ bind(&assign);
2154  __ sw(result_register(), location);
2155  if (var->IsContextSlot()) {
2156  // RecordWrite may destroy all its register arguments.
2157  __ mov(a3, result_register());
2158  int offset = Context::SlotOffset(var->index());
2159  __ RecordWriteContextSlot(
2160  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2161  }
2162  }
2163 
2164  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2165  // Assignment to var or initializing assignment to let/const
2166  // in harmony mode.
2167  if (var->IsStackAllocated() || var->IsContextSlot()) {
2168  MemOperand location = VarOperand(var, a1);
2169  if (FLAG_debug_code && op == Token::INIT_LET) {
2170  // Check for an uninitialized let binding.
2171  __ lw(a2, location);
2172  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2173  __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
2174  }
2175  // Perform the assignment.
2176  __ sw(v0, location);
2177  if (var->IsContextSlot()) {
2178  __ mov(a3, v0);
2179  int offset = Context::SlotOffset(var->index());
2180  __ RecordWriteContextSlot(
2181  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2182  }
2183  } else {
2184  ASSERT(var->IsLookupSlot());
2185  __ push(v0); // Value.
2186  __ li(a1, Operand(var->name()));
2187  __ li(a0, Operand(Smi::FromInt(language_mode())));
2188  __ Push(cp, a1, a0); // Context, name, strict mode.
2189  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2190  }
2191  }
2192  // Non-initializing assignments to consts are ignored.
2193 }
2194 
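The compare against the-hole above is what gives let its temporal dead zone: the slot is pre-filled with the hole value, and a non-initializing assignment that still sees the hole throws a ReferenceError instead of storing. A schematic sketch of that guard (Value, kTheHole and the exception type are stand-ins, not V8 types):

    #include <stdexcept>
    #include <string>

    using Value = void;  // opaque stand-in for a tagged V8 value
    static Value* const kTheHole = reinterpret_cast<Value*>(-1);  // "uninitialized" sentinel

    // Non-initializing assignment to a let-declared slot: an initialized slot is
    // overwritten, an uninitialized (hole-valued) slot raises a ReferenceError.
    inline void AssignToLetSlot(Value** slot, Value* value, const std::string& name) {
      if (*slot == kTheHole) {
        throw std::runtime_error("ReferenceError: " + name + " is not initialized");
      }
      *slot = value;
    }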
2195 
2196 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2197  // Assignment to a property, using a named store IC.
2198  Property* prop = expr->target()->AsProperty();
2199  ASSERT(prop != NULL);
2200  ASSERT(prop->key()->AsLiteral() != NULL);
2201 
2202  // If the assignment starts a block of assignments to the same object,
2203  // change to slow case to avoid the quadratic behavior of repeatedly
2204  // adding fast properties.
2205  if (expr->starts_initialization_block()) {
2206  __ push(result_register());
2207  __ lw(t0, MemOperand(sp, kPointerSize)); // Receiver is now under value.
2208  __ push(t0);
2209  __ CallRuntime(Runtime::kToSlowProperties, 1);
2210  __ pop(result_register());
2211  }
2212 
2213  // Record source code position before IC call.
2214  SetSourcePosition(expr->position());
2215  __ mov(a0, result_register()); // Load the value.
2216  __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2217  // Load receiver to a1. Leave a copy on the stack if needed for turning the
2218  // receiver into fast case.
2219  if (expr->ends_initialization_block()) {
2220  __ lw(a1, MemOperand(sp));
2221  } else {
2222  __ pop(a1);
2223  }
2224 
2225  Handle<Code> ic = is_classic_mode()
2226  ? isolate()->builtins()->StoreIC_Initialize()
2227  : isolate()->builtins()->StoreIC_Initialize_Strict();
2228  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2229 
2230  // If the assignment ends an initialization block, revert to fast case.
2231  if (expr->ends_initialization_block()) {
2232  __ push(v0); // Result of assignment, saved even if not needed.
2233  // Receiver is under the result value.
2234  __ lw(t0, MemOperand(sp, kPointerSize));
2235  __ push(t0);
2236  __ CallRuntime(Runtime::kToFastProperties, 1);
2237  __ pop(v0);
2238  __ Drop(1);
2239  }
2240  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2241  context()->Plug(v0);
2242 }
2243 
2244 
2245 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2246  // Assignment to a property, using a keyed store IC.
2247 
2248  // If the assignment starts a block of assignments to the same object,
2249  // change to slow case to avoid the quadratic behavior of repeatedly
2250  // adding fast properties.
2251  if (expr->starts_initialization_block()) {
2252  __ push(result_register());
2253  // Receiver is now under the key and value.
2254  __ lw(t0, MemOperand(sp, 2 * kPointerSize));
2255  __ push(t0);
2256  __ CallRuntime(Runtime::kToSlowProperties, 1);
2257  __ pop(result_register());
2258  }
2259 
2260  // Record source code position before IC call.
2261  SetSourcePosition(expr->position());
2262  // Call keyed store IC.
2263  // The arguments are:
2264  // - a0 is the value,
2265  // - a1 is the key,
2266  // - a2 is the receiver.
2267  __ mov(a0, result_register());
2268  __ pop(a1); // Key.
2269  // Load receiver to a2. Leave a copy on the stack if needed for turning the
2270  // receiver into fast case.
2271  if (expr->ends_initialization_block()) {
2272  __ lw(a2, MemOperand(sp));
2273  } else {
2274  __ pop(a2);
2275  }
2276 
2277  Handle<Code> ic = is_classic_mode()
2278  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2279  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2280  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2281 
2282  // If the assignment ends an initialization block, revert to fast case.
2283  if (expr->ends_initialization_block()) {
2284  __ push(v0); // Result of assignment, saved even if not needed.
2285  // Receiver is under the result value.
2286  __ lw(t0, MemOperand(sp, kPointerSize));
2287  __ push(t0);
2288  __ CallRuntime(Runtime::kToFastProperties, 1);
2289  __ pop(v0);
2290  __ Drop(1);
2291  }
2292  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2293  context()->Plug(v0);
2294 }
2295 
2296 
2297 void FullCodeGenerator::VisitProperty(Property* expr) {
2298  Comment cmnt(masm_, "[ Property");
2299  Expression* key = expr->key();
2300 
2301  if (key->IsPropertyName()) {
2302  VisitForAccumulatorValue(expr->obj());
2303  EmitNamedPropertyLoad(expr);
2304  context()->Plug(v0);
2305  } else {
2306  VisitForStackValue(expr->obj());
2307  VisitForAccumulatorValue(expr->key());
2308  __ pop(a1);
2309  EmitKeyedPropertyLoad(expr);
2310  context()->Plug(v0);
2311  }
2312 }
2313 
2314 
2315 void FullCodeGenerator::CallIC(Handle<Code> code,
2316  RelocInfo::Mode rmode,
2317  unsigned ast_id) {
2318  ic_total_count_++;
2319  __ Call(code, rmode, ast_id);
2320 }
2321 
2322 
2323 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2324  Handle<Object> name,
2325  RelocInfo::Mode mode) {
2326  // Code common for calls using the IC.
2327  ZoneList<Expression*>* args = expr->arguments();
2328  int arg_count = args->length();
2329  { PreservePositionScope scope(masm()->positions_recorder());
2330  for (int i = 0; i < arg_count; i++) {
2331  VisitForStackValue(args->at(i));
2332  }
2333  __ li(a2, Operand(name));
2334  }
2335  // Record source position for debugger.
2336  SetSourcePosition(expr->position());
2337  // Call the IC initialization code.
2338  Handle<Code> ic =
2339  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2340  CallIC(ic, mode, expr->id());
2341  RecordJSReturnSite(expr);
2342  // Restore context register.
2343  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2344  context()->Plug(v0);
2345 }
2346 
2347 
2348 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2349  Expression* key) {
2350  // Load the key.
2351  VisitForAccumulatorValue(key);
2352 
2353  // Swap the name of the function and the receiver on the stack to follow
2354  // the calling convention for call ICs.
2355  __ pop(a1);
2356  __ push(v0);
2357  __ push(a1);
2358 
2359  // Code common for calls using the IC.
2360  ZoneList<Expression*>* args = expr->arguments();
2361  int arg_count = args->length();
2362  { PreservePositionScope scope(masm()->positions_recorder());
2363  for (int i = 0; i < arg_count; i++) {
2364  VisitForStackValue(args->at(i));
2365  }
2366  }
2367  // Record source position for debugger.
2368  SetSourcePosition(expr->position());
2369  // Call the IC initialization code.
2370  Handle<Code> ic =
2371  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2372  __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2373  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2374  RecordJSReturnSite(expr);
2375  // Restore context register.
2376  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2377  context()->DropAndPlug(1, v0); // Drop the key still on the stack.
2378 }
2379 
2380 
2381 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2382  // Code common for calls using the call stub.
2383  ZoneList<Expression*>* args = expr->arguments();
2384  int arg_count = args->length();
2385  { PreservePositionScope scope(masm()->positions_recorder());
2386  for (int i = 0; i < arg_count; i++) {
2387  VisitForStackValue(args->at(i));
2388  }
2389  }
2390  // Record source position for debugger.
2391  SetSourcePosition(expr->position());
2392 
2393  // Record call targets in unoptimized code, but not in the snapshot.
2394  if (!Serializer::enabled()) {
2395  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2396  Handle<Object> uninitialized =
2397  TypeFeedbackCells::UninitializedSentinel(isolate());
2398  Handle<JSGlobalPropertyCell> cell =
2399  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2400  RecordTypeFeedbackCell(expr->id(), cell);
2401  __ li(a2, Operand(cell));
2402  }
2403 
2404  CallFunctionStub stub(arg_count, flags);
2405  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2406  __ CallStub(&stub);
2407  RecordJSReturnSite(expr);
2408  // Restore context register.
2409  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2410  context()->DropAndPlug(1, v0);
2411 }
2412 
2413 
2414 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2415  // Push copy of the first argument or undefined if it doesn't exist.
2416  if (arg_count > 0) {
2417  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2418  } else {
2419  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2420  }
2421  __ push(a1);
2422 
2423  // Push the receiver of the enclosing function.
2424  int receiver_offset = 2 + info_->scope()->num_parameters();
2425  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2426  __ push(a1);
2427  // Push the language mode.
2428  __ li(a1, Operand(Smi::FromInt(language_mode())));
2429  __ push(a1);
2430 
2431  // Push the start position of the scope the call resides in.
2432  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2433  __ push(a1);
2434 
2435  // Do the runtime call.
2436  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2437 }
2438 
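Counting the copy of the function that VisitCall pushes immediately before invoking this helper, Runtime::kResolvePossiblyDirectEval receives five values in pushdown order: the function being called, the first argument (or undefined when there is none), the enclosing function's receiver, the language mode as a smi, and the start position of the surrounding scope.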
2439 
2440 void FullCodeGenerator::VisitCall(Call* expr) {
2441 #ifdef DEBUG
2442  // We want to verify that RecordJSReturnSite gets called on all paths
2443  // through this function. Avoid early returns.
2444  expr->return_is_recorded_ = false;
2445 #endif
2446 
2447  Comment cmnt(masm_, "[ Call");
2448  Expression* callee = expr->expression();
2449  VariableProxy* proxy = callee->AsVariableProxy();
2450  Property* property = callee->AsProperty();
2451 
2452  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2453  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2454  // resolve the function we need to call and the receiver of the
2455  // call. Then we call the resolved function using the given
2456  // arguments.
2457  ZoneList<Expression*>* args = expr->arguments();
2458  int arg_count = args->length();
2459 
2460  { PreservePositionScope pos_scope(masm()->positions_recorder());
2461  VisitForStackValue(callee);
2462  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2463  __ push(a2); // Reserved receiver slot.
2464 
2465  // Push the arguments.
2466  for (int i = 0; i < arg_count; i++) {
2467  VisitForStackValue(args->at(i));
2468  }
2469 
2470  // Push a copy of the function (found below the arguments) and
2471  // resolve eval.
2472  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2473  __ push(a1);
2474  EmitResolvePossiblyDirectEval(arg_count);
2475 
2476  // The runtime call returns a pair of values in v0 (function) and
2477  // v1 (receiver). Touch up the stack with the right values.
2478  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2479  __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2480  }
2481  // Record source position for debugger.
2482  SetSourcePosition(expr->position());
2483  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2484  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2485  __ CallStub(&stub);
2486  RecordJSReturnSite(expr);
2487  // Restore context register.
2488  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2489  context()->DropAndPlug(1, v0);
2490  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2491  // Push global object as receiver for the call IC.
2492  __ lw(a0, GlobalObjectOperand());
2493  __ push(a0);
2494  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2495  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2496  // Call to a lookup slot (dynamically introduced variable).
2497  Label slow, done;
2498 
2499  { PreservePositionScope scope(masm()->positions_recorder());
2500  // Generate code for loading from variables potentially shadowed
2501  // by eval-introduced variables.
2502  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2503  }
2504 
2505  __ bind(&slow);
2506  // Call the runtime to find the function to call (returned in v0)
2507  // and the object holding it (returned in v1).
2508  __ push(context_register());
2509  __ li(a2, Operand(proxy->name()));
2510  __ push(a2);
2511  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2512  __ Push(v0, v1); // Function, receiver.
2513 
2514  // If fast case code has been generated, emit code to push the
2515  // function and receiver and have the slow path jump around this
2516  // code.
2517  if (done.is_linked()) {
2518  Label call;
2519  __ Branch(&call);
2520  __ bind(&done);
2521  // Push function.
2522  __ push(v0);
2523  // The receiver is implicitly the global receiver. Indicate this
2524  // by passing the hole to the call function stub.
2525  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2526  __ push(a1);
2527  __ bind(&call);
2528  }
2529 
2530  // The receiver is either the global receiver or an object found
2531  // by LoadContextSlot. That object could be the hole if the
2532  // receiver is implicitly the global object.
2533  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2534  } else if (property != NULL) {
2535  { PreservePositionScope scope(masm()->positions_recorder());
2536  VisitForStackValue(property->obj());
2537  }
2538  if (property->key()->IsPropertyName()) {
2539  EmitCallWithIC(expr,
2540  property->key()->AsLiteral()->handle(),
2541  RelocInfo::CODE_TARGET);
2542  } else {
2543  EmitKeyedCallWithIC(expr, property->key());
2544  }
2545  } else {
2546  // Call to an arbitrary expression not handled specially above.
2547  { PreservePositionScope scope(masm()->positions_recorder());
2548  VisitForStackValue(callee);
2549  }
2550  // Load global receiver object.
2551  __ lw(a1, GlobalObjectOperand());
2552  __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2553  __ push(a1);
2554  // Emit function call.
2555  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2556  }
2557 
2558 #ifdef DEBUG
2559  // RecordJSReturnSite should have been called.
2560  ASSERT(expr->return_is_recorded_);
2561 #endif
2562 }
2563 
2564 
2565 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2566  Comment cmnt(masm_, "[ CallNew");
2567  // According to ECMA-262, section 11.2.2, page 44, the function
2568  // expression in new calls must be evaluated before the
2569  // arguments.
2570 
2571  // Push constructor on the stack. If it's not a function it's used as
2572  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2573  // ignored.
2574  VisitForStackValue(expr->expression());
2575 
2576  // Push the arguments ("left-to-right") on the stack.
2577  ZoneList<Expression*>* args = expr->arguments();
2578  int arg_count = args->length();
2579  for (int i = 0; i < arg_count; i++) {
2580  VisitForStackValue(args->at(i));
2581  }
2582 
2583  // Call the construct call builtin that handles allocation and
2584  // constructor invocation.
2585  SetSourcePosition(expr->position());
2586 
2587  // Load function and argument count into a1 and a0.
2588  __ li(a0, Operand(arg_count));
2589  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2590 
2591  // Record call targets in unoptimized code, but not in the snapshot.
2592  CallFunctionFlags flags;
2593  if (!Serializer::enabled()) {
2594  flags = RECORD_CALL_TARGET;
2595  Handle<Object> uninitialized =
2596  TypeFeedbackCells::UninitializedSentinel(isolate());
2597  Handle<JSGlobalPropertyCell> cell =
2598  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2599  RecordTypeFeedbackCell(expr->id(), cell);
2600  __ li(a2, Operand(cell));
2601  } else {
2602  flags = NO_CALL_FUNCTION_FLAGS;
2603  }
2604 
2605  CallConstructStub stub(flags);
2606  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2607  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2608  context()->Plug(v0);
2609 }
2610 
2611 
2612 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2613  ZoneList<Expression*>* args = expr->arguments();
2614  ASSERT(args->length() == 1);
2615 
2616  VisitForAccumulatorValue(args->at(0));
2617 
2618  Label materialize_true, materialize_false;
2619  Label* if_true = NULL;
2620  Label* if_false = NULL;
2621  Label* fall_through = NULL;
2622  context()->PrepareTest(&materialize_true, &materialize_false,
2623  &if_true, &if_false, &fall_through);
2624 
2625  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2626  __ And(t0, v0, Operand(kSmiTagMask));
2627  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2628 
2629  context()->Plug(if_true, if_false);
2630 }
2631 
2632 
2633 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2634  ZoneList<Expression*>* args = expr->arguments();
2635  ASSERT(args->length() == 1);
2636 
2637  VisitForAccumulatorValue(args->at(0));
2638 
2639  Label materialize_true, materialize_false;
2640  Label* if_true = NULL;
2641  Label* if_false = NULL;
2642  Label* fall_through = NULL;
2643  context()->PrepareTest(&materialize_true, &materialize_false,
2644  &if_true, &if_false, &fall_through);
2645 
2646  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2647  __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
2648  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2649 
2650  context()->Plug(if_true, if_false);
2651 }
2652 
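The single mask above folds two tests into one: bit 0 is the smi tag and bit 31 is the sign, so the value is a non-negative smi exactly when the masked result is zero. The same test in stand-alone form, under the assumed 32-bit, tag-bit-zero encoding:

    #include <cstdint>

    // Assumed encoding: smi == value << 1 (tag bit clear), sign in bit 31.
    static inline bool IsNonNegativeSmi(uint32_t tagged) {
      return (tagged & (1u | 0x80000000u)) == 0;
    }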
2653 
2654 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2655  ZoneList<Expression*>* args = expr->arguments();
2656  ASSERT(args->length() == 1);
2657 
2658  VisitForAccumulatorValue(args->at(0));
2659 
2660  Label materialize_true, materialize_false;
2661  Label* if_true = NULL;
2662  Label* if_false = NULL;
2663  Label* fall_through = NULL;
2664  context()->PrepareTest(&materialize_true, &materialize_false,
2665  &if_true, &if_false, &fall_through);
2666 
2667  __ JumpIfSmi(v0, if_false);
2668  __ LoadRoot(at, Heap::kNullValueRootIndex);
2669  __ Branch(if_true, eq, v0, Operand(at));
2670  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
2671  // Undetectable objects behave like undefined when tested with typeof.
2672  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2673  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2674  __ Branch(if_false, ne, at, Operand(zero_reg));
2675  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2676  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2677  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2678  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2679  if_true, if_false, fall_through);
2680 
2681  context()->Plug(if_true, if_false);
2682 }
2683 
2684 
2685 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2686  ZoneList<Expression*>* args = expr->arguments();
2687  ASSERT(args->length() == 1);
2688 
2689  VisitForAccumulatorValue(args->at(0));
2690 
2691  Label materialize_true, materialize_false;
2692  Label* if_true = NULL;
2693  Label* if_false = NULL;
2694  Label* fall_through = NULL;
2695  context()->PrepareTest(&materialize_true, &materialize_false,
2696  &if_true, &if_false, &fall_through);
2697 
2698  __ JumpIfSmi(v0, if_false);
2699  __ GetObjectType(v0, a1, a1);
2700  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2701  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2702  if_true, if_false, fall_through);
2703 
2704  context()->Plug(if_true, if_false);
2705 }
2706 
2707 
2708 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2709  ZoneList<Expression*>* args = expr->arguments();
2710  ASSERT(args->length() == 1);
2711 
2712  VisitForAccumulatorValue(args->at(0));
2713 
2714  Label materialize_true, materialize_false;
2715  Label* if_true = NULL;
2716  Label* if_false = NULL;
2717  Label* fall_through = NULL;
2718  context()->PrepareTest(&materialize_true, &materialize_false,
2719  &if_true, &if_false, &fall_through);
2720 
2721  __ JumpIfSmi(v0, if_false);
2722  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2723  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2724  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2725  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2726  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
2727 
2728  context()->Plug(if_true, if_false);
2729 }
2730 
2731 
2732 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2733  CallRuntime* expr) {
2734  ZoneList<Expression*>* args = expr->arguments();
2735  ASSERT(args->length() == 1);
2736 
2737  VisitForAccumulatorValue(args->at(0));
2738 
2739  Label materialize_true, materialize_false;
2740  Label* if_true = NULL;
2741  Label* if_false = NULL;
2742  Label* fall_through = NULL;
2743  context()->PrepareTest(&materialize_true, &materialize_false,
2744  &if_true, &if_false, &fall_through);
2745 
2746  if (FLAG_debug_code) __ AbortIfSmi(v0);
2747 
2750  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2751  __ Branch(if_true, ne, t0, Operand(zero_reg));
2752 
2753  // Check for fast case object. Generate false result for slow case object.
2756  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
2757  __ Branch(if_false, eq, a2, Operand(t0));
2758 
2759  // Look for valueOf symbol in the descriptor array, and indicate false if
2760  // found. The type is not checked, so if it is a transition it is a false
2761  // negative.
2762  __ LoadInstanceDescriptors(a1, t0);
2764  // t0: descriptor array
2765  // a3: length of descriptor array
2766  // Calculate the end of the descriptor array.
2767  STATIC_ASSERT(kSmiTag == 0);
2768  STATIC_ASSERT(kSmiTagSize == 1);
2769  STATIC_ASSERT(kPointerSize == 4);
2770  __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2771  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
2772  __ Addu(a2, a2, t1);
2773 
2774  // Calculate location of the first key name.
2775  __ Addu(t0,
2776  t0,
2778  DescriptorArray::kFirstIndex * kPointerSize));
2779  // Loop through all the keys in the descriptor array. If one of these is the
2780  // symbol valueOf the result is false.
2781  Label entry, loop;
2782  // The use of t2 to store the valueOf symbol assumes that it is not otherwise
2783  // used in the loop below.
2784  __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
2785  __ jmp(&entry);
2786  __ bind(&loop);
2787  __ lw(a3, MemOperand(t0, 0));
2788  __ Branch(if_false, eq, a3, Operand(t2));
2789  __ Addu(t0, t0, Operand(kPointerSize));
2790  __ bind(&entry);
2791  __ Branch(&loop, ne, t0, Operand(a2));
2792 
2793  // If a valueOf property is not found on the object, check that its
2794  // prototype is the unmodified String prototype. If not, the result is false.
2796  __ JumpIfSmi(a2, if_false);
2801  __ Branch(if_false, ne, a2, Operand(a3));
2802 
2803  // Set the bit in the map to indicate that it has been checked safe for
2804  // default valueOf and set true result.
2806  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2808  __ jmp(if_true);
2809 
2810  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2811  context()->Plug(if_true, if_false);
2812 }
2813 
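Stripped of the register shuffling, the descriptor walk above asks one question: does any descriptor name the valueOf symbol? Transitions are not filtered out, so the answer can be falsely negative but never falsely positive. A host-side sketch of the same scan, with a std::vector of names standing in for the descriptor array:

    #include <string>
    #include <vector>

    // Conservative check mirroring the loop above: any descriptor named "valueOf"
    // (a transition counts too) makes the object unsafe for the fast default valueOf.
    bool LooksSafeForDefaultValueOf(const std::vector<std::string>& descriptor_names) {
      for (const std::string& name : descriptor_names) {
        if (name == "valueOf") return false;
      }
      return true;
    }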
2814 
2815 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2816  ZoneList<Expression*>* args = expr->arguments();
2817  ASSERT(args->length() == 1);
2818 
2819  VisitForAccumulatorValue(args->at(0));
2820 
2821  Label materialize_true, materialize_false;
2822  Label* if_true = NULL;
2823  Label* if_false = NULL;
2824  Label* fall_through = NULL;
2825  context()->PrepareTest(&materialize_true, &materialize_false,
2826  &if_true, &if_false, &fall_through);
2827 
2828  __ JumpIfSmi(v0, if_false);
2829  __ GetObjectType(v0, a1, a2);
2830  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2831  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
2832  __ Branch(if_false);
2833 
2834  context()->Plug(if_true, if_false);
2835 }
2836 
2837 
2838 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2839  ZoneList<Expression*>* args = expr->arguments();
2840  ASSERT(args->length() == 1);
2841 
2842  VisitForAccumulatorValue(args->at(0));
2843 
2844  Label materialize_true, materialize_false;
2845  Label* if_true = NULL;
2846  Label* if_false = NULL;
2847  Label* fall_through = NULL;
2848  context()->PrepareTest(&materialize_true, &materialize_false,
2849  &if_true, &if_false, &fall_through);
2850 
2851  __ JumpIfSmi(v0, if_false);
2852  __ GetObjectType(v0, a1, a1);
2853  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2854  Split(eq, a1, Operand(JS_ARRAY_TYPE),
2855  if_true, if_false, fall_through);
2856 
2857  context()->Plug(if_true, if_false);
2858 }
2859 
2860 
2861 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2862  ZoneList<Expression*>* args = expr->arguments();
2863  ASSERT(args->length() == 1);
2864 
2865  VisitForAccumulatorValue(args->at(0));
2866 
2867  Label materialize_true, materialize_false;
2868  Label* if_true = NULL;
2869  Label* if_false = NULL;
2870  Label* fall_through = NULL;
2871  context()->PrepareTest(&materialize_true, &materialize_false,
2872  &if_true, &if_false, &fall_through);
2873 
2874  __ JumpIfSmi(v0, if_false);
2875  __ GetObjectType(v0, a1, a1);
2876  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2877  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2878 
2879  context()->Plug(if_true, if_false);
2880 }
2881 
2882 
2883 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2884  ASSERT(expr->arguments()->length() == 0);
2885 
2886  Label materialize_true, materialize_false;
2887  Label* if_true = NULL;
2888  Label* if_false = NULL;
2889  Label* fall_through = NULL;
2890  context()->PrepareTest(&materialize_true, &materialize_false,
2891  &if_true, &if_false, &fall_through);
2892 
2893  // Get the frame pointer for the calling frame.
2895 
2896  // Skip the arguments adaptor frame if it exists.
2897  Label check_frame_marker;
2899  __ Branch(&check_frame_marker, ne,
2902 
2903  // Check the marker in the calling frame.
2904  __ bind(&check_frame_marker);
2906  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2907  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
2908  if_true, if_false, fall_through);
2909 
2910  context()->Plug(if_true, if_false);
2911 }
2912 
2913 
2914 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2915  ZoneList<Expression*>* args = expr->arguments();
2916  ASSERT(args->length() == 2);
2917 
2918  // Load the two objects into registers and perform the comparison.
2919  VisitForStackValue(args->at(0));
2920  VisitForAccumulatorValue(args->at(1));
2921 
2922  Label materialize_true, materialize_false;
2923  Label* if_true = NULL;
2924  Label* if_false = NULL;
2925  Label* fall_through = NULL;
2926  context()->PrepareTest(&materialize_true, &materialize_false,
2927  &if_true, &if_false, &fall_through);
2928 
2929  __ pop(a1);
2930  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2931  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
2932 
2933  context()->Plug(if_true, if_false);
2934 }
2935 
2936 
2937 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2938  ZoneList<Expression*>* args = expr->arguments();
2939  ASSERT(args->length() == 1);
2940 
2941  // ArgumentsAccessStub expects the key in a1 and the formal
2942  // parameter count in a0.
2943  VisitForAccumulatorValue(args->at(0));
2944  __ mov(a1, v0);
2945  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2946  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2947  __ CallStub(&stub);
2948  context()->Plug(v0);
2949 }
2950 
2951 
2952 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2953  ASSERT(expr->arguments()->length() == 0);
2954  Label exit;
2955  // Get the number of formal parameters.
2956  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2957 
2958  // Check if the calling frame is an arguments adaptor frame.
2961  __ Branch(&exit, ne, a3,
2963 
2964  // Arguments adaptor case: Read the arguments length from the
2965  // adaptor frame.
2967 
2968  __ bind(&exit);
2969  context()->Plug(v0);
2970 }
2971 
2972 
2973 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2974  ZoneList<Expression*>* args = expr->arguments();
2975  ASSERT(args->length() == 1);
2976  Label done, null, function, non_function_constructor;
2977 
2978  VisitForAccumulatorValue(args->at(0));
2979 
2980  // If the object is a smi, we return null.
2981  __ JumpIfSmi(v0, &null);
2982 
2983  // Check that the object is a JS object but take special care of JS
2984  // functions to make sure they have 'Function' as their class.
2985  // Assume that there are only two callable types, and one of them is at
2986  // either end of the type range for JS object types. Saves extra comparisons.
2988  __ GetObjectType(v0, v0, a1); // Map is now in v0.
2989  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2990 
2993  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2994 
2996  LAST_SPEC_OBJECT_TYPE - 1);
2997  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
2998  // Assume that there is no larger type.
3000 
3001  // Check if the constructor in the map is a JS function.
3003  __ GetObjectType(v0, a1, a1);
3004  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3005 
3006  // v0 now contains the constructor function. Grab the
3007  // instance class name from there.
3010  __ Branch(&done);
3011 
3012  // Functions have class 'Function'.
3013  __ bind(&function);
3014  __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
3015  __ jmp(&done);
3016 
3017  // Objects with a non-function constructor have class 'Object'.
3018  __ bind(&non_function_constructor);
3019  __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
3020  __ jmp(&done);
3021 
3022  // Non-JS objects have class null.
3023  __ bind(&null);
3024  __ LoadRoot(v0, Heap::kNullValueRootIndex);
3025 
3026  // All done.
3027  __ bind(&done);
3028 
3029  context()->Plug(v0);
3030 }
3031 
3032 
3033 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3034  // Conditionally generate a log call.
3035  // Args:
3036  // 0 (literal string): The type of logging (corresponds to the flags).
3037  // This is used to determine whether or not to generate the log call.
3038  // 1 (string): Format string. Access the string at argument index 2
3039  // with '%2s' (see Logger::LogRuntime for all the formats).
3040  // 2 (array): Arguments to the format string.
3041  ZoneList<Expression*>* args = expr->arguments();
3042  ASSERT_EQ(args->length(), 3);
3043  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
3044  VisitForStackValue(args->at(1));
3045  VisitForStackValue(args->at(2));
3046  __ CallRuntime(Runtime::kLog, 2);
3047  }
3048 
3049  // Finally, we're expected to leave a value on the top of the stack.
3050  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3051  context()->Plug(v0);
3052 }
3053 
3054 
3055 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3056  ASSERT(expr->arguments()->length() == 0);
3057  Label slow_allocate_heapnumber;
3058  Label heapnumber_allocated;
3059 
3060  // Save the new heap number in callee-saved register s0, since
3061  // we call out to external C code below.
3062  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3063  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
3064  __ jmp(&heapnumber_allocated);
3065 
3066  __ bind(&slow_allocate_heapnumber);
3067 
3068  // Allocate a heap number.
3069  __ CallRuntime(Runtime::kNumberAlloc, 0);
3070  __ mov(s0, v0); // Save result in s0, so it survives the C function call.
3071 
3072  __ bind(&heapnumber_allocated);
3073 
3074  // Convert 32 random bits in v0 to 0.(32 random bits) in a double
3075  // by computing:
3076  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
3078  __ PrepareCallCFunction(1, a0);
3081  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3082 
3083  CpuFeatures::Scope scope(FPU);
3084  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3085  __ li(a1, Operand(0x41300000));
3086  // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
3087  __ Move(f12, v0, a1);
3088  // Move 0x4130000000000000 to FPU.
3089  __ Move(f14, zero_reg, a1);
3090  // Subtract and store the result in the heap number.
3091  __ sub_d(f0, f12, f14);
3093  __ mov(v0, s0);
3094  } else {
3095  __ PrepareCallCFunction(2, a0);
3096  __ mov(a0, s0);
3099  __ CallCFunction(
3100  ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
3101  }
3102 
3103  context()->Plug(v0);
3104 }
3105 
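The 0x41300000 trick above assembles the double 1.(20 zero bits)(32 random bits) x 2^20 directly from its bit pattern, so subtracting 1.0 x 2^20 (= 1048576.0) leaves bits / 2^32, a value in [0, 1). A host-side C++ sketch of the same conversion (assumes IEEE-754 doubles; memcpy sidesteps aliasing rules; this is not V8 code):

    #include <cstdint>
    #include <cstring>

    // Turn 32 random bits into a double in [0, 1) without an int-to-double convert.
    double RandomBitsToUnitInterval(uint32_t bits) {
      uint64_t pattern = (static_cast<uint64_t>(0x41300000u) << 32) | bits;
      double d;
      std::memcpy(&d, &pattern, sizeof d);  // 1.(20 zeros)(bits) * 2^20
      return d - 1048576.0;                 // subtract 1.0 * 2^20, leaving bits / 2^32
    }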
3106 
3107 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3108  // Load the arguments on the stack and call the stub.
3109  SubStringStub stub;
3110  ZoneList<Expression*>* args = expr->arguments();
3111  ASSERT(args->length() == 3);
3112  VisitForStackValue(args->at(0));
3113  VisitForStackValue(args->at(1));
3114  VisitForStackValue(args->at(2));
3115  __ CallStub(&stub);
3116  context()->Plug(v0);
3117 }
3118 
3119 
3120 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3121  // Load the arguments on the stack and call the stub.
3122  RegExpExecStub stub;
3123  ZoneList<Expression*>* args = expr->arguments();
3124  ASSERT(args->length() == 4);
3125  VisitForStackValue(args->at(0));
3126  VisitForStackValue(args->at(1));
3127  VisitForStackValue(args->at(2));
3128  VisitForStackValue(args->at(3));
3129  __ CallStub(&stub);
3130  context()->Plug(v0);
3131 }
3132 
3133 
3134 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3135  ZoneList<Expression*>* args = expr->arguments();
3136  ASSERT(args->length() == 1);
3137 
3138  VisitForAccumulatorValue(args->at(0)); // Load the object.
3139 
3140  Label done;
3141  // If the object is a smi return the object.
3142  __ JumpIfSmi(v0, &done);
3143  // If the object is not a value type, return the object.
3144  __ GetObjectType(v0, a1, a1);
3145  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3146 
3147  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3148 
3149  __ bind(&done);
3150  context()->Plug(v0);
3151 }
3152 
3153 
3154 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3155  ZoneList<Expression*>* args = expr->arguments();
3156  ASSERT(args->length() == 2);
3157  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3158  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3159 
3160  VisitForAccumulatorValue(args->at(0)); // Load the object.
3161 
3162  Label runtime, done;
3163  Register object = v0;
3164  Register result = v0;
3165  Register scratch0 = t5;
3166  Register scratch1 = a1;
3167 
3168 #ifdef DEBUG
3169  __ AbortIfSmi(object);
3170  __ GetObjectType(object, scratch1, scratch1);
3171  __ Assert(eq, "Trying to get date field from non-date.",
3172  scratch1, Operand(JS_DATE_TYPE));
3173 #endif
3174 
3175  if (index->value() == 0) {
3176  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3177  } else {
3178  if (index->value() < JSDate::kFirstUncachedField) {
3179  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3180  __ li(scratch1, Operand(stamp));
3181  __ lw(scratch1, MemOperand(scratch1));
3182  __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3183  __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3184  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3185  kPointerSize * index->value()));
3186  __ jmp(&done);
3187  }
3188  __ bind(&runtime);
3189  __ PrepareCallCFunction(2, scratch1);
3190  __ li(a1, Operand(index));
3191  __ Move(a0, object);
3192  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3193  __ bind(&done);
3194  }
3195 
3196  context()->Plug(v0);
3197 }
3198 
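The stamp comparison above is a cache-validity check: each date object records the stamp that was current when its derived fields were last computed, and the isolate bumps a global stamp whenever the date cache is invalidated, so only a matching stamp allows the cached field to be read; otherwise the C function recomputes it. A schematic sketch (the struct and field names are made up for the illustration, not V8's JSDate layout):

    #include <cstdint>

    struct CachedDate {
      double value;          // time in ms since the epoch
      int64_t cache_stamp;   // global stamp captured when the cached fields were filled
      int32_t cached_year;   // one of the stamp-protected cached fields
    };

    int32_t ReadYear(const CachedDate& date, int64_t current_stamp,
                     int32_t (*recompute_year)(double ms_since_epoch)) {
      if (date.cache_stamp == current_stamp) return date.cached_year;  // fast path
      return recompute_year(date.value);                               // slow path into C
    }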
3199 
3200 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3201  // Load the arguments on the stack and call the runtime function.
3202  ZoneList<Expression*>* args = expr->arguments();
3203  ASSERT(args->length() == 2);
3204  VisitForStackValue(args->at(0));
3205  VisitForStackValue(args->at(1));
3206  if (CpuFeatures::IsSupported(FPU)) {
3207  MathPowStub stub(MathPowStub::ON_STACK);
3208  __ CallStub(&stub);
3209  } else {
3210  __ CallRuntime(Runtime::kMath_pow, 2);
3211  }
3212  context()->Plug(v0);
3213 }
3214 
3215 
3216 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3217  ZoneList<Expression*>* args = expr->arguments();
3218  ASSERT(args->length() == 2);
3219 
3220  VisitForStackValue(args->at(0)); // Load the object.
3221  VisitForAccumulatorValue(args->at(1)); // Load the value.
3222  __ pop(a1); // v0 = value. a1 = object.
3223 
3224  Label done;
3225  // If the object is a smi, return the value.
3226  __ JumpIfSmi(a1, &done);
3227 
3228  // If the object is not a value type, return the value.
3229  __ GetObjectType(a1, a2, a2);
3230  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3231 
3232  // Store the value.
3233  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3234  // Update the write barrier. Save the value as it will be
3235  // overwritten by the write barrier code and is needed afterward.
3236  __ mov(a2, v0);
3237  __ RecordWriteField(
3238  a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3239 
3240  __ bind(&done);
3241  context()->Plug(v0);
3242 }
3243 
3244 
3245 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3246  ZoneList<Expression*>* args = expr->arguments();
3247  ASSERT_EQ(args->length(), 1);
3248 
3249  // Load the argument on the stack and call the stub.
3250  VisitForStackValue(args->at(0));
3251 
3252  NumberToStringStub stub;
3253  __ CallStub(&stub);
3254  context()->Plug(v0);
3255 }
3256 
3257 
3258 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3259  ZoneList<Expression*>* args = expr->arguments();
3260  ASSERT(args->length() == 1);
3261 
3262  VisitForAccumulatorValue(args->at(0));
3263 
3264  Label done;
3265  StringCharFromCodeGenerator generator(v0, a1);
3266  generator.GenerateFast(masm_);
3267  __ jmp(&done);
3268 
3269  NopRuntimeCallHelper call_helper;
3270  generator.GenerateSlow(masm_, call_helper);
3271 
3272  __ bind(&done);
3273  context()->Plug(a1);
3274 }
3275 
3276 
3277 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3278  ZoneList<Expression*>* args = expr->arguments();
3279  ASSERT(args->length() == 2);
3280 
3281  VisitForStackValue(args->at(0));
3282  VisitForAccumulatorValue(args->at(1));
3283  __ mov(a0, result_register());
3284 
3285  Register object = a1;
3286  Register index = a0;
3287  Register result = v0;
3288 
3289  __ pop(object);
3290 
3291  Label need_conversion;
3292  Label index_out_of_range;
3293  Label done;
3294  StringCharCodeAtGenerator generator(object,
3295  index,
3296  result,
3297  &need_conversion,
3298  &need_conversion,
3299  &index_out_of_range,
3300  STRING_INDEX_IS_NUMBER);
3301  generator.GenerateFast(masm_);
3302  __ jmp(&done);
3303 
3304  __ bind(&index_out_of_range);
3305  // When the index is out of range, the spec requires us to return
3306  // NaN.
3307  __ LoadRoot(result, Heap::kNanValueRootIndex);
3308  __ jmp(&done);
3309 
3310  __ bind(&need_conversion);
3311  // Load the undefined value into the result register, which will
3312  // trigger conversion.
3313  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3314  __ jmp(&done);
3315 
3316  NopRuntimeCallHelper call_helper;
3317  generator.GenerateSlow(masm_, call_helper);
3318 
3319  __ bind(&done);
3320  context()->Plug(result);
3321 }
3322 
3323 
3324 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3325  ZoneList<Expression*>* args = expr->arguments();
3326  ASSERT(args->length() == 2);
3327 
3328  VisitForStackValue(args->at(0));
3329  VisitForAccumulatorValue(args->at(1));
3330  __ mov(a0, result_register());
3331 
3332  Register object = a1;
3333  Register index = a0;
3334  Register scratch = a3;
3335  Register result = v0;
3336 
3337  __ pop(object);
3338 
3339  Label need_conversion;
3340  Label index_out_of_range;
3341  Label done;
3342  StringCharAtGenerator generator(object,
3343  index,
3344  scratch,
3345  result,
3346  &need_conversion,
3347  &need_conversion,
3348  &index_out_of_range,
3349  STRING_INDEX_IS_NUMBER);
3350  generator.GenerateFast(masm_);
3351  __ jmp(&done);
3352 
3353  __ bind(&index_out_of_range);
3354  // When the index is out of range, the spec requires us to return
3355  // the empty string.
3356  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3357  __ jmp(&done);
3358 
3359  __ bind(&need_conversion);
3360  // Move smi zero into the result register, which will trigger
3361  // conversion.
3362  __ li(result, Operand(Smi::FromInt(0)));
3363  __ jmp(&done);
3364 
3365  NopRuntimeCallHelper call_helper;
3366  generator.GenerateSlow(masm_, call_helper);
3367 
3368  __ bind(&done);
3369  context()->Plug(result);
3370 }
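EmitStringCharCodeAt and EmitStringCharAt above differ only in what they produce for an out-of-range index: the first yields NaN, the second the empty string, matching String.prototype.charCodeAt and charAt. A rough standalone C++ sketch of that behavior (illustrative helper names, assuming an already-converted integer index on a one-byte string):

#include <cmath>
#include <string>

// charCodeAt: out-of-range index produces NaN, otherwise the char code.
double CharCodeAt(const std::string& s, long index) {
  if (index < 0 || static_cast<unsigned long>(index) >= s.size()) return std::nan("");
  return static_cast<unsigned char>(s[index]);
}

// charAt: out-of-range index produces the empty string, otherwise a
// one-character string.
std::string CharAt(const std::string& s, long index) {
  if (index < 0 || static_cast<unsigned long>(index) >= s.size()) return "";
  return std::string(1, s[index]);
}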
3371 
3372 
3373 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3374  ZoneList<Expression*>* args = expr->arguments();
3375  ASSERT_EQ(2, args->length());
3376  VisitForStackValue(args->at(0));
3377  VisitForStackValue(args->at(1));
3378 
3379  StringAddStub stub(NO_STRING_ADD_FLAGS);
3380  __ CallStub(&stub);
3381  context()->Plug(v0);
3382 }
3383 
3384 
3385 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3386  ZoneList<Expression*>* args = expr->arguments();
3387  ASSERT_EQ(2, args->length());
3388 
3389  VisitForStackValue(args->at(0));
3390  VisitForStackValue(args->at(1));
3391 
3392  StringCompareStub stub;
3393  __ CallStub(&stub);
3394  context()->Plug(v0);
3395 }
3396 
3397 
3398 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3399  // Load the argument on the stack and call the stub.
3400  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3401  TranscendentalCacheStub::TAGGED);
3402  ZoneList<Expression*>* args = expr->arguments();
3403  ASSERT(args->length() == 1);
3404  VisitForStackValue(args->at(0));
3405  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3406  __ CallStub(&stub);
3407  context()->Plug(v0);
3408 }
3409 
3410 
3411 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3412  // Load the argument on the stack and call the stub.
3413  TranscendentalCacheStub stub(TranscendentalCache::COS,
3414  TranscendentalCacheStub::TAGGED);
3415  ZoneList<Expression*>* args = expr->arguments();
3416  ASSERT(args->length() == 1);
3417  VisitForStackValue(args->at(0));
3418  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3419  __ CallStub(&stub);
3420  context()->Plug(v0);
3421 }
3422 
3423 
3424 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3425  // Load the argument on the stack and call the stub.
3426  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3427  TranscendentalCacheStub::TAGGED);
3428  ZoneList<Expression*>* args = expr->arguments();
3429  ASSERT(args->length() == 1);
3430  VisitForStackValue(args->at(0));
3431  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3432  __ CallStub(&stub);
3433  context()->Plug(v0);
3434 }
3435 
3436 
3437 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3438  // Load the argument on the stack and call the stub.
3439  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3440  TranscendentalCacheStub::TAGGED);
3441  ZoneList<Expression*>* args = expr->arguments();
3442  ASSERT(args->length() == 1);
3443  VisitForStackValue(args->at(0));
3444  __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3445  __ CallStub(&stub);
3446  context()->Plug(v0);
3447 }
3448 
3449 
3450 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3451  // Load the argument on the stack and call the runtime function.
3452  ZoneList<Expression*>* args = expr->arguments();
3453  ASSERT(args->length() == 1);
3454  VisitForStackValue(args->at(0));
3455  __ CallRuntime(Runtime::kMath_sqrt, 1);
3456  context()->Plug(v0);
3457 }
3458 
3459 
3460 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3461  ZoneList<Expression*>* args = expr->arguments();
3462  ASSERT(args->length() >= 2);
3463 
3464  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3465  for (int i = 0; i < arg_count + 1; i++) {
3466  VisitForStackValue(args->at(i));
3467  }
3468  VisitForAccumulatorValue(args->last()); // Function.
3469 
3470  // Check for proxy.
3471  Label proxy, done;
3472  __ GetObjectType(v0, a1, a1);
3473  __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));
3474 
3475  // InvokeFunction requires the function in a1. Move it in there.
3476  __ mov(a1, result_register());
3477  ParameterCount count(arg_count);
3478  __ InvokeFunction(a1, count, CALL_FUNCTION,
3479  NullCallWrapper(), CALL_AS_METHOD);
3480  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3481  __ jmp(&done);
3482 
3483  __ bind(&proxy);
3484  __ push(v0);
3485  __ CallRuntime(Runtime::kCall, args->length());
3486  __ bind(&done);
3487 
3488  context()->Plug(v0);
3489 }
3490 
3491 
3492 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3493  RegExpConstructResultStub stub;
3494  ZoneList<Expression*>* args = expr->arguments();
3495  ASSERT(args->length() == 3);
3496  VisitForStackValue(args->at(0));
3497  VisitForStackValue(args->at(1));
3498  VisitForStackValue(args->at(2));
3499  __ CallStub(&stub);
3500  context()->Plug(v0);
3501 }
3502 
3503 
3504 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3505  ZoneList<Expression*>* args = expr->arguments();
3506  ASSERT_EQ(2, args->length());
3507 
3508  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3509  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3510 
3511  Handle<FixedArray> jsfunction_result_caches(
3512  isolate()->global_context()->jsfunction_result_caches());
3513  if (jsfunction_result_caches->length() <= cache_id) {
3514  __ Abort("Attempt to use undefined cache.");
3515  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3516  context()->Plug(v0);
3517  return;
3518  }
3519 
3520  VisitForAccumulatorValue(args->at(1));
3521 
3522  Register key = v0;
3523  Register cache = a1;
3524  __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3525  __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3526  __ lw(cache,
3527  ContextOperand(
3528  cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3529  __ lw(cache,
3530  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3531 
3532 
3533  Label done, not_found;
3534  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3535  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3536  // a2 now holds finger offset as a smi.
3537  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3538  // a3 now points to the start of fixed array elements.
3539  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3540  __ addu(a3, a3, at);
3541  // a3 now points to key of indexed element of cache.
3542  __ lw(a2, MemOperand(a3));
3543  __ Branch(&not_found, ne, key, Operand(a2));
3544 
3545  __ lw(v0, MemOperand(a3, kPointerSize));
3546  __ Branch(&done);
3547 
3548  __ bind(&not_found);
3549  // Call runtime to perform the lookup.
3550  __ Push(cache, key);
3551  __ CallRuntime(Runtime::kGetFromCache, 2);
3552 
3553  __ bind(&done);
3554  context()->Plug(v0);
3555 }
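The indexing arithmetic above relies on the 32-bit smi encoding: a small integer i is stored as i << 1, and with 4-byte pointers (kPointerSizeLog2 == 2, kSmiTagSize == 1) one extra left shift turns the smi directly into a byte offset. A minimal sketch, assuming those 32-bit MIPS constants (the helper name is illustrative):

#include <cstdint>

// A smi-encoded index (i << 1) shifted left by (kPointerSizeLog2 - kSmiTagSize)
// gives i * kPointerSize, the byte offset of element i.
int32_t SmiIndexToByteOffset(int32_t smi_index) {
  const int kPointerSizeLog2 = 2;  // 32-bit pointers
  const int kSmiTagSize = 1;       // one tag bit, tag value 0
  return smi_index << (kPointerSizeLog2 - kSmiTagSize);
}
// Example: index 3 is the smi 6; 6 << 1 == 12 == 3 * 4 bytes.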
3556 
3557 
3558 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3559  ZoneList<Expression*>* args = expr->arguments();
3560  ASSERT_EQ(2, args->length());
3561 
3562  Register right = v0;
3563  Register left = a1;
3564  Register tmp = a2;
3565  Register tmp2 = a3;
3566 
3567  VisitForStackValue(args->at(0));
3568  VisitForAccumulatorValue(args->at(1)); // Result (right) in v0.
3569  __ pop(left);
3570 
3571  Label done, fail, ok;
3572  __ Branch(&ok, eq, left, Operand(right));
3573  // Fail if either is a non-HeapObject.
3574  __ And(tmp, left, Operand(right));
3575  __ JumpIfSmi(tmp, &fail);
3576  __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3577  __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3578  __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
3579  __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3580  __ Branch(&fail, ne, tmp, Operand(tmp2));
3581  __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3582  __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3583  __ Branch(&ok, eq, tmp, Operand(tmp2));
3584  __ bind(&fail);
3585  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3586  __ jmp(&done);
3587  __ bind(&ok);
3588  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3589  __ bind(&done);
3590 
3591  context()->Plug(v0);
3592 }
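The fast check above treats two values as equivalent regexps when they are the same object, or when both are JSRegExp instances sharing the same map and the same compiled-data object. A hedged sketch of that decision, with plain structs standing in for the heap objects and assuming both operands are already known to be heap objects of regexp type (the smi and instance-type guards are omitted):

// Stand-in for the two fields the generated code compares.
struct RegExpLike {
  const void* map;   // hidden class
  const void* data;  // compiled regexp data
};

bool IsRegExpEquivalent(const RegExpLike* left, const RegExpLike* right) {
  if (left == right) return true;    // identical objects
  return left->map == right->map &&  // same map
         left->data == right->data;  // same data
}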
3593 
3594 
3595 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3596  ZoneList<Expression*>* args = expr->arguments();
3597  VisitForAccumulatorValue(args->at(0));
3598 
3599  Label materialize_true, materialize_false;
3600  Label* if_true = NULL;
3601  Label* if_false = NULL;
3602  Label* fall_through = NULL;
3603  context()->PrepareTest(&materialize_true, &materialize_false,
3604  &if_true, &if_false, &fall_through);
3605 
3606  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3607  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3608 
3609  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3610  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3611 
3612  context()->Plug(if_true, if_false);
3613 }
3614 
3615 
3616 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3617  ZoneList<Expression*>* args = expr->arguments();
3618  ASSERT(args->length() == 1);
3619  VisitForAccumulatorValue(args->at(0));
3620 
3621  if (FLAG_debug_code) {
3622  __ AbortIfNotString(v0);
3623  }
3624 
3625  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3626  __ IndexFromHash(v0, v0);
3627 
3628  context()->Plug(v0);
3629 }
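Both helpers above work on the string's hash field: a string that caches an array index has the bits selected by String::kContainsCachedArrayIndexMask cleared, and the index itself is then unpacked from the same field by IndexFromHash. A minimal sketch of the mask test (the actual mask value is whatever the heap defines and is not reproduced here):

#include <cstdint>

// True when the hash field signals a cached array index (masked bits are zero).
bool HasCachedArrayIndex(uint32_t hash_field, uint32_t contains_index_mask) {
  return (hash_field & contains_index_mask) == 0;
}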
3630 
3631 
3632 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3633  Label bailout, done, one_char_separator, long_separator,
3634  non_trivial_array, not_size_one_array, loop,
3635  empty_separator_loop, one_char_separator_loop,
3636  one_char_separator_loop_entry, long_separator_loop;
3637  ZoneList<Expression*>* args = expr->arguments();
3638  ASSERT(args->length() == 2);
3639  VisitForStackValue(args->at(1));
3640  VisitForAccumulatorValue(args->at(0));
3641 
3642  // All aliases of the same register have disjoint lifetimes.
3643  Register array = v0;
3644  Register elements = no_reg; // Will be v0.
3645  Register result = no_reg; // Will be v0.
3646  Register separator = a1;
3647  Register array_length = a2;
3648  Register result_pos = no_reg; // Will be a2.
3649  Register string_length = a3;
3650  Register string = t0;
3651  Register element = t1;
3652  Register elements_end = t2;
3653  Register scratch1 = t3;
3654  Register scratch2 = t5;
3655  Register scratch3 = t4;
3656 
3657  // Separator operand is on the stack.
3658  __ pop(separator);
3659 
3660  // Check that the array is a JSArray.
3661  __ JumpIfSmi(array, &bailout);
3662  __ GetObjectType(array, scratch1, scratch2);
3663  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3664 
3665  // Check that the array has fast elements.
3666  __ CheckFastElements(scratch1, scratch2, &bailout);
3667 
3668  // If the array has length zero, return the empty string.
3669  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3670  __ SmiUntag(array_length);
3671  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3672  __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
3673  __ Branch(&done);
3674 
3675  __ bind(&non_trivial_array);
3676 
3677  // Get the FixedArray containing array's elements.
3678  elements = array;
3679  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3680  array = no_reg; // End of array's live range.
3681 
3682  // Check that all array elements are sequential ASCII strings, and
3683  // accumulate the sum of their lengths, as a smi-encoded value.
3684  __ mov(string_length, zero_reg);
3685  __ Addu(element,
3686  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3687  __ sll(elements_end, array_length, kPointerSizeLog2);
3688  __ Addu(elements_end, element, elements_end);
3689  // Loop condition: while (element < elements_end).
3690  // Live values in registers:
3691  // elements: Fixed array of strings.
3692  // array_length: Length of the fixed array of strings (not smi)
3693  // separator: Separator string
3694  // string_length: Accumulated sum of string lengths (smi).
3695  // element: Current array element.
3696  // elements_end: Array end.
3697  if (FLAG_debug_code) {
3698  __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
3699  array_length, Operand(zero_reg));
3700  }
3701  __ bind(&loop);
3702  __ lw(string, MemOperand(element));
3703  __ Addu(element, element, kPointerSize);
3704  __ JumpIfSmi(string, &bailout);
3705  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3706  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3707  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3708  __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3709  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3710  __ BranchOnOverflow(&bailout, scratch3);
3711  __ Branch(&loop, lt, element, Operand(elements_end));
3712 
3713  // If array_length is 1, return elements[0], a string.
3714  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
3715  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3716  __ Branch(&done);
3717 
3718  __ bind(&not_size_one_array);
3719 
3720  // Live values in registers:
3721  // separator: Separator string
3722  // array_length: Length of the array.
3723  // string_length: Sum of string lengths (smi).
3724  // elements: FixedArray of strings.
3725 
3726  // Check that the separator is a flat ASCII string.
3727  __ JumpIfSmi(separator, &bailout);
3728  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3729  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3730  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3731 
3732  // Add (separator length times array_length) - separator length to the
3733  // string_length to get the length of the result string. array_length is not
3734  // smi but the other values are, so the result is a smi.
3735  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3736  __ Subu(string_length, string_length, Operand(scratch1));
3737  __ Mult(array_length, scratch1);
3738  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3739  // zero.
3740  __ mfhi(scratch2);
3741  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
3742  __ mflo(scratch2);
3743  __ And(scratch3, scratch2, Operand(0x80000000));
3744  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
3745  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
3746  __ BranchOnOverflow(&bailout, scratch3);
3747  __ SmiUntag(string_length);
3748 
3749  // Get first element in the array to free up the elements register to be used
3750  // for the result.
3751  __ Addu(element,
3752  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3753  result = elements; // End of live range for elements.
3754  elements = no_reg;
3755  // Live values in registers:
3756  // element: First array element
3757  // separator: Separator string
3758  // string_length: Length of result string (not smi)
3759  // array_length: Length of the array.
3760  __ AllocateAsciiString(result,
3761  string_length,
3762  scratch1,
3763  scratch2,
3764  elements_end,
3765  &bailout);
3766  // Prepare for looping. Set up elements_end to end of the array. Set
3767  // result_pos to the position of the result where to write the first
3768  // character.
3769  __ sll(elements_end, array_length, kPointerSizeLog2);
3770  __ Addu(elements_end, element, elements_end);
3771  result_pos = array_length; // End of live range for array_length.
3772  array_length = no_reg;
3773  __ Addu(result_pos,
3774  result,
3775  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3776 
3777  // Check the length of the separator.
3778  __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3779  __ li(at, Operand(Smi::FromInt(1)));
3780  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
3781  __ Branch(&long_separator, gt, scratch1, Operand(at));
3782 
3783  // Empty separator case.
3784  __ bind(&empty_separator_loop);
3785  // Live values in registers:
3786  // result_pos: the position to which we are currently copying characters.
3787  // element: Current array element.
3788  // elements_end: Array end.
3789 
3790  // Copy next array element to the result.
3791  __ lw(string, MemOperand(element));
3792  __ Addu(element, element, kPointerSize);
3793  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3794  __ SmiUntag(string_length);
3795  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3796  __ CopyBytes(string, result_pos, string_length, scratch1);
3797  // End while (element < elements_end).
3798  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
3799  ASSERT(result.is(v0));
3800  __ Branch(&done);
3801 
3802  // One-character separator case.
3803  __ bind(&one_char_separator);
3804  // Replace separator with its ASCII character value.
3805  __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3806  // Jump into the loop after the code that copies the separator, so the first
3807  // element is not preceded by a separator.
3808  __ jmp(&one_char_separator_loop_entry);
3809 
3810  __ bind(&one_char_separator_loop);
3811  // Live values in registers:
3812  // result_pos: the position to which we are currently copying characters.
3813  // element: Current array element.
3814  // elements_end: Array end.
3815  // separator: Single separator ASCII char (in lower byte).
3816 
3817  // Copy the separator character to the result.
3818  __ sb(separator, MemOperand(result_pos));
3819  __ Addu(result_pos, result_pos, 1);
3820 
3821  // Copy next array element to the result.
3822  __ bind(&one_char_separator_loop_entry);
3823  __ lw(string, MemOperand(element));
3824  __ Addu(element, element, kPointerSize);
3825  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3826  __ SmiUntag(string_length);
3827  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3828  __ CopyBytes(string, result_pos, string_length, scratch1);
3829  // End while (element < elements_end).
3830  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
3831  ASSERT(result.is(v0));
3832  __ Branch(&done);
3833 
3834  // Long separator case (separator is more than one character). Entry is at the
3835  // label long_separator below.
3836  __ bind(&long_separator_loop);
3837  // Live values in registers:
3838  // result_pos: the position to which we are currently copying characters.
3839  // element: Current array element.
3840  // elements_end: Array end.
3841  // separator: Separator string.
3842 
3843  // Copy the separator to the result.
3844  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
3845  __ SmiUntag(string_length);
3846  __ Addu(string,
3847  separator,
3848  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3849  __ CopyBytes(string, result_pos, string_length, scratch1);
3850 
3851  __ bind(&long_separator);
3852  __ lw(string, MemOperand(element));
3853  __ Addu(element, element, kPointerSize);
3854  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3855  __ SmiUntag(string_length);
3856  __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3857  __ CopyBytes(string, result_pos, string_length, scratch1);
3858  // End while (element < elements_end).
3859  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
3860  ASSERT(result.is(v0));
3861  __ Branch(&done);
3862 
3863  __ bind(&bailout);
3864  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3865  __ bind(&done);
3866  context()->Plug(v0);
3867 }
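The separator-length computation in the middle of this function multiplies the smi-encoded separator length by the untagged array length and keeps the result only if it is still a valid smi: the high word of the 64-bit product (mfhi) must be zero and bit 31 of the low word (mflo) must be clear. A standalone sketch of that test, assuming 32-bit smis with one tag bit (the helper name is illustrative):

#include <cstdint>
#include <optional>

// Returns the smi-encoded product, or nullopt when it would not fit in a smi
// (mirrors the mfhi/mflo checks that branch to the bailout).
std::optional<int32_t> SmiTimesIntChecked(int32_t smi_length, int32_t count) {
  int64_t product = static_cast<int64_t>(smi_length) * count;
  int32_t hi = static_cast<int32_t>(product >> 32);  // mfhi
  int32_t lo = static_cast<int32_t>(product);        // mflo
  if (hi != 0) return std::nullopt;                  // upper 32 bits must be zero
  if (lo & 0x80000000) return std::nullopt;          // sign bit must be clear
  return lo;                                         // still a valid smi
}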
3868 
3869 
3870 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3871  Handle<String> name = expr->name();
3872  if (name->length() > 0 && name->Get(0) == '_') {
3873  Comment cmnt(masm_, "[ InlineRuntimeCall");
3874  EmitInlineRuntimeCall(expr);
3875  return;
3876  }
3877 
3878  Comment cmnt(masm_, "[ CallRuntime");
3879  ZoneList<Expression*>* args = expr->arguments();
3880 
3881  if (expr->is_jsruntime()) {
3882  // Prepare for calling JS runtime function.
3883  __ lw(a0, GlobalObjectOperand());
3884  __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
3885  __ push(a0);
3886  }
3887 
3888  // Push the arguments ("left-to-right").
3889  int arg_count = args->length();
3890  for (int i = 0; i < arg_count; i++) {
3891  VisitForStackValue(args->at(i));
3892  }
3893 
3894  if (expr->is_jsruntime()) {
3895  // Call the JS runtime function.
3896  __ li(a2, Operand(expr->name()));
3897  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3898  Handle<Code> ic =
3899  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3900  CallIC(ic, mode, expr->id());
3901  // Restore context register.
3902  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3903  } else {
3904  // Call the C runtime function.
3905  __ CallRuntime(expr->function(), arg_count);
3906  }
3907  context()->Plug(v0);
3908 }
3909 
3910 
3911 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3912  switch (expr->op()) {
3913  case Token::DELETE: {
3914  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3915  Property* property = expr->expression()->AsProperty();
3916  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3917 
3918  if (property != NULL) {
3919  VisitForStackValue(property->obj());
3920  VisitForStackValue(property->key());
3921  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3922  ? kNonStrictMode : kStrictMode;
3923  __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
3924  __ push(a1);
3925  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3926  context()->Plug(v0);
3927  } else if (proxy != NULL) {
3928  Variable* var = proxy->var();
3929  // Delete of an unqualified identifier is disallowed in strict mode
3930  // but "delete this" is allowed.
3931  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3932  if (var->IsUnallocated()) {
3933  __ lw(a2, GlobalObjectOperand());
3934  __ li(a1, Operand(var->name()));
3935  __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3936  __ Push(a2, a1, a0);
3937  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3938  context()->Plug(v0);
3939  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3940  // Result of deleting non-global, non-dynamic variables is false.
3941  // The subexpression does not have side effects.
3942  context()->Plug(var->is_this());
3943  } else {
3944  // Non-global variable. Call the runtime to try to delete from the
3945  // context where the variable was introduced.
3946  __ push(context_register());
3947  __ li(a2, Operand(var->name()));
3948  __ push(a2);
3949  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3950  context()->Plug(v0);
3951  }
3952  } else {
3953  // Result of deleting non-property, non-variable reference is true.
3954  // The subexpression may have side effects.
3955  VisitForEffect(expr->expression());
3956  context()->Plug(true);
3957  }
3958  break;
3959  }
3960 
3961  case Token::VOID: {
3962  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3963  VisitForEffect(expr->expression());
3964  context()->Plug(Heap::kUndefinedValueRootIndex);
3965  break;
3966  }
3967 
3968  case Token::NOT: {
3969  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3970  if (context()->IsEffect()) {
3971  // Unary NOT has no side effects so it's only necessary to visit the
3972  // subexpression. Match the optimizing compiler by not branching.
3973  VisitForEffect(expr->expression());
3974  } else if (context()->IsTest()) {
3975  const TestContext* test = TestContext::cast(context());
3976  // The labels are swapped for the recursive call.
3977  VisitForControl(expr->expression(),
3978  test->false_label(),
3979  test->true_label(),
3980  test->fall_through());
3981  context()->Plug(test->true_label(), test->false_label());
3982  } else {
3983  // We handle value contexts explicitly rather than simply visiting
3984  // for control and plugging the control flow into the context,
3985  // because we need to prepare a pair of extra administrative AST ids
3986  // for the optimizing compiler.
3987  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3988  Label materialize_true, materialize_false, done;
3989  VisitForControl(expr->expression(),
3990  &materialize_false,
3991  &materialize_true,
3992  &materialize_true);
3993  __ bind(&materialize_true);
3994  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3995  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3996  if (context()->IsStackValue()) __ push(v0);
3997  __ jmp(&done);
3998  __ bind(&materialize_false);
3999  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4000  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4001  if (context()->IsStackValue()) __ push(v0);
4002  __ bind(&done);
4003  }
4004  break;
4005  }
4006 
4007  case Token::TYPEOF: {
4008  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4009  { StackValueContext context(this);
4010  VisitForTypeofValue(expr->expression());
4011  }
4012  __ CallRuntime(Runtime::kTypeof, 1);
4013  context()->Plug(v0);
4014  break;
4015  }
4016 
4017  case Token::ADD: {
4018  Comment cmt(masm_, "[ UnaryOperation (ADD)");
4019  VisitForAccumulatorValue(expr->expression());
4020  Label no_conversion;
4021  __ JumpIfSmi(result_register(), &no_conversion);
4022  __ mov(a0, result_register());
4023  ToNumberStub convert_stub;
4024  __ CallStub(&convert_stub);
4025  __ bind(&no_conversion);
4026  context()->Plug(result_register());
4027  break;
4028  }
4029 
4030  case Token::SUB:
4031  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
4032  break;
4033 
4034  case Token::BIT_NOT:
4035  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
4036  break;
4037 
4038  default:
4039  UNREACHABLE();
4040  }
4041 }
4042 
4043 
4044 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
4045  const char* comment) {
4046  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
4047  Comment cmt(masm_, comment);
4048  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
4049  UnaryOverwriteMode overwrite =
4050  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
4051  UnaryOpStub stub(expr->op(), overwrite);
4052  // GenericUnaryOpStub expects the argument to be in a0.
4053  VisitForAccumulatorValue(expr->expression());
4054  SetSourcePosition(expr->position());
4055  __ mov(a0, result_register());
4056  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
4057  context()->Plug(v0);
4058 }
4059 
4060 
4061 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4062  Comment cmnt(masm_, "[ CountOperation");
4063  SetSourcePosition(expr->position());
4064 
4065  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4066  // as the left-hand side.
4067  if (!expr->expression()->IsValidLeftHandSide()) {
4068  VisitForEffect(expr->expression());
4069  return;
4070  }
4071 
4072  // Expression can only be a property, a global or a (parameter or local)
4073  // slot.
4074  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4075  LhsKind assign_type = VARIABLE;
4076  Property* prop = expr->expression()->AsProperty();
4077  // In case of a property we use the uninitialized expression context
4078  // of the key to detect a named property.
4079  if (prop != NULL) {
4080  assign_type =
4081  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4082  }
4083 
4084  // Evaluate expression and get value.
4085  if (assign_type == VARIABLE) {
4086  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4087  AccumulatorValueContext context(this);
4088  EmitVariableLoad(expr->expression()->AsVariableProxy());
4089  } else {
4090  // Reserve space for result of postfix operation.
4091  if (expr->is_postfix() && !context()->IsEffect()) {
4092  __ li(at, Operand(Smi::FromInt(0)));
4093  __ push(at);
4094  }
4095  if (assign_type == NAMED_PROPERTY) {
4096  // Put the object both on the stack and in the accumulator.
4097  VisitForAccumulatorValue(prop->obj());
4098  __ push(v0);
4099  EmitNamedPropertyLoad(prop);
4100  } else {
4101  VisitForStackValue(prop->obj());
4102  VisitForAccumulatorValue(prop->key());
4103  __ lw(a1, MemOperand(sp, 0));
4104  __ push(v0);
4105  EmitKeyedPropertyLoad(prop);
4106  }
4107  }
4108 
4109  // We need a second deoptimization point after loading the value
4110  // in case evaluating the property load may have a side effect.
4111  if (assign_type == VARIABLE) {
4112  PrepareForBailout(expr->expression(), TOS_REG);
4113  } else {
4114  PrepareForBailoutForId(expr->CountId(), TOS_REG);
4115  }
4116 
4117  // Call ToNumber only if operand is not a smi.
4118  Label no_conversion;
4119  __ JumpIfSmi(v0, &no_conversion);
4120  __ mov(a0, v0);
4121  ToNumberStub convert_stub;
4122  __ CallStub(&convert_stub);
4123  __ bind(&no_conversion);
4124 
4125  // Save result for postfix expressions.
4126  if (expr->is_postfix()) {
4127  if (!context()->IsEffect()) {
4128  // Save the result on the stack. If we have a named or keyed property
4129  // we store the result under the receiver that is currently on top
4130  // of the stack.
4131  switch (assign_type) {
4132  case VARIABLE:
4133  __ push(v0);
4134  break;
4135  case NAMED_PROPERTY:
4136  __ sw(v0, MemOperand(sp, kPointerSize));
4137  break;
4138  case KEYED_PROPERTY:
4139  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4140  break;
4141  }
4142  }
4143  }
4144  __ mov(a0, result_register());
4145 
4146  // Inline smi case if we are in a loop.
4147  Label stub_call, done;
4148  JumpPatchSite patch_site(masm_);
4149 
4150  int count_value = expr->op() == Token::INC ? 1 : -1;
4151  __ li(a1, Operand(Smi::FromInt(count_value)));
4152 
4153  if (ShouldInlineSmiCase(expr->op())) {
4154  __ AdduAndCheckForOverflow(v0, a0, a1, t0);
4155  __ BranchOnOverflow(&stub_call, t0); // Do stub on overflow.
4156 
4157  // We could eliminate this smi check if we split the code at
4158  // the first smi check before calling ToNumber.
4159  patch_site.EmitJumpIfSmi(v0, &done);
4160  __ bind(&stub_call);
4161  }
4162 
4163  // Record position before stub call.
4164  SetSourcePosition(expr->position());
4165 
4166  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4167  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4168  patch_site.EmitPatchInfo();
4169  __ bind(&done);
4170 
4171  // Store the value returned in v0.
4172  switch (assign_type) {
4173  case VARIABLE:
4174  if (expr->is_postfix()) {
4175  { EffectContext context(this);
4176  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4177  Token::ASSIGN);
4178  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4179  context.Plug(v0);
4180  }
4181  // For all contexts except EffectContext we have the result on
4182  // top of the stack.
4183  if (!context()->IsEffect()) {
4184  context()->PlugTOS();
4185  }
4186  } else {
4187  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4188  Token::ASSIGN);
4189  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4190  context()->Plug(v0);
4191  }
4192  break;
4193  case NAMED_PROPERTY: {
4194  __ mov(a0, result_register()); // Value.
4195  __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name.
4196  __ pop(a1); // Receiver.
4197  Handle<Code> ic = is_classic_mode()
4198  ? isolate()->builtins()->StoreIC_Initialize()
4199  : isolate()->builtins()->StoreIC_Initialize_Strict();
4200  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4201  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4202  if (expr->is_postfix()) {
4203  if (!context()->IsEffect()) {
4204  context()->PlugTOS();
4205  }
4206  } else {
4207  context()->Plug(v0);
4208  }
4209  break;
4210  }
4211  case KEYED_PROPERTY: {
4212  __ mov(a0, result_register()); // Value.
4213  __ pop(a1); // Key.
4214  __ pop(a2); // Receiver.
4215  Handle<Code> ic = is_classic_mode()
4216  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4217  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4218  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4219  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4220  if (expr->is_postfix()) {
4221  if (!context()->IsEffect()) {
4222  context()->PlugTOS();
4223  }
4224  } else {
4225  context()->Plug(v0);
4226  }
4227  break;
4228  }
4229  }
4230 }
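The inlined fast path above adds the smi-encoded count (Smi::FromInt(+1) or (-1), i.e. a raw +2 or -2) to the old value and falls back to the BinaryOpStub only on signed overflow or when the result is not a smi. A small sketch of the overflow side of that check, done with 64-bit arithmetic instead of AdduAndCheckForOverflow (the helper name is illustrative):

#include <cstdint>
#include <optional>

// Adds two smi-encoded values; nullopt means "take the stub_call path".
std::optional<int32_t> SmiAddChecked(int32_t smi, int32_t smi_delta) {
  int64_t sum = static_cast<int64_t>(smi) + smi_delta;
  if (sum < INT32_MIN || sum > INT32_MAX) return std::nullopt;  // overflow
  return static_cast<int32_t>(sum);
}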
4231 
4232 
4233 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4234  ASSERT(!context()->IsEffect());
4235  ASSERT(!context()->IsTest());
4236  VariableProxy* proxy = expr->AsVariableProxy();
4237  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4238  Comment cmnt(masm_, "Global variable");
4239  __ lw(a0, GlobalObjectOperand());
4240  __ li(a2, Operand(proxy->name()));
4241  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4242  // Use a regular load, not a contextual load, to avoid a reference
4243  // error.
4244  CallIC(ic);
4245  PrepareForBailout(expr, TOS_REG);
4246  context()->Plug(v0);
4247  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4248  Label done, slow;
4249 
4250  // Generate code for loading from variables potentially shadowed
4251  // by eval-introduced variables.
4252  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4253 
4254  __ bind(&slow);
4255  __ li(a0, Operand(proxy->name()));
4256  __ Push(cp, a0);
4257  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4258  PrepareForBailout(expr, TOS_REG);
4259  __ bind(&done);
4260 
4261  context()->Plug(v0);
4262  } else {
4263  // This expression cannot throw a reference error at the top level.
4264  VisitInDuplicateContext(expr);
4265  }
4266 }
4267 
4268 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4269  Expression* sub_expr,
4270  Handle<String> check) {
4271  Label materialize_true, materialize_false;
4272  Label* if_true = NULL;
4273  Label* if_false = NULL;
4274  Label* fall_through = NULL;
4275  context()->PrepareTest(&materialize_true, &materialize_false,
4276  &if_true, &if_false, &fall_through);
4277 
4278  { AccumulatorValueContext context(this);
4279  VisitForTypeofValue(sub_expr);
4280  }
4281  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4282 
4283  if (check->Equals(isolate()->heap()->number_symbol())) {
4284  __ JumpIfSmi(v0, if_true);
4285  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4286  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4287  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4288  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4289  __ JumpIfSmi(v0, if_false);
4290  // Check for undetectable objects => false.
4291  __ GetObjectType(v0, v0, a1);
4292  __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4293  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4294  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4295  Split(eq, a1, Operand(zero_reg),
4296  if_true, if_false, fall_through);
4297  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4298  __ LoadRoot(at, Heap::kTrueValueRootIndex);
4299  __ Branch(if_true, eq, v0, Operand(at));
4300  __ LoadRoot(at, Heap::kFalseValueRootIndex);
4301  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4302  } else if (FLAG_harmony_typeof &&
4303  check->Equals(isolate()->heap()->null_symbol())) {
4304  __ LoadRoot(at, Heap::kNullValueRootIndex);
4305  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4306  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4307  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4308  __ Branch(if_true, eq, v0, Operand(at));
4309  __ JumpIfSmi(v0, if_false);
4310  // Check for undetectable objects => true.
4311  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4312  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4313  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4314  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4315  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4316  __ JumpIfSmi(v0, if_false);
4318  __ GetObjectType(v0, v0, a1);
4319  __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4320  Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4321  if_true, if_false, fall_through);
4322  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4323  __ JumpIfSmi(v0, if_false);
4324  if (!FLAG_harmony_typeof) {
4325  __ LoadRoot(at, Heap::kNullValueRootIndex);
4326  __ Branch(if_true, eq, v0, Operand(at));
4327  }
4328  // Check for JS objects => true.
4329  __ GetObjectType(v0, v0, a1);
4330  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4332  __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4333  // Check for undetectable objects => false.
4334  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4335  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4336  Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4337  } else {
4338  if (if_false != fall_through) __ jmp(if_false);
4339  }
4340  context()->Plug(if_true, if_false);
4341 }
4342 
4343 
4344 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4345  Comment cmnt(masm_, "[ CompareOperation");
4346  SetSourcePosition(expr->position());
4347 
4348  // First we try a fast inlined version of the compare when one of
4349  // the operands is a literal.
4350  if (TryLiteralCompare(expr)) return;
4351 
4352  // Always perform the comparison for its control flow. Pack the result
4353  // into the expression's context after the comparison is performed.
4354  Label materialize_true, materialize_false;
4355  Label* if_true = NULL;
4356  Label* if_false = NULL;
4357  Label* fall_through = NULL;
4358  context()->PrepareTest(&materialize_true, &materialize_false,
4359  &if_true, &if_false, &fall_through);
4360 
4361  Token::Value op = expr->op();
4362  VisitForStackValue(expr->left());
4363  switch (op) {
4364  case Token::IN:
4365  VisitForStackValue(expr->right());
4366  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4367  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4368  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4369  Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4370  break;
4371 
4372  case Token::INSTANCEOF: {
4373  VisitForStackValue(expr->right());
4374  InstanceofStub stub(InstanceofStub::kNoFlags);
4375  __ CallStub(&stub);
4376  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4377  // The stub returns 0 for true.
4378  Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4379  break;
4380  }
4381 
4382  default: {
4383  VisitForAccumulatorValue(expr->right());
4384  Condition cc = eq;
4385  switch (op) {
4386  case Token::EQ_STRICT:
4387  case Token::EQ:
4388  cc = eq;
4389  break;
4390  case Token::LT:
4391  cc = lt;
4392  break;
4393  case Token::GT:
4394  cc = gt;
4395  break;
4396  case Token::LTE:
4397  cc = le;
4398  break;
4399  case Token::GTE:
4400  cc = ge;
4401  break;
4402  case Token::IN:
4403  case Token::INSTANCEOF:
4404  default:
4405  UNREACHABLE();
4406  }
4407  __ mov(a0, result_register());
4408  __ pop(a1);
4409 
4410  bool inline_smi_code = ShouldInlineSmiCase(op);
4411  JumpPatchSite patch_site(masm_);
4412  if (inline_smi_code) {
4413  Label slow_case;
4414  __ Or(a2, a0, Operand(a1));
4415  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4416  Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4417  __ bind(&slow_case);
4418  }
4419  // Record position and call the compare IC.
4420  SetSourcePosition(expr->position());
4421  Handle<Code> ic = CompareIC::GetUninitialized(op);
4422  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4423  patch_site.EmitPatchInfo();
4424  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4425  Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4426  }
4427  }
4428 
4429  // Convert the result of the comparison into one expected for this
4430  // expression's context.
4431  context()->Plug(if_true, if_false);
4432 }
4433 
4434 
4435 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4436  Expression* sub_expr,
4437  NilValue nil) {
4438  Label materialize_true, materialize_false;
4439  Label* if_true = NULL;
4440  Label* if_false = NULL;
4441  Label* fall_through = NULL;
4442  context()->PrepareTest(&materialize_true, &materialize_false,
4443  &if_true, &if_false, &fall_through);
4444 
4445  VisitForAccumulatorValue(sub_expr);
4446  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4447  Heap::RootListIndex nil_value = nil == kNullValue ?
4448  Heap::kNullValueRootIndex :
4449  Heap::kUndefinedValueRootIndex;
4450  __ mov(a0, result_register());
4451  __ LoadRoot(a1, nil_value);
4452  if (expr->op() == Token::EQ_STRICT) {
4453  Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4454  } else {
4455  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4456  Heap::kUndefinedValueRootIndex :
4457  Heap::kNullValueRootIndex;
4458  __ Branch(if_true, eq, a0, Operand(a1));
4459  __ LoadRoot(a1, other_nil_value);
4460  __ Branch(if_true, eq, a0, Operand(a1));
4461  __ JumpIfSmi(a0, if_false);
4462  // It can be an undetectable object.
4463  __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
4464  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
4465  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4466  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4467  }
4468  context()->Plug(if_true, if_false);
4469 }
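The comparison generated here distinguishes strict from sloppy nil checks: x === null (or === undefined) matches only that exact value, while x == null also accepts the other nil value and undetectable objects. A hedged sketch of the decision, with an illustrative enum standing in for real heap values:

// Illustrative value kinds; undetectable objects are host objects whose map
// has the is_undetectable bit set.
enum ValueKind { kNullKind, kUndefinedKind, kUndetectableKind, kOtherKind };

bool CompareNil(ValueKind value, ValueKind nil, bool strict) {
  if (strict) return value == nil;  // Token::EQ_STRICT: exact match only
  return value == kNullKind ||
         value == kUndefinedKind ||
         value == kUndetectableKind;
}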
4470 
4471 
4472 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4473  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4474  context()->Plug(v0);
4475 }
4476 
4477 
4478 Register FullCodeGenerator::result_register() {
4479  return v0;
4480 }
4481 
4482 
4483 Register FullCodeGenerator::context_register() {
4484  return cp;
4485 }
4486 
4487 
4488 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4489  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4490  __ sw(value, MemOperand(fp, frame_offset));
4491 }
4492 
4493 
4494 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4495  __ lw(dst, ContextOperand(cp, context_index));
4496 }
4497 
4498 
4499 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4500  Scope* declaration_scope = scope()->DeclarationScope();
4501  if (declaration_scope->is_global_scope() ||
4502  declaration_scope->is_module_scope()) {
4503  // Contexts nested in the global context have a canonical empty function
4504  // as their closure, not the anonymous closure containing the global
4505  // code. Pass a smi sentinel and let the runtime look up the empty
4506  // function.
4507  __ li(at, Operand(Smi::FromInt(0)));
4508  } else if (declaration_scope->is_eval_scope()) {
4509  // Contexts created by a call to eval have the same closure as the
4510  // context calling eval, not the anonymous closure containing the eval
4511  // code. Fetch it from the context.
4512  __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4513  } else {
4514  ASSERT(declaration_scope->is_function_scope());
4515  __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4516  }
4517  __ push(at);
4518 }
4519 
4520 
4521 // ----------------------------------------------------------------------------
4522 // Non-local control flow support.
4523 
4524 void FullCodeGenerator::EnterFinallyBlock() {
4525  ASSERT(!result_register().is(a1));
4526  // Store result register while executing finally block.
4527  __ push(result_register());
4528  // Cook return address in link register to stack (smi encoded Code* delta).
4529  __ Subu(a1, ra, Operand(masm_->CodeObject()));
4531  STATIC_ASSERT(0 == kSmiTag);
4532  __ Addu(a1, a1, Operand(a1)); // Convert to smi.
4533 
4534  // Store result register while executing finally block.
4535  __ push(a1);
4536 
4537  // Store pending message while executing finally block.
4538  ExternalReference pending_message_obj =
4539  ExternalReference::address_of_pending_message_obj(isolate());
4540  __ li(at, Operand(pending_message_obj));
4541  __ lw(a1, MemOperand(at));
4542  __ push(a1);
4543 
4544  ExternalReference has_pending_message =
4545  ExternalReference::address_of_has_pending_message(isolate());
4546  __ li(at, Operand(has_pending_message));
4547  __ lw(a1, MemOperand(at));
4548  __ push(a1);
4549 
4550  ExternalReference pending_message_script =
4551  ExternalReference::address_of_pending_message_script(isolate());
4552  __ li(at, Operand(pending_message_script));
4553  __ lw(a1, MemOperand(at));
4554  __ push(a1);
4555 }
4556 
4557 
4558 void FullCodeGenerator::ExitFinallyBlock() {
4559  ASSERT(!result_register().is(a1));
4560  // Restore pending message from stack.
4561  __ pop(a1);
4562  ExternalReference pending_message_script =
4563  ExternalReference::address_of_pending_message_script(isolate());
4564  __ li(at, Operand(pending_message_script));
4565  __ sw(a1, MemOperand(at));
4566 
4567  __ pop(a1);
4568  ExternalReference has_pending_message =
4569  ExternalReference::address_of_has_pending_message(isolate());
4570  __ li(at, Operand(has_pending_message));
4571  __ sw(a1, MemOperand(at));
4572 
4573  __ pop(a1);
4574  ExternalReference pending_message_obj =
4575  ExternalReference::address_of_pending_message_obj(isolate());
4576  __ li(at, Operand(pending_message_obj));
4577  __ sw(a1, MemOperand(at));
4578 
4579  // Restore result register from stack.
4580  __ pop(a1);
4581 
4582  // Uncook return address and return.
4583  __ pop(result_register());
4585  __ sra(a1, a1, 1); // Un-smi-tag value.
4586  __ Addu(at, a1, Operand(masm_->CodeObject()));
4587  __ Jump(at);
4588 }
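EnterFinallyBlock and ExitFinallyBlock "cook" the return address so that what sits on the stack across the finally block is a smi rather than a raw code address: the delta from the code object is doubled (smi-encoded) on entry and shifted back and re-based on exit. A minimal sketch of the two transformations, assuming 32-bit addresses (the helper names are illustrative):

#include <cstdint>

// Entry: ra - CodeObject(), then doubled so it looks like a smi to the GC.
int32_t CookReturnAddress(int32_t ra, int32_t code_object) {
  int32_t delta = ra - code_object;
  return delta + delta;  // Addu(a1, a1, a1): smi-encode the delta
}

// Exit: arithmetic shift right by one undoes the smi tag, then re-add the base.
int32_t UncookReturnAddress(int32_t cooked, int32_t code_object) {
  return (cooked >> 1) + code_object;  // sra(a1, a1, 1); Addu(at, a1, CodeObject())
}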
4589 
4590 
4591 #undef __
4592 
4593 #define __ ACCESS_MASM(masm())
4594 
4595 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4596  int* stack_depth,
4597  int* context_length) {
4598  // The macros used here must preserve the result register.
4599 
4600  // Because the handler block contains the context of the finally
4601  // code, we can restore it directly from there for the finally code
4602  // rather than iteratively unwinding contexts via their previous
4603  // links.
4604  __ Drop(*stack_depth); // Down to the handler block.
4605  if (*context_length > 0) {
4606  // Restore the context to its dedicated register and the stack.
4607  __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4608  __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4609  }
4610  __ PopTryHandler();
4611  __ Call(finally_entry_);
4612 
4613  *stack_depth = 0;
4614  *context_length = 0;
4615  return previous_;
4616 }
4617 
4618 
4619 #undef __
4620 
4621 } } // namespace v8::internal
4622 
4623 #endif // V8_TARGET_ARCH_MIPS
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 