v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
full-codegen-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_MIPS
31 
32 // Note on Mips implementation:
33 //
34 // The result_register() for mips is the 'v0' register, which is defined
35 // by the ABI to contain function return values. However, the first
36 // parameter to a function is defined to be 'a0'. So there are many
37 // places where we have to move a previous result in v0 to a0 for the
38 // next call: mov(a0, v0). This is not needed on the other architectures.
39 
40 #include "code-stubs.h"
41 #include "codegen.h"
42 #include "compiler.h"
43 #include "debug.h"
44 #include "full-codegen.h"
45 #include "isolate-inl.h"
46 #include "parser.h"
47 #include "scopes.h"
48 #include "stub-cache.h"
49 
50 #include "mips/code-stubs-mips.h"
51 #include "mips/macro-assembler-mips.h"
52 
53 namespace v8 {
54 namespace internal {
55 
56 #define __ ACCESS_MASM(masm_)
57 
58 
59 // A patch site is a location in the code which it is possible to patch. This
60 // class has a number of methods to emit the code which is patchable and the
61 // method EmitPatchInfo to record a marker back to the patchable code. This
62 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
63 // (raw 16 bit immediate value is used) is the delta from the pc to the first
64 // instruction of the patchable code.
65 // The marker instruction is effectively a NOP (dest is zero_reg) and will
66 // never be emitted by normal code.
67 class JumpPatchSite BASE_EMBEDDED {
68  public:
69  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
70 #ifdef DEBUG
71  info_emitted_ = false;
72 #endif
73  }
74 
75  ~JumpPatchSite() {
76  ASSERT(patch_site_.is_bound() == info_emitted_);
77  }
78 
79  // When initially emitting this, ensure that a jump is always generated to skip
80  // the inlined smi code.
81  void EmitJumpIfNotSmi(Register reg, Label* target) {
82  ASSERT(!patch_site_.is_bound() && !info_emitted_);
83  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
84  __ bind(&patch_site_);
85  __ andi(at, reg, 0);
86  // Always taken before patched.
87  __ BranchShort(target, eq, at, Operand(zero_reg));
88  }
89 
90  // When initially emitting this, ensure that a jump is never generated to skip
91  // the inlined smi code.
92  void EmitJumpIfSmi(Register reg, Label* target) {
93  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
94  ASSERT(!patch_site_.is_bound() && !info_emitted_);
95  __ bind(&patch_site_);
96  __ andi(at, reg, 0);
97  // Never taken before patched.
98  __ BranchShort(target, ne, at, Operand(zero_reg));
99  }
100 
101  void EmitPatchInfo() {
102  if (patch_site_.is_bound()) {
103  int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
104  Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
105  __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
106 #ifdef DEBUG
107  info_emitted_ = true;
108 #endif
109  } else {
110  __ nop(); // Signals no inlined code.
111  }
112  }
113 
114  private:
115  MacroAssembler* masm_;
116  Label patch_site_;
117 #ifdef DEBUG
118  bool info_emitted_;
119 #endif
120 };
121 
122 
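// Emits an inline stack-limit check. When 'pointers' is non-zero, the check
// is made against sp minus the space about to be reserved (computed into
// 'scratch'); if the limit is crossed, the StackCheck builtin is called.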
123 static void EmitStackCheck(MacroAssembler* masm_,
124  Register stack_limit_scratch,
125  int pointers = 0,
126  Register scratch = sp) {
127  Isolate* isolate = masm_->isolate();
128  Label ok;
129  ASSERT(scratch.is(sp) == (pointers == 0));
130  if (pointers != 0) {
131  __ Subu(scratch, sp, Operand(pointers * kPointerSize));
132  }
133  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
134  __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
135  PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize);
136  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
137  __ bind(&ok);
138 }
139 
140 
141 // Generate code for a JS function. On entry to the function the receiver
142 // and arguments have been pushed on the stack left to right. The actual
143 // argument count matches the formal parameter count expected by the
144 // function.
145 //
146 // The live registers are:
147 // o a1: the JS function object being called (i.e. ourselves)
148 // o cp: our context
149 // o fp: our caller's frame pointer
150 // o sp: stack pointer
151 // o ra: return address
152 //
153 // The function builds a JS frame. Please see JavaScriptFrameConstants in
154 // frames-mips.h for its layout.
155 void FullCodeGenerator::Generate() {
156  CompilationInfo* info = info_;
157  handler_table_ =
158  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
159 
160  InitializeFeedbackVector();
161 
162  profiling_counter_ = isolate()->factory()->NewCell(
163  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
164  SetFunctionPosition(function());
165  Comment cmnt(masm_, "[ function compiled by full code generator");
166 
167  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
168 
169 #ifdef DEBUG
170  if (strlen(FLAG_stop_at) > 0 &&
171  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
172  __ stop("stop-at");
173  }
174 #endif
175 
176  // Sloppy mode functions and builtins need to replace the receiver with the
177  // global proxy when called as functions (without an explicit receiver
178  // object).
179  if (info->strict_mode() == SLOPPY && !info->is_native()) {
180  Label ok;
181  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
182  __ lw(at, MemOperand(sp, receiver_offset));
183  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
184  __ Branch(&ok, ne, a2, Operand(at));
185 
186  __ lw(a2, GlobalObjectOperand());
187  __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
188 
189  __ sw(a2, MemOperand(sp, receiver_offset));
190 
191  __ bind(&ok);
192  }
193 
194  // Open a frame scope to indicate that there is a frame on the stack. The
195  // MANUAL indicates that the scope shouldn't actually generate code to set up
196  // the frame (that is done below).
197  FrameScope frame_scope(masm_, StackFrame::MANUAL);
198 
199  info->set_prologue_offset(masm_->pc_offset());
200  __ Prologue(BUILD_FUNCTION_FRAME);
201  info->AddNoFrameRange(0, masm_->pc_offset());
202 
203  { Comment cmnt(masm_, "[ Allocate locals");
204  int locals_count = info->scope()->num_stack_slots();
205  // Generators allocate locals, if any, in context slots.
206  ASSERT(!info->function()->is_generator() || locals_count == 0);
207  if (locals_count > 0) {
208  if (locals_count >= 128) {
209  EmitStackCheck(masm_, a2, locals_count, t5);
210  }
211  __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
212  int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
213  if (locals_count >= kMaxPushes) {
214  int loop_iterations = locals_count / kMaxPushes;
215  __ li(a2, Operand(loop_iterations));
216  Label loop_header;
217  __ bind(&loop_header);
218  // Do pushes.
219  __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
220  for (int i = 0; i < kMaxPushes; i++) {
221  __ sw(t5, MemOperand(sp, i * kPointerSize));
222  }
223  // Continue loop if not done.
224  __ Subu(a2, a2, Operand(1));
225  __ Branch(&loop_header, ne, a2, Operand(zero_reg));
226  }
227  int remaining = locals_count % kMaxPushes;
228  // Emit the remaining pushes.
229  __ Subu(sp, sp, Operand(remaining * kPointerSize));
230  for (int i = 0; i < remaining; i++) {
231  __ sw(t5, MemOperand(sp, i * kPointerSize));
232  }
233  }
234  }
235 
236  bool function_in_register = true;
237 
238  // Possibly allocate a local context.
239  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
240  if (heap_slots > 0) {
241  Comment cmnt(masm_, "[ Allocate context");
242  // Argument to NewContext is the function, which is still in a1.
243  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
244  __ push(a1);
245  __ Push(info->scope()->GetScopeInfo());
246  __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
247  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
248  FastNewContextStub stub(heap_slots);
249  __ CallStub(&stub);
250  } else {
251  __ push(a1);
252  __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
253  }
254  function_in_register = false;
255  // Context is returned in v0. It replaces the context passed to us.
256  // It's saved in the stack and kept live in cp.
257  __ mov(cp, v0);
258  __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
259  // Copy any necessary parameters into the context.
260  int num_parameters = info->scope()->num_parameters();
261  for (int i = 0; i < num_parameters; i++) {
262  Variable* var = scope()->parameter(i);
263  if (var->IsContextSlot()) {
264  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
265  (num_parameters - 1 - i) * kPointerSize;
266  // Load parameter from stack.
267  __ lw(a0, MemOperand(fp, parameter_offset));
268  // Store it in the context.
269  MemOperand target = ContextOperand(cp, var->index());
270  __ sw(a0, target);
271 
272  // Update the write barrier.
273  __ RecordWriteContextSlot(
274  cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
275  }
276  }
277  }
278 
279  Variable* arguments = scope()->arguments();
280  if (arguments != NULL) {
281  // Function uses arguments object.
282  Comment cmnt(masm_, "[ Allocate arguments object");
283  if (!function_in_register) {
284  // Load this again, if it's used by the local context below.
285  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
286  } else {
287  __ mov(a3, a1);
288  }
289  // Receiver is just before the parameters on the caller's stack.
290  int num_parameters = info->scope()->num_parameters();
291  int offset = num_parameters * kPointerSize;
292  __ Addu(a2, fp,
293  Operand(StandardFrameConstants::kCallerSPOffset + offset));
294  __ li(a1, Operand(Smi::FromInt(num_parameters)));
295  __ Push(a3, a2, a1);
296 
297  // Arguments to ArgumentsAccessStub:
298  // function, receiver address, parameter count.
299  // The stub will rewrite receiver and parameter count if the previous
300  // stack frame was an arguments adapter frame.
301  ArgumentsAccessStub::Type type;
302  if (strict_mode() == STRICT) {
303  type = ArgumentsAccessStub::NEW_STRICT;
304  } else if (function()->has_duplicate_parameters()) {
305  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
306  } else {
307  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
308  }
309  ArgumentsAccessStub stub(type);
310  __ CallStub(&stub);
311 
312  SetVar(arguments, v0, a1, a2);
313  }
314 
315  if (FLAG_trace) {
316  __ CallRuntime(Runtime::kTraceEnter, 0);
317  }
318 
319  // Visit the declarations and body unless there is an illegal
320  // redeclaration.
321  if (scope()->HasIllegalRedeclaration()) {
322  Comment cmnt(masm_, "[ Declarations");
323  scope()->VisitIllegalRedeclaration(this);
324 
325  } else {
326  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
327  { Comment cmnt(masm_, "[ Declarations");
328  // For named function expressions, declare the function name as a
329  // constant.
330  if (scope()->is_function_scope() && scope()->function() != NULL) {
331  VariableDeclaration* function = scope()->function();
332  ASSERT(function->proxy()->var()->mode() == CONST ||
333  function->proxy()->var()->mode() == CONST_LEGACY);
334  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
335  VisitVariableDeclaration(function);
336  }
337  VisitDeclarations(scope()->declarations());
338  }
339 
340  { Comment cmnt(masm_, "[ Stack check");
341  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
342  EmitStackCheck(masm_, at);
343  }
344 
345  { Comment cmnt(masm_, "[ Body");
346  ASSERT(loop_depth() == 0);
347  VisitStatements(function()->body());
348  ASSERT(loop_depth() == 0);
349  }
350  }
351 
352  // Always emit a 'return undefined' in case control fell off the end of
353  // the body.
354  { Comment cmnt(masm_, "[ return <undefined>;");
355  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
356  }
357  EmitReturnSequence();
358 }
359 
360 
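// Clears the accumulator (v0) by loading the Smi zero into it.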
361 void FullCodeGenerator::ClearAccumulator() {
362  ASSERT(Smi::FromInt(0) == 0);
363  __ mov(v0, zero_reg);
364 }
365 
366 
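// Decrements the Smi value held in the profiling counter cell by 'delta'.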
367 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
368  __ li(a2, Operand(profiling_counter_));
369  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
370  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
371  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
372 }
373 
374 
375 void FullCodeGenerator::EmitProfilingCounterReset() {
376  int reset_value = FLAG_interrupt_budget;
377  if (isolate()->IsDebuggerActive()) {
378  // Detect debug break requests as soon as possible.
379  reset_value = FLAG_interrupt_budget >> 4;
380  }
381  __ li(a2, Operand(profiling_counter_));
382  __ li(a3, Operand(Smi::FromInt(reset_value)));
383  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
384 }
385 
386 
387 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
388  Label* back_edge_target) {
389  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
390  // to make sure it is constant. Branch may emit a skip-or-jump sequence
391  // instead of the normal Branch. It seems that the "skip" part of that
392  // sequence is about as long as this Branch would be so it is safe to ignore
393  // that.
394  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
395  Comment cmnt(masm_, "[ Back edge bookkeeping");
396  Label ok;
397  ASSERT(back_edge_target->is_bound());
398  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
399  int weight = Min(kMaxBackEdgeWeight,
400  Max(1, distance / kCodeSizeMultiplier));
401  EmitProfilingCounterDecrement(weight);
402  __ slt(at, a3, zero_reg);
403  __ beq(at, zero_reg, &ok);
404  // Call will emit a li t9 first, so it is safe to use the delay slot.
405  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
406  // Record a mapping of this PC offset to the OSR id. This is used to find
407  // the AST id from the unoptimized code in order to use it as a key into
408  // the deoptimization input data found in the optimized code.
409  RecordBackEdge(stmt->OsrEntryId());
410  EmitProfilingCounterReset();
411 
412  __ bind(&ok);
413  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
414  // Record a mapping of the OSR id to this PC. This is used if the OSR
415  // entry becomes the target of a bailout. We don't expect it to be, but
416  // we want it to work if it is.
417  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
418 }
419 
420 
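// Emits the return sequence. If the return label is already bound this just
// jumps to it; otherwise it emits optional tracing, an interrupt check, and
// a fixed-size frame-teardown sequence whose length is checked in DEBUG mode.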
421 void FullCodeGenerator::EmitReturnSequence() {
422  Comment cmnt(masm_, "[ Return sequence");
423  if (return_label_.is_bound()) {
424  __ Branch(&return_label_);
425  } else {
426  __ bind(&return_label_);
427  if (FLAG_trace) {
428  // Push the return value on the stack as the parameter.
429  // Runtime::TraceExit returns its parameter in v0.
430  __ push(v0);
431  __ CallRuntime(Runtime::kTraceExit, 1);
432  }
433  // Pretend that the exit is a backwards jump to the entry.
434  int weight = 1;
435  if (info_->ShouldSelfOptimize()) {
436  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
437  } else {
438  int distance = masm_->pc_offset();
439  weight = Min(kMaxBackEdgeWeight,
440  Max(1, distance / kCodeSizeMultiplier));
441  }
442  EmitProfilingCounterDecrement(weight);
443  Label ok;
444  __ Branch(&ok, ge, a3, Operand(zero_reg));
445  __ push(v0);
446  __ Call(isolate()->builtins()->InterruptCheck(),
447  RelocInfo::CODE_TARGET);
448  __ pop(v0);
449  EmitProfilingCounterReset();
450  __ bind(&ok);
451 
452 #ifdef DEBUG
453  // Add a label for checking the size of the code used for returning.
454  Label check_exit_codesize;
455  masm_->bind(&check_exit_codesize);
456 #endif
457  // Make sure that the constant pool is not emitted inside of the return
458  // sequence.
459  { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
460  // Here we use masm_-> instead of the __ macro to avoid the code coverage
461  // tool from instrumenting as we rely on the code size here.
462  int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
463  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
464  __ RecordJSReturn();
465  masm_->mov(sp, fp);
466  int no_frame_start = masm_->pc_offset();
467  masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
468  masm_->Addu(sp, sp, Operand(sp_delta));
469  masm_->Jump(ra);
470  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
471  }
472 
473 #ifdef DEBUG
474  // Check that the size of the code used for returning is large enough
475  // for the debugger's requirements.
476  ASSERT(Assembler::kJSReturnSequenceInstructions <=
477  masm_->InstructionsGeneratedSince(&check_exit_codesize));
478 #endif
479  }
480 }
481 
482 
483 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
484  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
485 }
486 
487 
488 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
489  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
490  codegen()->GetVar(result_register(), var);
491 }
492 
493 
494 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
495  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
496  codegen()->GetVar(result_register(), var);
497  __ push(result_register());
498 }
499 
500 
501 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
502  // For simplicity we always test the accumulator register.
503  codegen()->GetVar(result_register(), var);
504  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
505  codegen()->DoTest(this);
506 }
507 
508 
509 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
510 }
511 
512 
513 void FullCodeGenerator::AccumulatorValueContext::Plug(
514  Heap::RootListIndex index) const {
515  __ LoadRoot(result_register(), index);
516 }
517 
518 
519 void FullCodeGenerator::StackValueContext::Plug(
520  Heap::RootListIndex index) const {
521  __ LoadRoot(result_register(), index);
522  __ push(result_register());
523 }
524 
525 
526 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
527  codegen()->PrepareForBailoutBeforeSplit(condition(),
528  true,
529  true_label_,
530  false_label_);
531  if (index == Heap::kUndefinedValueRootIndex ||
532  index == Heap::kNullValueRootIndex ||
533  index == Heap::kFalseValueRootIndex) {
534  if (false_label_ != fall_through_) __ Branch(false_label_);
535  } else if (index == Heap::kTrueValueRootIndex) {
536  if (true_label_ != fall_through_) __ Branch(true_label_);
537  } else {
538  __ LoadRoot(result_register(), index);
539  codegen()->DoTest(this);
540  }
541 }
542 
543 
544 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
545 }
546 
547 
548 void FullCodeGenerator::AccumulatorValueContext::Plug(
549  Handle<Object> lit) const {
550  __ li(result_register(), Operand(lit));
551 }
552 
553 
554 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
555  // Immediates cannot be pushed directly.
556  __ li(result_register(), Operand(lit));
557  __ push(result_register());
558 }
559 
560 
561 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
562  codegen()->PrepareForBailoutBeforeSplit(condition(),
563  true,
564  true_label_,
565  false_label_);
566  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
567  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
568  if (false_label_ != fall_through_) __ Branch(false_label_);
569  } else if (lit->IsTrue() || lit->IsJSObject()) {
570  if (true_label_ != fall_through_) __ Branch(true_label_);
571  } else if (lit->IsString()) {
572  if (String::cast(*lit)->length() == 0) {
573  if (false_label_ != fall_through_) __ Branch(false_label_);
574  } else {
575  if (true_label_ != fall_through_) __ Branch(true_label_);
576  }
577  } else if (lit->IsSmi()) {
578  if (Smi::cast(*lit)->value() == 0) {
579  if (false_label_ != fall_through_) __ Branch(false_label_);
580  } else {
581  if (true_label_ != fall_through_) __ Branch(true_label_);
582  }
583  } else {
584  // For simplicity we always test the accumulator register.
585  __ li(result_register(), Operand(lit));
586  codegen()->DoTest(this);
587  }
588 }
589 
590 
591 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
592  Register reg) const {
593  ASSERT(count > 0);
594  __ Drop(count);
595 }
596 
597 
598 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
599  int count,
600  Register reg) const {
601  ASSERT(count > 0);
602  __ Drop(count);
603  __ Move(result_register(), reg);
604 }
605 
606 
607 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
608  Register reg) const {
609  ASSERT(count > 0);
610  if (count > 1) __ Drop(count - 1);
611  __ sw(reg, MemOperand(sp, 0));
612 }
613 
614 
615 void FullCodeGenerator::TestContext::DropAndPlug(int count,
616  Register reg) const {
617  ASSERT(count > 0);
618  // For simplicity we always test the accumulator register.
619  __ Drop(count);
620  __ Move(result_register(), reg);
621  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
622  codegen()->DoTest(this);
623 }
624 
625 
626 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
627  Label* materialize_false) const {
628  ASSERT(materialize_true == materialize_false);
629  __ bind(materialize_true);
630 }
631 
632 
633 void FullCodeGenerator::AccumulatorValueContext::Plug(
634  Label* materialize_true,
635  Label* materialize_false) const {
636  Label done;
637  __ bind(materialize_true);
638  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
639  __ Branch(&done);
640  __ bind(materialize_false);
641  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
642  __ bind(&done);
643 }
644 
645 
646 void FullCodeGenerator::StackValueContext::Plug(
647  Label* materialize_true,
648  Label* materialize_false) const {
649  Label done;
650  __ bind(materialize_true);
651  __ LoadRoot(at, Heap::kTrueValueRootIndex);
652  // Push the value as the following branch can clobber at in long branch mode.
653  __ push(at);
654  __ Branch(&done);
655  __ bind(materialize_false);
656  __ LoadRoot(at, Heap::kFalseValueRootIndex);
657  __ push(at);
658  __ bind(&done);
659 }
660 
661 
662 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
663  Label* materialize_false) const {
664  ASSERT(materialize_true == true_label_);
665  ASSERT(materialize_false == false_label_);
666 }
667 
668 
669 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
670 }
671 
672 
673 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
674  Heap::RootListIndex value_root_index =
675  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
676  __ LoadRoot(result_register(), value_root_index);
677 }
678 
679 
680 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
681  Heap::RootListIndex value_root_index =
682  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
683  __ LoadRoot(at, value_root_index);
684  __ push(at);
685 }
686 
687 
688 void FullCodeGenerator::TestContext::Plug(bool flag) const {
689  codegen()->PrepareForBailoutBeforeSplit(condition(),
690  true,
691  true_label_,
692  false_label_);
693  if (flag) {
694  if (true_label_ != fall_through_) __ Branch(true_label_);
695  } else {
696  if (false_label_ != fall_through_) __ Branch(false_label_);
697  }
698 }
699 
700 
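// Evaluates 'condition' for control flow: the value in the accumulator is
// passed to the ToBoolean stub, and a non-zero result in v0 selects if_true.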
701 void FullCodeGenerator::DoTest(Expression* condition,
702  Label* if_true,
703  Label* if_false,
704  Label* fall_through) {
705  __ mov(a0, result_register());
706  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
707  CallIC(ic, condition->test_id());
708  __ mov(at, zero_reg);
709  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
710 }
711 
712 
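// Branches to 'if_true' when 'cc' holds for lhs/rhs and to 'if_false'
// otherwise, omitting whichever branch is the fall-through label.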
713 void FullCodeGenerator::Split(Condition cc,
714  Register lhs,
715  const Operand& rhs,
716  Label* if_true,
717  Label* if_false,
718  Label* fall_through) {
719  if (if_false == fall_through) {
720  __ Branch(if_true, cc, lhs, rhs);
721  } else if (if_true == fall_through) {
722  __ Branch(if_false, NegateCondition(cc), lhs, rhs);
723  } else {
724  __ Branch(if_true, cc, lhs, rhs);
725  __ Branch(if_false);
726  }
727 }
728 
729 
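// Returns the fp-relative operand for a stack-allocated (parameter or local)
// variable.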
730 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
731  ASSERT(var->IsStackAllocated());
732  // Offset is negative because higher indexes are at lower addresses.
733  int offset = -var->index() * kPointerSize;
734  // Adjust by a (parameter or local) base offset.
735  if (var->IsParameter()) {
736  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
737  } else {
738  offset += JavaScriptFrameConstants::kLocal0Offset;
739  }
740  return MemOperand(fp, offset);
741 }
742 
743 
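// Returns the operand for 'var'; for context slots the holding context is
// first loaded into 'scratch'.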
744 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
745  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
746  if (var->IsContextSlot()) {
747  int context_chain_length = scope()->ContextChainLength(var->scope());
748  __ LoadContext(scratch, context_chain_length);
749  return ContextOperand(scratch, var->index());
750  } else {
751  return StackOperand(var);
752  }
753 }
754 
755 
756 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
757  // Use destination as scratch.
758  MemOperand location = VarOperand(var, dest);
759  __ lw(dest, location);
760 }
761 
762 
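// Stores 'src' into the variable's slot and emits the write barrier when the
// slot lives in a heap-allocated context.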
763 void FullCodeGenerator::SetVar(Variable* var,
764  Register src,
765  Register scratch0,
766  Register scratch1) {
767  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
768  ASSERT(!scratch0.is(src));
769  ASSERT(!scratch0.is(scratch1));
770  ASSERT(!scratch1.is(src));
771  MemOperand location = VarOperand(var, scratch0);
772  __ sw(src, location);
773  // Emit the write barrier code if the location is in the heap.
774  if (var->IsContextSlot()) {
775  __ RecordWriteContextSlot(scratch0,
776  location.offset(),
777  src,
778  scratch1,
779  kRAHasBeenSaved,
780  kDontSaveFPRegs);
781  }
782 }
783 
784 
785 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
786  bool should_normalize,
787  Label* if_true,
788  Label* if_false) {
789  // Only prepare for bailouts before splits if we're in a test
790  // context. Otherwise, we let the Visit function deal with the
791  // preparation to avoid preparing with the same AST id twice.
792  if (!context()->IsTest() || !info_->IsOptimizable()) return;
793 
794  Label skip;
795  if (should_normalize) __ Branch(&skip);
796  PrepareForBailout(expr, TOS_REG);
797  if (should_normalize) {
798  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
799  Split(eq, a0, Operand(t0), if_true, if_false, NULL);
800  __ bind(&skip);
801  }
802 }
803 
804 
805 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
806  // The variable in the declaration always resides in the current function
807  // context.
808  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
809  if (generate_debug_code_) {
810  // Check that we're not inside a with or catch context.
811  __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
812  __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
813  __ Check(ne, kDeclarationInWithContext,
814  a1, Operand(t0));
815  __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
816  __ Check(ne, kDeclarationInCatchContext,
817  a1, Operand(t0));
818  }
819 }
820 
821 
822 void FullCodeGenerator::VisitVariableDeclaration(
823  VariableDeclaration* declaration) {
824  // If it was not possible to allocate the variable at compile time, we
825  // need to "declare" it at runtime to make sure it actually exists in the
826  // local context.
827  VariableProxy* proxy = declaration->proxy();
828  VariableMode mode = declaration->mode();
829  Variable* variable = proxy->var();
830  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
831  switch (variable->location()) {
832  case Variable::UNALLOCATED:
833  globals_->Add(variable->name(), zone());
834  globals_->Add(variable->binding_needs_init()
835  ? isolate()->factory()->the_hole_value()
836  : isolate()->factory()->undefined_value(),
837  zone());
838  break;
839 
840  case Variable::PARAMETER:
841  case Variable::LOCAL:
842  if (hole_init) {
843  Comment cmnt(masm_, "[ VariableDeclaration");
844  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
845  __ sw(t0, StackOperand(variable));
846  }
847  break;
848 
849  case Variable::CONTEXT:
850  if (hole_init) {
851  Comment cmnt(masm_, "[ VariableDeclaration");
852  EmitDebugCheckDeclarationContext(variable);
853  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
854  __ sw(at, ContextOperand(cp, variable->index()));
855  // No write barrier since the_hole_value is in old space.
856  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
857  }
858  break;
859 
860  case Variable::LOOKUP: {
861  Comment cmnt(masm_, "[ VariableDeclaration");
862  __ li(a2, Operand(variable->name()));
863  // Declaration nodes are always introduced in one of four modes.
864  ASSERT(IsDeclaredVariableMode(mode));
865  PropertyAttributes attr =
866  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
867  __ li(a1, Operand(Smi::FromInt(attr)));
868  // Push initial value, if any.
869  // Note: For variables we must not push an initial value (such as
870  // 'undefined') because we may have a (legal) redeclaration and we
871  // must not destroy the current value.
872  if (hole_init) {
873  __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
874  __ Push(cp, a2, a1, a0);
875  } else {
876  ASSERT(Smi::FromInt(0) == 0);
877  __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
878  __ Push(cp, a2, a1, a0);
879  }
880  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
881  break;
882  }
883  }
884 }
885 
886 
887 void FullCodeGenerator::VisitFunctionDeclaration(
888  FunctionDeclaration* declaration) {
889  VariableProxy* proxy = declaration->proxy();
890  Variable* variable = proxy->var();
891  switch (variable->location()) {
892  case Variable::UNALLOCATED: {
893  globals_->Add(variable->name(), zone());
894  Handle<SharedFunctionInfo> function =
895  Compiler::BuildFunctionInfo(declaration->fun(), script());
896  // Check for stack-overflow exception.
897  if (function.is_null()) return SetStackOverflow();
898  globals_->Add(function, zone());
899  break;
900  }
901 
902  case Variable::PARAMETER:
903  case Variable::LOCAL: {
904  Comment cmnt(masm_, "[ FunctionDeclaration");
905  VisitForAccumulatorValue(declaration->fun());
906  __ sw(result_register(), StackOperand(variable));
907  break;
908  }
909 
910  case Variable::CONTEXT: {
911  Comment cmnt(masm_, "[ FunctionDeclaration");
912  EmitDebugCheckDeclarationContext(variable);
913  VisitForAccumulatorValue(declaration->fun());
914  __ sw(result_register(), ContextOperand(cp, variable->index()));
915  int offset = Context::SlotOffset(variable->index());
916  // We know that we have written a function, which is not a smi.
917  __ RecordWriteContextSlot(cp,
918  offset,
919  result_register(),
920  a2,
921  kRAHasBeenSaved,
922  kDontSaveFPRegs,
923  EMIT_REMEMBERED_SET,
924  OMIT_SMI_CHECK);
925  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
926  break;
927  }
928 
929  case Variable::LOOKUP: {
930  Comment cmnt(masm_, "[ FunctionDeclaration");
931  __ li(a2, Operand(variable->name()));
932  __ li(a1, Operand(Smi::FromInt(NONE)));
933  __ Push(cp, a2, a1);
934  // Push initial value for function declaration.
935  VisitForStackValue(declaration->fun());
936  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
937  break;
938  }
939  }
940 }
941 
942 
943 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
944  Variable* variable = declaration->proxy()->var();
945  ASSERT(variable->location() == Variable::CONTEXT);
946  ASSERT(variable->interface()->IsFrozen());
947 
948  Comment cmnt(masm_, "[ ModuleDeclaration");
949  EmitDebugCheckDeclarationContext(variable);
950 
951  // Load instance object.
952  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
953  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
954  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
955 
956  // Assign it.
957  __ sw(a1, ContextOperand(cp, variable->index()));
958  // We know that we have written a module, which is not a smi.
959  __ RecordWriteContextSlot(cp,
960  Context::SlotOffset(variable->index()),
961  a1,
962  a3,
963  kRAHasBeenSaved,
964  kDontSaveFPRegs,
965  EMIT_REMEMBERED_SET,
966  OMIT_SMI_CHECK);
967  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
968 
969  // Traverse into body.
970  Visit(declaration->module());
971 }
972 
973 
974 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
975  VariableProxy* proxy = declaration->proxy();
976  Variable* variable = proxy->var();
977  switch (variable->location()) {
978  case Variable::UNALLOCATED:
979  // TODO(rossberg)
980  break;
981 
982  case Variable::CONTEXT: {
983  Comment cmnt(masm_, "[ ImportDeclaration");
984  EmitDebugCheckDeclarationContext(variable);
985  // TODO(rossberg)
986  break;
987  }
988 
989  case Variable::PARAMETER:
990  case Variable::LOCAL:
991  case Variable::LOOKUP:
992  UNREACHABLE();
993  }
994 }
995 
996 
997 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
998  // TODO(rossberg)
999 }
1000 
1001 
1002 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
1003  // Call the runtime to declare the globals.
1004  // The context is the first argument.
1005  __ li(a1, Operand(pairs));
1006  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
1007  __ Push(cp, a1, a0);
1008  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
1009  // Return value is ignored.
1010 }
1011 
1012 
1013 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1014  // Call the runtime to declare the modules.
1015  __ Push(descriptions);
1016  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
1017  // Return value is ignored.
1018 }
1019 
1020 
1021 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1022  Comment cmnt(masm_, "[ SwitchStatement");
1023  Breakable nested_statement(this, stmt);
1024  SetStatementPosition(stmt);
1025 
1026  // Keep the switch value on the stack until a case matches.
1027  VisitForStackValue(stmt->tag());
1028  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1029 
1030  ZoneList<CaseClause*>* clauses = stmt->cases();
1031  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1032 
1033  Label next_test; // Recycled for each test.
1034  // Compile all the tests with branches to their bodies.
1035  for (int i = 0; i < clauses->length(); i++) {
1036  CaseClause* clause = clauses->at(i);
1037  clause->body_target()->Unuse();
1038 
1039  // The default is not a test, but remember it as final fall through.
1040  if (clause->is_default()) {
1041  default_clause = clause;
1042  continue;
1043  }
1044 
1045  Comment cmnt(masm_, "[ Case comparison");
1046  __ bind(&next_test);
1047  next_test.Unuse();
1048 
1049  // Compile the label expression.
1050  VisitForAccumulatorValue(clause->label());
1051  __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1052 
1053  // Perform the comparison as if via '==='.
1054  __ lw(a1, MemOperand(sp, 0)); // Switch value.
1055  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1056  JumpPatchSite patch_site(masm_);
1057  if (inline_smi_code) {
1058  Label slow_case;
1059  __ or_(a2, a1, a0);
1060  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1061 
1062  __ Branch(&next_test, ne, a1, Operand(a0));
1063  __ Drop(1); // Switch value is no longer needed.
1064  __ Branch(clause->body_target());
1065 
1066  __ bind(&slow_case);
1067  }
1068 
1069  // Record position before stub call for type feedback.
1070  SetSourcePosition(clause->position());
1071  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1072  CallIC(ic, clause->CompareId());
1073  patch_site.EmitPatchInfo();
1074 
1075  Label skip;
1076  __ Branch(&skip);
1077  PrepareForBailout(clause, TOS_REG);
1078  __ LoadRoot(at, Heap::kTrueValueRootIndex);
1079  __ Branch(&next_test, ne, v0, Operand(at));
1080  __ Drop(1);
1081  __ Branch(clause->body_target());
1082  __ bind(&skip);
1083 
1084  __ Branch(&next_test, ne, v0, Operand(zero_reg));
1085  __ Drop(1); // Switch value is no longer needed.
1086  __ Branch(clause->body_target());
1087  }
1088 
1089  // Discard the test value and jump to the default if present, otherwise to
1090  // the end of the statement.
1091  __ bind(&next_test);
1092  __ Drop(1); // Switch value is no longer needed.
1093  if (default_clause == NULL) {
1094  __ Branch(nested_statement.break_label());
1095  } else {
1096  __ Branch(default_clause->body_target());
1097  }
1098 
1099  // Compile all the case bodies.
1100  for (int i = 0; i < clauses->length(); i++) {
1101  Comment cmnt(masm_, "[ Case body");
1102  CaseClause* clause = clauses->at(i);
1103  __ bind(clause->body_target());
1104  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1105  VisitStatements(clause->statements());
1106  }
1107 
1108  __ bind(nested_statement.break_label());
1109  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1110 }
1111 
1112 
1113 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1114  Comment cmnt(masm_, "[ ForInStatement");
1115  int slot = stmt->ForInFeedbackSlot();
1116  SetStatementPosition(stmt);
1117 
1118  Label loop, exit;
1119  ForIn loop_statement(this, stmt);
1120  increment_loop_depth();
1121 
1122  // Get the object to enumerate over. If the object is null or undefined, skip
1123  // over the loop. See ECMA-262 version 5, section 12.6.4.
1124  VisitForAccumulatorValue(stmt->enumerable());
1125  __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1126  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1127  __ Branch(&exit, eq, a0, Operand(at));
1128  Register null_value = t1;
1129  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1130  __ Branch(&exit, eq, a0, Operand(null_value));
1131  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1132  __ mov(a0, v0);
1133  // Convert the object to a JS object.
1134  Label convert, done_convert;
1135  __ JumpIfSmi(a0, &convert);
1136  __ GetObjectType(a0, a1, a1);
1137  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1138  __ bind(&convert);
1139  __ push(a0);
1140  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1141  __ mov(a0, v0);
1142  __ bind(&done_convert);
1143  __ push(a0);
1144 
1145  // Check for proxies.
1146  Label call_runtime;
1147  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1148  __ GetObjectType(a0, a1, a1);
1149  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1150 
1151  // Check cache validity in generated code. This is a fast case for
1152  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1153  // guarantee cache validity, call the runtime system to check cache
1154  // validity or get the property names in a fixed array.
1155  __ CheckEnumCache(null_value, &call_runtime);
1156 
1157  // The enum cache is valid. Load the map of the object being
1158  // iterated over and use the cache for the iteration.
1159  Label use_cache;
1160  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1161  __ Branch(&use_cache);
1162 
1163  // Get the set of properties to enumerate.
1164  __ bind(&call_runtime);
1165  __ push(a0); // Duplicate the enumerable object on the stack.
1166  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1167 
1168  // If we got a map from the runtime call, we can do a fast
1169  // modification check. Otherwise, we got a fixed array, and we have
1170  // to do a slow check.
1171  Label fixed_array;
1172  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1173  __ LoadRoot(at, Heap::kMetaMapRootIndex);
1174  __ Branch(&fixed_array, ne, a2, Operand(at));
1175 
1176  // We got a map in register v0. Get the enumeration cache from it.
1177  Label no_descriptors;
1178  __ bind(&use_cache);
1179 
1180  __ EnumLength(a1, v0);
1181  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1182 
1183  __ LoadInstanceDescriptors(v0, a2);
1184  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1185  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1186 
1187  // Set up the four remaining stack slots.
1188  __ li(a0, Operand(Smi::FromInt(0)));
1189  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1190  __ Push(v0, a2, a1, a0);
1191  __ jmp(&loop);
1192 
1193  __ bind(&no_descriptors);
1194  __ Drop(1);
1195  __ jmp(&exit);
1196 
1197  // We got a fixed array in register v0. Iterate through that.
1198  Label non_proxy;
1199  __ bind(&fixed_array);
1200 
1201  Handle<Object> feedback = Handle<Object>(
1202  Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1203  isolate());
1204  StoreFeedbackVectorSlot(slot, feedback);
1205  __ li(a1, FeedbackVector());
1206  __ li(a2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
1207  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
1208 
1209  __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1210  __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1211  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1212  __ GetObjectType(a2, a3, a3);
1213  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1214  __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1215  __ bind(&non_proxy);
1216  __ Push(a1, v0); // Smi and array
1217  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1218  __ li(a0, Operand(Smi::FromInt(0)));
1219  __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1220 
1221  // Generate code for doing the condition check.
1222  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1223  __ bind(&loop);
1224  // Load the current count to a0, load the length to a1.
1225  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1226  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1227  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1228 
1229  // Get the current entry of the array into register a3.
1230  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1231  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1232  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1233  __ addu(t0, a2, t0); // Array base + scaled (smi) index.
1234  __ lw(a3, MemOperand(t0)); // Current entry.
1235 
1236  // Get the expected map from the stack or a smi in the
1237  // permanent slow case into register a2.
1238  __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1239 
1240  // Check if the expected map still matches that of the enumerable.
1241  // If not, we may have to filter the key.
1242  Label update_each;
1243  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1244  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1245  __ Branch(&update_each, eq, t0, Operand(a2));
1246 
1247  // For proxies, no filtering is done.
1248  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1249  ASSERT_EQ(Smi::FromInt(0), 0);
1250  __ Branch(&update_each, eq, a2, Operand(zero_reg));
1251 
1252  // Convert the entry to a string or (smi) 0 if it isn't a property
1253  // any more. If the property has been removed while iterating, we
1254  // just skip it.
1255  __ Push(a1, a3); // Enumerable and current entry.
1256  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1257  __ mov(a3, result_register());
1258  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1259 
1260  // Update the 'each' property or variable from the possibly filtered
1261  // entry in register a3.
1262  __ bind(&update_each);
1263  __ mov(result_register(), a3);
1264  // Perform the assignment as if via '='.
1265  { EffectContext context(this);
1266  EmitAssignment(stmt->each());
1267  }
1268 
1269  // Generate code for the body of the loop.
1270  Visit(stmt->body());
1271 
1272  // Generate code for the going to the next element by incrementing
1273  // the index (smi) stored on top of the stack.
1274  __ bind(loop_statement.continue_label());
1275  __ pop(a0);
1276  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1277  __ push(a0);
1278 
1279  EmitBackEdgeBookkeeping(stmt, &loop);
1280  __ Branch(&loop);
1281 
1282  // Remove the pointers stored on the stack.
1283  __ bind(loop_statement.break_label());
1284  __ Drop(5);
1285 
1286  // Exit and decrement the loop depth.
1287  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1288  __ bind(&exit);
1289  decrement_loop_depth();
1290 }
1291 
1292 
1293 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1294  Comment cmnt(masm_, "[ ForOfStatement");
1295  SetStatementPosition(stmt);
1296 
1297  Iteration loop_statement(this, stmt);
1298  increment_loop_depth();
1299 
1300  // var iterator = iterable[@@iterator]()
1301  VisitForAccumulatorValue(stmt->assign_iterator());
1302  __ mov(a0, v0);
1303 
1304  // As with for-in, skip the loop if the iterator is null or undefined.
1305  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1306  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
1307  __ LoadRoot(at, Heap::kNullValueRootIndex);
1308  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
1309 
1310  // Convert the iterator to a JS object.
1311  Label convert, done_convert;
1312  __ JumpIfSmi(a0, &convert);
1313  __ GetObjectType(a0, a1, a1);
1314  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1315  __ bind(&convert);
1316  __ push(a0);
1317  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1318  __ mov(a0, v0);
1319  __ bind(&done_convert);
1320  __ push(a0);
1321 
1322  // Loop entry.
1323  __ bind(loop_statement.continue_label());
1324 
1325  // result = iterator.next()
1326  VisitForEffect(stmt->next_result());
1327 
1328  // if (result.done) break;
1329  Label result_not_done;
1330  VisitForControl(stmt->result_done(),
1331  loop_statement.break_label(),
1332  &result_not_done,
1333  &result_not_done);
1334  __ bind(&result_not_done);
1335 
1336  // each = result.value
1337  VisitForEffect(stmt->assign_each());
1338 
1339  // Generate code for the body of the loop.
1340  Visit(stmt->body());
1341 
1342  // Check stack before looping.
1343  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1344  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1345  __ jmp(loop_statement.continue_label());
1346 
1347  // Exit and decrement the loop depth.
1348  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1349  __ bind(loop_statement.break_label());
1350  decrement_loop_depth();
1351 }
1352 
1353 
1354 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1355  bool pretenure) {
1356  // Use the fast case closure allocation code that allocates in new
1357  // space for nested functions that don't need literals cloning. If
1358  // we're running with the --always-opt or the --prepare-always-opt
1359  // flag, we need to use the runtime function so that the new function
1360  // we are creating here gets a chance to have its code optimized and
1361  // doesn't just get a copy of the existing unoptimized code.
1362  if (!FLAG_always_opt &&
1363  !FLAG_prepare_always_opt &&
1364  !pretenure &&
1365  scope()->is_function_scope() &&
1366  info->num_literals() == 0) {
1367  FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1368  __ li(a2, Operand(info));
1369  __ CallStub(&stub);
1370  } else {
1371  __ li(a0, Operand(info));
1372  __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1373  : Heap::kFalseValueRootIndex);
1374  __ Push(cp, a0, a1);
1375  __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1376  }
1377  context()->Plug(v0);
1378 }
1379 
1380 
1381 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1382  Comment cmnt(masm_, "[ VariableProxy");
1383  EmitVariableLoad(expr);
1384 }
1385 
1386 
1387 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1388  TypeofState typeof_state,
1389  Label* slow) {
1390  Register current = cp;
1391  Register next = a1;
1392  Register temp = a2;
1393 
1394  Scope* s = scope();
1395  while (s != NULL) {
1396  if (s->num_heap_slots() > 0) {
1397  if (s->calls_sloppy_eval()) {
1398  // Check that extension is NULL.
1399  __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1400  __ Branch(slow, ne, temp, Operand(zero_reg));
1401  }
1402  // Load next context in chain.
1403  __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1404  // Walk the rest of the chain without clobbering cp.
1405  current = next;
1406  }
1407  // If no outer scope calls eval, we do not need to check more
1408  // context extensions.
1409  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1410  s = s->outer_scope();
1411  }
1412 
1413  if (s->is_eval_scope()) {
1414  Label loop, fast;
1415  if (!current.is(next)) {
1416  __ Move(next, current);
1417  }
1418  __ bind(&loop);
1419  // Terminate at native context.
1420  __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1421  __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
1422  __ Branch(&fast, eq, temp, Operand(t0));
1423  // Check that extension is NULL.
1424  __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1425  __ Branch(slow, ne, temp, Operand(zero_reg));
1426  // Load next context in chain.
1427  __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1428  __ Branch(&loop);
1429  __ bind(&fast);
1430  }
1431 
1432  __ lw(a0, GlobalObjectOperand());
1433  __ li(a2, Operand(var->name()));
1434  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1435  ? NOT_CONTEXTUAL
1436  : CONTEXTUAL;
1437  CallLoadIC(mode);
1438 }
1439 
1440 
1441 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1442  Label* slow) {
1443  ASSERT(var->IsContextSlot());
1444  Register context = cp;
1445  Register next = a3;
1446  Register temp = t0;
1447 
1448  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1449  if (s->num_heap_slots() > 0) {
1450  if (s->calls_sloppy_eval()) {
1451  // Check that extension is NULL.
1452  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1453  __ Branch(slow, ne, temp, Operand(zero_reg));
1454  }
1455  __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1456  // Walk the rest of the chain without clobbering cp.
1457  context = next;
1458  }
1459  }
1460  // Check that last extension is NULL.
1461  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1462  __ Branch(slow, ne, temp, Operand(zero_reg));
1463 
1464  // This function is used only for loads, not stores, so it's safe to
1465  // return an cp-based operand (the write barrier cannot be allowed to
1466  // destroy the cp register).
1467  return ContextOperand(context, var->index());
1468 }
1469 
1470 
1471 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1472  TypeofState typeof_state,
1473  Label* slow,
1474  Label* done) {
1475  // Generate fast-case code for variables that might be shadowed by
1476  // eval-introduced variables. Eval is used a lot without
1477  // introducing variables. In those cases, we do not want to
1478  // perform a runtime call for all variables in the scope
1479  // containing the eval.
1480  if (var->mode() == DYNAMIC_GLOBAL) {
1481  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1482  __ Branch(done);
1483  } else if (var->mode() == DYNAMIC_LOCAL) {
1484  Variable* local = var->local_if_not_shadowed();
1485  __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1486  if (local->mode() == LET || local->mode() == CONST ||
1487  local->mode() == CONST_LEGACY) {
1488  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1489  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1490  if (local->mode() == CONST_LEGACY) {
1491  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1492  __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1493  } else { // LET || CONST
1494  __ Branch(done, ne, at, Operand(zero_reg));
1495  __ li(a0, Operand(var->name()));
1496  __ push(a0);
1497  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1498  }
1499  }
1500  __ Branch(done);
1501  }
1502 }
1503 
1504 
1505 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1506  // Record position before possible IC call.
1507  SetSourcePosition(proxy->position());
1508  Variable* var = proxy->var();
1509 
1510  // Three cases: global variables, lookup variables, and all other types of
1511  // variables.
1512  switch (var->location()) {
1513  case Variable::UNALLOCATED: {
1514  Comment cmnt(masm_, "[ Global variable");
1515  // Use inline caching. Variable name is passed in a2 and the global
1516  // object (receiver) in a0.
1517  __ lw(a0, GlobalObjectOperand());
1518  __ li(a2, Operand(var->name()));
1519  CallLoadIC(CONTEXTUAL);
1520  context()->Plug(v0);
1521  break;
1522  }
1523 
1524  case Variable::PARAMETER:
1525  case Variable::LOCAL:
1526  case Variable::CONTEXT: {
1527  Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1528  : "[ Stack variable");
1529  if (var->binding_needs_init()) {
1530  // var->scope() may be NULL when the proxy is located in eval code and
1531  // refers to a potential outside binding. Currently those bindings are
1532  // always looked up dynamically, i.e. in that case
1533  // var->location() == LOOKUP.
1534  // always holds.
1535  ASSERT(var->scope() != NULL);
1536 
1537  // Check if the binding really needs an initialization check. The check
1538  // can be skipped in the following situation: we have a LET or CONST
1539  // binding in harmony mode, both the Variable and the VariableProxy have
1540  // the same declaration scope (i.e. they are both in global code, in the
1541  // same function or in the same eval code) and the VariableProxy is in
1542  // the source physically located after the initializer of the variable.
1543  //
1544  // We cannot skip any initialization checks for CONST in non-harmony
1545  // mode because const variables may be declared but never initialized:
1546  // if (false) { const x; }; var y = x;
1547  //
1548  // The condition on the declaration scopes is a conservative check for
1549  // nested functions that access a binding and are called before the
1550  // binding is initialized:
1551  // function() { f(); let x = 1; function f() { x = 2; } }
1552  //
1553  bool skip_init_check;
1554  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1555  skip_init_check = false;
1556  } else {
1557  // Check that we always have valid source position.
1558  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1559  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1560  skip_init_check = var->mode() != CONST_LEGACY &&
1561  var->initializer_position() < proxy->position();
1562  }
1563 
1564  if (!skip_init_check) {
1565  // Let and const need a read barrier.
1566  GetVar(v0, var);
1567  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1568  __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1569  if (var->mode() == LET || var->mode() == CONST) {
1570  // Throw a reference error when using an uninitialized let/const
1571  // binding in harmony mode.
1572  Label done;
1573  __ Branch(&done, ne, at, Operand(zero_reg));
1574  __ li(a0, Operand(var->name()));
1575  __ push(a0);
1576  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1577  __ bind(&done);
1578  } else {
1579  // Uninitialized const bindings outside of harmony mode are unholed.
1580  ASSERT(var->mode() == CONST_LEGACY);
1581  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1582  __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1583  }
1584  context()->Plug(v0);
1585  break;
1586  }
1587  }
1588  context()->Plug(var);
1589  break;
1590  }
1591 
1592  case Variable::LOOKUP: {
1593  Comment cmnt(masm_, "[ Lookup variable");
1594  Label done, slow;
1595  // Generate code for loading from variables potentially shadowed
1596  // by eval-introduced variables.
1597  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1598  __ bind(&slow);
1599  __ li(a1, Operand(var->name()));
1600  __ Push(cp, a1); // Context and name.
1601  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1602  __ bind(&done);
1603  context()->Plug(v0);
1604  }
1605  }
1606 }
1607 
1608 
1609 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1610  Comment cmnt(masm_, "[ RegExpLiteral");
1611  Label materialized;
1612  // Registers will be used as follows:
1613  // t1 = materialized value (RegExp literal)
1614  // t0 = JS function, literals array
1615  // a3 = literal index
1616  // a2 = RegExp pattern
1617  // a1 = RegExp flags
1618  // a0 = RegExp literal clone
1621  int literal_offset =
1622  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1623  __ lw(t1, FieldMemOperand(t0, literal_offset));
1624  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1625  __ Branch(&materialized, ne, t1, Operand(at));
1626 
1627  // Create regexp literal using runtime function.
1628  // Result will be in v0.
1629  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1630  __ li(a2, Operand(expr->pattern()));
1631  __ li(a1, Operand(expr->flags()));
1632  __ Push(t0, a3, a2, a1);
1633  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1634  __ mov(t1, v0);
1635 
1636  __ bind(&materialized);
1638  Label allocated, runtime_allocate;
1639  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1640  __ jmp(&allocated);
1641 
1642  __ bind(&runtime_allocate);
1643  __ li(a0, Operand(Smi::FromInt(size)));
1644  __ Push(t1, a0);
1645  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1646  __ pop(t1);
1647 
1648  __ bind(&allocated);
1649 
1650  // After this, registers are used as follows:
1651  // v0: Newly allocated regexp.
1652  // t1: Materialized regexp.
1653  // a2: temp.
1654  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1655  context()->Plug(v0);
1656 }
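 // Rough sketch of the effect of the code above: for a hypothetical literal
 //   function f() { return /ab+c/gi; }
 // the RegExp is materialized once into the literals array, and each
 // evaluation of the literal allocates a fresh JSRegExp and copies the
 // materialized value's fields into it.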
1657 
1658 
1659 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1660  if (expression == NULL) {
1661  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1662  __ push(a1);
1663  } else {
1664  VisitForStackValue(expression);
1665  }
1666 }
1667 
1668 
1669 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1670  Comment cmnt(masm_, "[ ObjectLiteral");
1671 
1672  expr->BuildConstantProperties(isolate());
1673  Handle<FixedArray> constant_properties = expr->constant_properties();
1676  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1677  __ li(a1, Operand(constant_properties));
1678  int flags = expr->fast_elements()
1679  ? ObjectLiteral::kFastElements
1680  : ObjectLiteral::kNoFlags;
1681  flags |= expr->has_function()
1682  ? ObjectLiteral::kHasFunction
1683  : ObjectLiteral::kNoFlags;
1684  __ li(a0, Operand(Smi::FromInt(flags)));
1685  int properties_count = constant_properties->length() / 2;
1686  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1687  flags != ObjectLiteral::kFastElements ||
1689  __ Push(a3, a2, a1, a0);
1690  __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1691  } else {
1692  FastCloneShallowObjectStub stub(properties_count);
1693  __ CallStub(&stub);
1694  }
1695 
1696  // If result_saved is true the result is on top of the stack. If
1697  // result_saved is false the result is in v0.
1698  bool result_saved = false;
1699 
1700  // Mark all computed expressions that are bound to a key that
1701  // is shadowed by a later occurrence of the same key. For the
1702  // marked expressions, no store code is emitted.
1703  expr->CalculateEmitStore(zone());
1704 
1705  AccessorTable accessor_table(zone());
1706  for (int i = 0; i < expr->properties()->length(); i++) {
1707  ObjectLiteral::Property* property = expr->properties()->at(i);
1708  if (property->IsCompileTimeValue()) continue;
1709 
1710  Literal* key = property->key();
1711  Expression* value = property->value();
1712  if (!result_saved) {
1713  __ push(v0); // Save result on stack.
1714  result_saved = true;
1715  }
1716  switch (property->kind()) {
1718  UNREACHABLE();
1719  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1720  ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1721  // Fall through.
1722  case ObjectLiteral::Property::COMPUTED:
1723  if (key->value()->IsInternalizedString()) {
1724  if (property->emit_store()) {
1725  VisitForAccumulatorValue(value);
1726  __ mov(a0, result_register());
1727  __ li(a2, Operand(key->value()));
1728  __ lw(a1, MemOperand(sp));
1729  CallStoreIC(key->LiteralFeedbackId());
1730  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1731  } else {
1732  VisitForEffect(value);
1733  }
1734  break;
1735  }
1736  // Duplicate receiver on stack.
1737  __ lw(a0, MemOperand(sp));
1738  __ push(a0);
1739  VisitForStackValue(key);
1740  VisitForStackValue(value);
1741  if (property->emit_store()) {
1742  __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1743  __ push(a0);
1744  __ CallRuntime(Runtime::kSetProperty, 4);
1745  } else {
1746  __ Drop(3);
1747  }
1748  break;
1749  case ObjectLiteral::Property::PROTOTYPE:
1750  // Duplicate receiver on stack.
1751  __ lw(a0, MemOperand(sp));
1752  __ push(a0);
1753  VisitForStackValue(value);
1754  if (property->emit_store()) {
1755  __ CallRuntime(Runtime::kSetPrototype, 2);
1756  } else {
1757  __ Drop(2);
1758  }
1759  break;
1760  case ObjectLiteral::Property::GETTER:
1761  accessor_table.lookup(key)->second->getter = value;
1762  break;
1763  case ObjectLiteral::Property::SETTER:
1764  accessor_table.lookup(key)->second->setter = value;
1765  break;
1766  }
1767  }
1768 
1769  // Emit code to define accessors, using only a single call to the runtime for
1770  // each pair of corresponding getters and setters.
1771  for (AccessorTable::Iterator it = accessor_table.begin();
1772  it != accessor_table.end();
1773  ++it) {
1774  __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1775  __ push(a0);
1776  VisitForStackValue(it->first);
1777  EmitAccessor(it->second->getter);
1778  EmitAccessor(it->second->setter);
1779  __ li(a0, Operand(Smi::FromInt(NONE)));
1780  __ push(a0);
1781  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1782  }
1783 
1784  if (expr->has_function()) {
1785  ASSERT(result_saved);
1786  __ lw(a0, MemOperand(sp));
1787  __ push(a0);
1788  __ CallRuntime(Runtime::kToFastProperties, 1);
1789  }
1790 
1791  if (result_saved) {
1792  context()->PlugTOS();
1793  } else {
1794  context()->Plug(v0);
1795  }
1796 }
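 // Roughly, for a hypothetical object literal
 //   var o = { a: 1, get p() { return 2; }, set p(v) { } };
 // constant properties travel with the cloned boilerplate, computed
 // properties are stored one at a time (StoreIC or Runtime::kSetProperty),
 // and each getter/setter pair for the same key is installed with a single
 // Runtime::kDefineOrRedefineAccessorProperty call in the loop above.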
1797 
1798 
1799 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1800  Comment cmnt(masm_, "[ ArrayLiteral");
1801 
1802  expr->BuildConstantElements(isolate());
1803  int flags = expr->depth() == 1
1804  ? ArrayLiteral::kShallowElements
1805  : ArrayLiteral::kNoFlags;
1806 
1807  ZoneList<Expression*>* subexprs = expr->values();
1808  int length = subexprs->length();
1809 
1810  Handle<FixedArray> constant_elements = expr->constant_elements();
1811  ASSERT_EQ(2, constant_elements->length());
1812  ElementsKind constant_elements_kind =
1813  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1814  bool has_fast_elements =
1815  IsFastObjectElementsKind(constant_elements_kind);
1816  Handle<FixedArrayBase> constant_elements_values(
1817  FixedArrayBase::cast(constant_elements->get(1)));
1818 
1819  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1820  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1821  // If the only customer of allocation sites is transitioning, then
1822  // we can turn it off if we don't have anywhere else to transition to.
1823  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1824  }
1825 
1826  __ mov(a0, result_register());
1829  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1830  __ li(a1, Operand(constant_elements));
1831  if (has_fast_elements && constant_elements_values->map() ==
1832  isolate()->heap()->fixed_cow_array_map()) {
1833  FastCloneShallowArrayStub stub(
1835  allocation_site_mode,
1836  length);
1837  __ CallStub(&stub);
1838  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1839  1, a1, a2);
1840  } else if (expr->depth() > 1 || Serializer::enabled() ||
1842  __ li(a0, Operand(Smi::FromInt(flags)));
1843  __ Push(a3, a2, a1, a0);
1844  __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1845  } else {
1846  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1847  FLAG_smi_only_arrays);
1850 
1851  if (has_fast_elements) {
1853  }
1854 
1855  FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1856  __ CallStub(&stub);
1857  }
1858 
1859  bool result_saved = false; // Is the result saved to the stack?
1860 
1861  // Emit code to evaluate all the non-constant subexpressions and to store
1862  // them into the newly cloned array.
1863  for (int i = 0; i < length; i++) {
1864  Expression* subexpr = subexprs->at(i);
1865  // If the subexpression is a literal or a simple materialized literal it
1866  // is already set in the cloned array.
1867  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1868 
1869  if (!result_saved) {
1870  __ push(v0); // array literal
1871  __ Push(Smi::FromInt(expr->literal_index()));
1872  result_saved = true;
1873  }
1874 
1875  VisitForAccumulatorValue(subexpr);
1876 
1877  if (IsFastObjectElementsKind(constant_elements_kind)) {
1878  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1879  __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1881  __ sw(result_register(), FieldMemOperand(a1, offset));
1882  // Update the write barrier for the array store.
1883  __ RecordWriteField(a1, offset, result_register(), a2,
1886  } else {
1887  __ li(a3, Operand(Smi::FromInt(i)));
1888  __ mov(a0, result_register());
1889  StoreArrayLiteralElementStub stub;
1890  __ CallStub(&stub);
1891  }
1892 
1893  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1894  }
1895  if (result_saved) {
1896  __ Pop(); // literal index
1897  context()->PlugTOS();
1898  } else {
1899  context()->Plug(v0);
1900  }
1901 }
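 // For illustration, in a hypothetical array literal
 //   var a = [1, 2, foo()];
 // the constant elements 1 and 2 are already present in the cloned
 // boilerplate, so the loop above only evaluates and stores the non-constant
 // subexpression foo(), using a direct store plus write barrier for fast
 // object elements and StoreArrayLiteralElementStub otherwise.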
1902 
1903 
1904 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1905  ASSERT(expr->target()->IsValidLeftHandSide());
1906 
1907  Comment cmnt(masm_, "[ Assignment");
1908 
1909  // Left-hand side can only be a property, a global or a (parameter or local)
1910  // slot.
1911  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1912  LhsKind assign_type = VARIABLE;
1913  Property* property = expr->target()->AsProperty();
1914  if (property != NULL) {
1915  assign_type = (property->key()->IsPropertyName())
1916  ? NAMED_PROPERTY
1917  : KEYED_PROPERTY;
1918  }
1919 
1920  // Evaluate LHS expression.
1921  switch (assign_type) {
1922  case VARIABLE:
1923  // Nothing to do here.
1924  break;
1925  case NAMED_PROPERTY:
1926  if (expr->is_compound()) {
1927  // We need the receiver both on the stack and in the accumulator.
1928  VisitForAccumulatorValue(property->obj());
1929  __ push(result_register());
1930  } else {
1931  VisitForStackValue(property->obj());
1932  }
1933  break;
1934  case KEYED_PROPERTY:
1935  // We need the key and receiver on both the stack and in v0 and a1.
1936  if (expr->is_compound()) {
1937  VisitForStackValue(property->obj());
1938  VisitForAccumulatorValue(property->key());
1939  __ lw(a1, MemOperand(sp, 0));
1940  __ push(v0);
1941  } else {
1942  VisitForStackValue(property->obj());
1943  VisitForStackValue(property->key());
1944  }
1945  break;
1946  }
1947 
1948  // For compound assignments we need another deoptimization point after the
1949  // variable/property load.
1950  if (expr->is_compound()) {
1951  { AccumulatorValueContext context(this);
1952  switch (assign_type) {
1953  case VARIABLE:
1954  EmitVariableLoad(expr->target()->AsVariableProxy());
1955  PrepareForBailout(expr->target(), TOS_REG);
1956  break;
1957  case NAMED_PROPERTY:
1958  EmitNamedPropertyLoad(property);
1959  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1960  break;
1961  case KEYED_PROPERTY:
1962  EmitKeyedPropertyLoad(property);
1963  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1964  break;
1965  }
1966  }
1967 
1968  Token::Value op = expr->binary_op();
1969  __ push(v0); // Left operand goes on the stack.
1970  VisitForAccumulatorValue(expr->value());
1971 
1972  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1973  ? OVERWRITE_RIGHT
1974  : NO_OVERWRITE;
1975  SetSourcePosition(expr->position() + 1);
1976  AccumulatorValueContext context(this);
1977  if (ShouldInlineSmiCase(op)) {
1978  EmitInlineSmiBinaryOp(expr->binary_operation(),
1979  op,
1980  mode,
1981  expr->target(),
1982  expr->value());
1983  } else {
1984  EmitBinaryOp(expr->binary_operation(), op, mode);
1985  }
1986 
1987  // Deoptimization point in case the binary operation may have side effects.
1988  PrepareForBailout(expr->binary_operation(), TOS_REG);
1989  } else {
1990  VisitForAccumulatorValue(expr->value());
1991  }
1992 
1993  // Record source position before possible IC call.
1994  SetSourcePosition(expr->position());
1995 
1996  // Store the value.
1997  switch (assign_type) {
1998  case VARIABLE:
1999  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2000  expr->op());
2001  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2002  context()->Plug(v0);
2003  break;
2004  case NAMED_PROPERTY:
2005  EmitNamedPropertyAssignment(expr);
2006  break;
2007  case KEYED_PROPERTY:
2008  EmitKeyedPropertyAssignment(expr);
2009  break;
2010  }
2011 }
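 // As a sketch, a hypothetical compound assignment
 //   o.x += 1;
 // takes the NAMED_PROPERTY path with is_compound() true: the receiver is
 // kept on the stack, the property is loaded, the addition runs through the
 // inline smi code or the BinaryOpIC, and the result is stored back via
 // EmitNamedPropertyAssignment.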
2012 
2013 
2014 void FullCodeGenerator::VisitYield(Yield* expr) {
2015  Comment cmnt(masm_, "[ Yield");
2016  // Evaluate yielded value first; the initial iterator definition depends on
2017  // this. It stays on the stack while we update the iterator.
2018  VisitForStackValue(expr->expression());
2019 
2020  switch (expr->yield_kind()) {
2021  case Yield::SUSPEND:
2022  // Pop value from top-of-stack slot; box result into result register.
2023  EmitCreateIteratorResult(false);
2024  __ push(result_register());
2025  // Fall through.
2026  case Yield::INITIAL: {
2027  Label suspend, continuation, post_runtime, resume;
2028 
2029  __ jmp(&suspend);
2030 
2031  __ bind(&continuation);
2032  __ jmp(&resume);
2033 
2034  __ bind(&suspend);
2035  VisitForAccumulatorValue(expr->generator_object());
2036  ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2037  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2040  __ mov(a1, cp);
2041  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2043  __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2044  __ Branch(&post_runtime, eq, sp, Operand(a1));
2045  __ push(v0); // generator object
2046  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2048  __ bind(&post_runtime);
2049  __ pop(result_register());
2050  EmitReturnSequence();
2051 
2052  __ bind(&resume);
2053  context()->Plug(result_register());
2054  break;
2055  }
2056 
2057  case Yield::FINAL: {
2058  VisitForAccumulatorValue(expr->generator_object());
2060  __ sw(a1, FieldMemOperand(result_register(),
2062  // Pop value from top-of-stack slot; box result into result register.
2063  EmitCreateIteratorResult(true);
2064  EmitUnwindBeforeReturn();
2065  EmitReturnSequence();
2066  break;
2067  }
2068 
2069  case Yield::DELEGATING: {
2070  VisitForStackValue(expr->generator_object());
2071 
2072  // Initial stack layout is as follows:
2073  // [sp + 1 * kPointerSize] iter
2074  // [sp + 0 * kPointerSize] g
2075 
2076  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2077  Label l_next, l_call, l_loop;
2078  // Initial send value is undefined.
2079  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2080  __ Branch(&l_next);
2081 
2082  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2083  __ bind(&l_catch);
2084  __ mov(a0, v0);
2085  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2086  __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2087  __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2088  __ Push(a2, a3, a0); // "throw", iter, except
2089  __ jmp(&l_call);
2090 
2091  // try { received = %yield result }
2092  // Shuffle the received result above a try handler and yield it without
2093  // re-boxing.
2094  __ bind(&l_try);
2095  __ pop(a0); // result
2096  __ PushTryHandler(StackHandler::CATCH, expr->index());
2097  const int handler_size = StackHandlerConstants::kSize;
2098  __ push(a0); // result
2099  __ jmp(&l_suspend);
2100  __ bind(&l_continuation);
2101  __ mov(a0, v0);
2102  __ jmp(&l_resume);
2103  __ bind(&l_suspend);
2104  const int generator_object_depth = kPointerSize + handler_size;
2105  __ lw(a0, MemOperand(sp, generator_object_depth));
2106  __ push(a0); // g
2107  ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2108  __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2111  __ mov(a1, cp);
2112  __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2114  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2116  __ pop(v0); // result
2117  EmitReturnSequence();
2118  __ mov(a0, v0);
2119  __ bind(&l_resume); // received in a0
2120  __ PopTryHandler();
2121 
2122  // receiver = iter; f = 'next'; arg = received;
2123  __ bind(&l_next);
2124  __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next"
2125  __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2126  __ Push(a2, a3, a0); // "next", iter, received
2127 
2128  // result = receiver[f](arg);
2129  __ bind(&l_call);
2130  __ lw(a1, MemOperand(sp, kPointerSize));
2131  __ lw(a0, MemOperand(sp, 2 * kPointerSize));
2132  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2133  CallIC(ic, TypeFeedbackId::None());
2134  __ mov(a0, v0);
2135  __ mov(a1, a0);
2136  __ sw(a1, MemOperand(sp, 2 * kPointerSize));
2137  CallFunctionStub stub(1, CALL_AS_METHOD);
2138  __ CallStub(&stub);
2139 
2141  __ Drop(1); // The function is still on the stack; drop it.
2142 
2143  // if (!result.done) goto l_try;
2144  __ bind(&l_loop);
2145  __ mov(a0, v0);
2146  __ push(a0); // save result
2147  __ LoadRoot(a2, Heap::kdone_stringRootIndex); // "done"
2148  CallLoadIC(NOT_CONTEXTUAL); // result.done in v0
2149  __ mov(a0, v0);
2150  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2151  CallIC(bool_ic);
2152  __ Branch(&l_try, eq, v0, Operand(zero_reg));
2153 
2154  // result.value
2155  __ pop(a0); // result
2156  __ LoadRoot(a2, Heap::kvalue_stringRootIndex); // "value"
2157  CallLoadIC(NOT_CONTEXTUAL); // result.value in v0
2158  context()->DropAndPlug(2, v0); // drop iter and g
2159  break;
2160  }
2161  }
2162 }
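 // The DELEGATING case above compiles 'yield* iterable' roughly as:
 //   received = undefined;
 //   for (;;) {
 //     result = iter[f](received);   // f is "next", or "throw" on exception
 //     if (result.done) break;       // the value of yield* is result.value
 //     received = yield result;      // the result object is yielded unboxed
 //   }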
2163 
2164 
2165 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2166  Expression *value,
2167  JSGeneratorObject::ResumeMode resume_mode) {
2168  // The value stays in a0, and is ultimately read by the resumed generator, as
2169  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2170  // is read to throw the value when the resumed generator is already closed.
2171  // a1 will hold the generator object until the activation has been resumed.
2172  VisitForStackValue(generator);
2173  VisitForAccumulatorValue(value);
2174  __ pop(a1);
2175 
2176  // Check generator state.
2177  Label wrong_state, closed_state, done;
2181  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2182  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
2183 
2184  // Load suspended function and context.
2187 
2188  // Load receiver and store as the first argument.
2190  __ push(a2);
2191 
2192  // Push holes for the rest of the arguments to the generator function.
2194  __ lw(a3,
2196  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2197  Label push_argument_holes, push_frame;
2198  __ bind(&push_argument_holes);
2199  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2200  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2201  __ push(a2);
2202  __ jmp(&push_argument_holes);
2203 
2204  // Enter a new JavaScript frame, and initialize its slots as they were when
2205  // the generator was suspended.
2206  Label resume_frame;
2207  __ bind(&push_frame);
2208  __ Call(&resume_frame);
2209  __ jmp(&done);
2210  __ bind(&resume_frame);
2211  // ra = return address.
2212  // fp = caller's frame pointer.
2213  // cp = callee's context,
2214  // t0 = callee's JS function.
2215  __ Push(ra, fp, cp, t0);
2216  // Adjust FP to point to saved FP.
2217  __ Addu(fp, sp, 2 * kPointerSize);
2218 
2219  // Load the operand stack size.
2222  __ SmiUntag(a3);
2223 
2224  // If we are sending a value and there is no operand stack, we can jump back
2225  // in directly.
2226  if (resume_mode == JSGeneratorObject::NEXT) {
2227  Label slow_resume;
2228  __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2231  __ SmiUntag(a2);
2232  __ Addu(a3, a3, Operand(a2));
2235  __ Jump(a3);
2236  __ bind(&slow_resume);
2237  }
2238 
2239  // Otherwise, we push holes for the operand stack and call the runtime to fix
2240  // up the stack and the handlers.
2241  Label push_operand_holes, call_resume;
2242  __ bind(&push_operand_holes);
2243  __ Subu(a3, a3, Operand(1));
2244  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2245  __ push(a2);
2246  __ Branch(&push_operand_holes);
2247  __ bind(&call_resume);
2248  ASSERT(!result_register().is(a1));
2249  __ Push(a1, result_register());
2250  __ Push(Smi::FromInt(resume_mode));
2251  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2252  // Not reached: the runtime call returns elsewhere.
2253  __ stop("not-reached");
2254 
2255  // Reach here when generator is closed.
2256  __ bind(&closed_state);
2257  if (resume_mode == JSGeneratorObject::NEXT) {
2258  // Return completed iterator result when generator is closed.
2259  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2260  __ push(a2);
2261  // Pop value from top-of-stack slot; box result into result register.
2262  EmitCreateIteratorResult(true);
2263  } else {
2264  // Throw the provided value.
2265  __ push(a0);
2266  __ CallRuntime(Runtime::kHiddenThrow, 1);
2267  }
2268  __ jmp(&done);
2269 
2270  // Throw error if we attempt to operate on a running generator.
2271  __ bind(&wrong_state);
2272  __ push(a1);
2273  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2274 
2275  __ bind(&done);
2276  context()->Plug(result_register());
2277 }
2278 
2279 
2280 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2281  Label gc_required;
2282  Label allocated;
2283 
2284  Handle<Map> map(isolate()->native_context()->generator_result_map());
2285 
2286  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2287  __ jmp(&allocated);
2288 
2289  __ bind(&gc_required);
2290  __ Push(Smi::FromInt(map->instance_size()));
2291  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2292  __ lw(context_register(),
2294 
2295  __ bind(&allocated);
2296  __ li(a1, Operand(map));
2297  __ pop(a2);
2298  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2299  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2300  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2304  __ sw(a2,
2306  __ sw(a3,
2308 
2309  // Only the value field needs a write barrier, as the other values are in the
2310  // root set.
2312  a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2313 }
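 // EmitCreateIteratorResult above builds the equivalent of
 //   { value: <popped value>, done: <done> }
 // inline: it allocates in new space (falling back to the runtime when GC is
 // required), fills in the map and fields, and emits a write barrier only for
 // the value field since everything else lives in the root set.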
2314 
2315 
2316 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2317  SetSourcePosition(prop->position());
2318  Literal* key = prop->key()->AsLiteral();
2319  __ mov(a0, result_register());
2320  __ li(a2, Operand(key->value()));
2321  // Call load IC. It has arguments receiver and property name in a0 and a2.
2322  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2323 }
2324 
2325 
2326 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2327  SetSourcePosition(prop->position());
2328  __ mov(a0, result_register());
2329  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
2330  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2331  CallIC(ic, prop->PropertyFeedbackId());
2332 }
2333 
2334 
2335 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2336  Token::Value op,
2337  OverwriteMode mode,
2338  Expression* left_expr,
2339  Expression* right_expr) {
2340  Label done, smi_case, stub_call;
2341 
2342  Register scratch1 = a2;
2343  Register scratch2 = a3;
2344 
2345  // Get the arguments.
2346  Register left = a1;
2347  Register right = a0;
2348  __ pop(left);
2349  __ mov(a0, result_register());
2350 
2351  // Perform combined smi check on both operands.
2352  __ Or(scratch1, left, Operand(right));
2353  STATIC_ASSERT(kSmiTag == 0);
2354  JumpPatchSite patch_site(masm_);
2355  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2356 
2357  __ bind(&stub_call);
2358  BinaryOpICStub stub(op, mode);
2359  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2360  patch_site.EmitPatchInfo();
2361  __ jmp(&done);
2362 
2363  __ bind(&smi_case);
2364  // Smi case. This code works the same way as the smi-smi case in the type
2365  // recording binary operation stub.
2366  switch (op) {
2367  case Token::SAR:
2368  __ GetLeastBitsFromSmi(scratch1, right, 5);
2369  __ srav(right, left, scratch1);
2370  __ And(v0, right, Operand(~kSmiTagMask));
2371  break;
2372  case Token::SHL: {
2373  __ SmiUntag(scratch1, left);
2374  __ GetLeastBitsFromSmi(scratch2, right, 5);
2375  __ sllv(scratch1, scratch1, scratch2);
2376  __ Addu(scratch2, scratch1, Operand(0x40000000));
2377  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2378  __ SmiTag(v0, scratch1);
2379  break;
2380  }
2381  case Token::SHR: {
2382  __ SmiUntag(scratch1, left);
2383  __ GetLeastBitsFromSmi(scratch2, right, 5);
2384  __ srlv(scratch1, scratch1, scratch2);
2385  __ And(scratch2, scratch1, 0xc0000000);
2386  __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2387  __ SmiTag(v0, scratch1);
2388  break;
2389  }
2390  case Token::ADD:
2391  __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2392  __ BranchOnOverflow(&stub_call, scratch1);
2393  break;
2394  case Token::SUB:
2395  __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2396  __ BranchOnOverflow(&stub_call, scratch1);
2397  break;
2398  case Token::MUL: {
2399  __ SmiUntag(scratch1, right);
2400  __ Mult(left, scratch1);
2401  __ mflo(scratch1);
2402  __ mfhi(scratch2);
2403  __ sra(scratch1, scratch1, 31);
2404  __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2405  __ mflo(v0);
2406  __ Branch(&done, ne, v0, Operand(zero_reg));
2407  __ Addu(scratch2, right, left);
2408  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2409  ASSERT(Smi::FromInt(0) == 0);
2410  __ mov(v0, zero_reg);
2411  break;
2412  }
2413  case Token::BIT_OR:
2414  __ Or(v0, left, Operand(right));
2415  break;
2416  case Token::BIT_AND:
2417  __ And(v0, left, Operand(right));
2418  break;
2419  case Token::BIT_XOR:
2420  __ Xor(v0, left, Operand(right));
2421  break;
2422  default:
2423  UNREACHABLE();
2424  }
2425 
2426  __ bind(&done);
2427  context()->Plug(v0);
2428 }
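 // Sketch of the fast path above: one OR combines both operands for a single
 // smi check behind a patchable jump; e.g. Token::ADD uses
 // AdduAndCheckForOverflow, and overflow (or a non-smi operand) falls back to
 // the BinaryOpICStub at the stub_call label.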
2429 
2430 
2431 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2432  Token::Value op,
2433  OverwriteMode mode) {
2434  __ mov(a0, result_register());
2435  __ pop(a1);
2436  BinaryOpICStub stub(op, mode);
2437  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2438  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2439  patch_site.EmitPatchInfo();
2440  context()->Plug(v0);
2441 }
2442 
2443 
2444 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2445  ASSERT(expr->IsValidLeftHandSide());
2446 
2447  // Left-hand side can only be a property, a global or a (parameter or local)
2448  // slot.
2449  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2450  LhsKind assign_type = VARIABLE;
2451  Property* prop = expr->AsProperty();
2452  if (prop != NULL) {
2453  assign_type = (prop->key()->IsPropertyName())
2454  ? NAMED_PROPERTY
2455  : KEYED_PROPERTY;
2456  }
2457 
2458  switch (assign_type) {
2459  case VARIABLE: {
2460  Variable* var = expr->AsVariableProxy()->var();
2461  EffectContext context(this);
2462  EmitVariableAssignment(var, Token::ASSIGN);
2463  break;
2464  }
2465  case NAMED_PROPERTY: {
2466  __ push(result_register()); // Preserve value.
2467  VisitForAccumulatorValue(prop->obj());
2468  __ mov(a1, result_register());
2469  __ pop(a0); // Restore value.
2470  __ li(a2, Operand(prop->key()->AsLiteral()->value()));
2471  CallStoreIC();
2472  break;
2473  }
2474  case KEYED_PROPERTY: {
2475  __ push(result_register()); // Preserve value.
2476  VisitForStackValue(prop->obj());
2477  VisitForAccumulatorValue(prop->key());
2478  __ mov(a1, result_register());
2479  __ Pop(a0, a2); // a0 = restored value.
2480  Handle<Code> ic = strict_mode() == SLOPPY
2481  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2482  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2483  CallIC(ic);
2484  break;
2485  }
2486  }
2487  context()->Plug(v0);
2488 }
2489 
2490 
2491 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2492  Variable* var, MemOperand location) {
2493  __ sw(result_register(), location);
2494  if (var->IsContextSlot()) {
2495  // RecordWrite may destroy all its register arguments.
2496  __ Move(a3, result_register());
2497  int offset = Context::SlotOffset(var->index());
2498  __ RecordWriteContextSlot(
2499  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2500  }
2501 }
2502 
2503 
2504 void FullCodeGenerator::EmitCallStoreContextSlot(
2505  Handle<String> name, StrictMode strict_mode) {
2506  __ li(a1, Operand(name));
2507  __ li(a0, Operand(Smi::FromInt(strict_mode)));
2508  __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
2509  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2510 }
2511 
2512 
2513 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2514  if (var->IsUnallocated()) {
2515  // Global var, const, or let.
2516  __ mov(a0, result_register());
2517  __ li(a2, Operand(var->name()));
2518  __ lw(a1, GlobalObjectOperand());
2519  CallStoreIC();
2520 
2521  } else if (op == Token::INIT_CONST_LEGACY) {
2522  // Const initializers need a write barrier.
2523  ASSERT(!var->IsParameter()); // No const parameters.
2524  if (var->IsLookupSlot()) {
2525  __ li(a0, Operand(var->name()));
2526  __ Push(v0, cp, a0); // Value, context, and name.
2527  __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2528  } else {
2529  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2530  Label skip;
2531  MemOperand location = VarOperand(var, a1);
2532  __ lw(a2, location);
2533  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2534  __ Branch(&skip, ne, a2, Operand(at));
2535  EmitStoreToStackLocalOrContextSlot(var, location);
2536  __ bind(&skip);
2537  }
2538 
2539  } else if (var->mode() == LET && op != Token::INIT_LET) {
2540  // Non-initializing assignment to let variable needs a write barrier.
2541  if (var->IsLookupSlot()) {
2542  EmitCallStoreContextSlot(var->name(), strict_mode());
2543  } else {
2544  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2545  Label assign;
2546  MemOperand location = VarOperand(var, a1);
2547  __ lw(a3, location);
2548  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2549  __ Branch(&assign, ne, a3, Operand(t0));
2550  __ li(a3, Operand(var->name()));
2551  __ push(a3);
2552  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2553  // Perform the assignment.
2554  __ bind(&assign);
2555  EmitStoreToStackLocalOrContextSlot(var, location);
2556  }
2557 
2558  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2559  // Assignment to var or initializing assignment to let/const
2560  // in harmony mode.
2561  if (var->IsLookupSlot()) {
2562  EmitCallStoreContextSlot(var->name(), strict_mode());
2563  } else {
2564  ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2565  MemOperand location = VarOperand(var, a1);
2566  if (generate_debug_code_ && op == Token::INIT_LET) {
2567  // Check for an uninitialized let binding.
2568  __ lw(a2, location);
2569  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2570  __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2571  }
2572  EmitStoreToStackLocalOrContextSlot(var, location);
2573  }
2574  }
2575  // Non-initializing assignments to consts are ignored.
2576 }
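 // In outline: unallocated (global) variables are stored through a StoreIC,
 // legacy const initializers only store while the slot still holds the hole,
 // non-initializing stores to 'let' throw a reference error if the binding is
 // still the hole, and the remaining cases store to the stack or context slot
 // (or call the StoreContextSlot runtime function for lookup slots).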
2577 
2578 
2579 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2580  // Assignment to a property, using a named store IC.
2581  Property* prop = expr->target()->AsProperty();
2582  ASSERT(prop != NULL);
2583  ASSERT(prop->key()->AsLiteral() != NULL);
2584 
2585  // Record source code position before IC call.
2586  SetSourcePosition(expr->position());
2587  __ mov(a0, result_register()); // Load the value.
2588  __ li(a2, Operand(prop->key()->AsLiteral()->value()));
2589  __ pop(a1);
2590 
2591  CallStoreIC(expr->AssignmentFeedbackId());
2592 
2593  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2594  context()->Plug(v0);
2595 }
2596 
2597 
2598 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2599  // Assignment to a property, using a keyed store IC.
2600 
2601  // Record source code position before IC call.
2602  SetSourcePosition(expr->position());
2603  // Call keyed store IC.
2604  // The arguments are:
2605  // - a0 is the value,
2606  // - a1 is the key,
2607  // - a2 is the receiver.
2608  __ mov(a0, result_register());
2609  __ Pop(a2, a1); // a1 = key.
2610 
2611  Handle<Code> ic = strict_mode() == SLOPPY
2612  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2613  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2614  CallIC(ic, expr->AssignmentFeedbackId());
2615 
2616  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2617  context()->Plug(v0);
2618 }
2619 
2620 
2621 void FullCodeGenerator::VisitProperty(Property* expr) {
2622  Comment cmnt(masm_, "[ Property");
2623  Expression* key = expr->key();
2624 
2625  if (key->IsPropertyName()) {
2626  VisitForAccumulatorValue(expr->obj());
2627  EmitNamedPropertyLoad(expr);
2628  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2629  context()->Plug(v0);
2630  } else {
2631  VisitForStackValue(expr->obj());
2632  VisitForAccumulatorValue(expr->key());
2633  __ pop(a1);
2634  EmitKeyedPropertyLoad(expr);
2635  context()->Plug(v0);
2636  }
2637 }
2638 
2639 
2640 void FullCodeGenerator::CallIC(Handle<Code> code,
2641  TypeFeedbackId id) {
2642  ic_total_count_++;
2643  __ Call(code, RelocInfo::CODE_TARGET, id);
2644 }
2645 
2646 
2647 // Code common for calls using the IC.
2648 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2649  Expression* callee = expr->expression();
2650  ZoneList<Expression*>* args = expr->arguments();
2651  int arg_count = args->length();
2652 
2654  // Get the target function.
2655  if (callee->IsVariableProxy()) {
2656  { StackValueContext context(this);
2657  EmitVariableLoad(callee->AsVariableProxy());
2658  PrepareForBailout(callee, NO_REGISTERS);
2659  }
2660  // Push undefined as receiver. This is patched in the method prologue if it
2661  // is a sloppy mode method.
2662  __ Push(isolate()->factory()->undefined_value());
2663  flags = NO_CALL_FUNCTION_FLAGS;
2664  } else {
2665  // Load the function from the receiver.
2666  ASSERT(callee->IsProperty());
2667  __ lw(v0, MemOperand(sp, 0));
2668  EmitNamedPropertyLoad(callee->AsProperty());
2669  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2670  // Push the target function under the receiver.
2671  __ lw(at, MemOperand(sp, 0));
2672  __ push(at);
2673  __ sw(v0, MemOperand(sp, kPointerSize));
2674  flags = CALL_AS_METHOD;
2675  }
2676 
2677  // Load the arguments.
2678  { PreservePositionScope scope(masm()->positions_recorder());
2679  for (int i = 0; i < arg_count; i++) {
2680  VisitForStackValue(args->at(i));
2681  }
2682  }
2683  // Record source position for debugger.
2684  SetSourcePosition(expr->position());
2685  CallFunctionStub stub(arg_count, flags);
2686  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2687  __ CallStub(&stub);
2688 
2689  RecordJSReturnSite(expr);
2690 
2691  // Restore context register.
2693 
2694  context()->DropAndPlug(1, v0);
2695 }
2696 
2697 
2698 // Code common for calls using the IC.
2699 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2700  Expression* key) {
2701  // Load the key.
2702  VisitForAccumulatorValue(key);
2703 
2704  Expression* callee = expr->expression();
2705  ZoneList<Expression*>* args = expr->arguments();
2706  int arg_count = args->length();
2707 
2708  // Load the function from the receiver.
2709  ASSERT(callee->IsProperty());
2710  __ lw(a1, MemOperand(sp, 0));
2711  EmitKeyedPropertyLoad(callee->AsProperty());
2712  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2713 
2714  // Push the target function under the receiver.
2715  __ lw(at, MemOperand(sp, 0));
2716  __ push(at);
2717  __ sw(v0, MemOperand(sp, kPointerSize));
2718 
2719  { PreservePositionScope scope(masm()->positions_recorder());
2720  for (int i = 0; i < arg_count; i++) {
2721  VisitForStackValue(args->at(i));
2722  }
2723  }
2724 
2725  // Record source position for debugger.
2726  SetSourcePosition(expr->position());
2727  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2728  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2729  __ CallStub(&stub);
2730 
2731  RecordJSReturnSite(expr);
2732  // Restore context register.
2734 
2735  context()->DropAndPlug(1, v0);
2736 }
2737 
2738 
2739 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2740  // Code common for calls using the call stub.
2741  ZoneList<Expression*>* args = expr->arguments();
2742  int arg_count = args->length();
2743  { PreservePositionScope scope(masm()->positions_recorder());
2744  for (int i = 0; i < arg_count; i++) {
2745  VisitForStackValue(args->at(i));
2746  }
2747  }
2748  // Record source position for debugger.
2749  SetSourcePosition(expr->position());
2750 
2751  Handle<Object> uninitialized =
2753  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2754  __ li(a2, FeedbackVector());
2755  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2756 
2757  // Record call targets in unoptimized code.
2758  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2759  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2760  __ CallStub(&stub);
2761  RecordJSReturnSite(expr);
2762  // Restore context register.
2764  context()->DropAndPlug(1, v0);
2765 }
2766 
2767 
2768 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2769  // t2: copy of the first argument or undefined if it doesn't exist.
2770  if (arg_count > 0) {
2771  __ lw(t2, MemOperand(sp, arg_count * kPointerSize));
2772  } else {
2773  __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
2774  }
2775 
2776  // t1: the receiver of the enclosing function.
2777  int receiver_offset = 2 + info_->scope()->num_parameters();
2778  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
2779 
2780  // t0: the strict mode.
2781  __ li(t0, Operand(Smi::FromInt(strict_mode())));
2782 
2783  // a1: the start position of the scope the call resides in.
2784  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2785 
2786  // Do the runtime call.
2787  __ Push(t2, t1, t0, a1);
2788  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2789 }
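 // Note: only four values are pushed here; the caller (VisitCall below)
 // pushes a copy of the function immediately before invoking this helper,
 // which accounts for the fifth argument to the runtime call.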
2790 
2791 
2792 void FullCodeGenerator::VisitCall(Call* expr) {
2793 #ifdef DEBUG
2794  // We want to verify that RecordJSReturnSite gets called on all paths
2795  // through this function. Avoid early returns.
2796  expr->return_is_recorded_ = false;
2797 #endif
2798 
2799  Comment cmnt(masm_, "[ Call");
2800  Expression* callee = expr->expression();
2801  Call::CallType call_type = expr->GetCallType(isolate());
2802 
2803  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2804  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2805  // to resolve the function we need to call and the receiver of the
2806  // call. Then we call the resolved function using the given
2807  // arguments.
2808  ZoneList<Expression*>* args = expr->arguments();
2809  int arg_count = args->length();
2810 
2811  { PreservePositionScope pos_scope(masm()->positions_recorder());
2812  VisitForStackValue(callee);
2813  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2814  __ push(a2); // Reserved receiver slot.
2815 
2816  // Push the arguments.
2817  for (int i = 0; i < arg_count; i++) {
2818  VisitForStackValue(args->at(i));
2819  }
2820 
2821  // Push a copy of the function (found below the arguments) and
2822  // resolve eval.
2823  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2824  __ push(a1);
2825  EmitResolvePossiblyDirectEval(arg_count);
2826 
2827  // The runtime call returns a pair of values in v0 (function) and
2828  // v1 (receiver). Touch up the stack with the right values.
2829  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2830  __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2831  }
2832  // Record source position for debugger.
2833  SetSourcePosition(expr->position());
2834  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2835  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2836  __ CallStub(&stub);
2837  RecordJSReturnSite(expr);
2838  // Restore context register.
2840  context()->DropAndPlug(1, v0);
2841  } else if (call_type == Call::GLOBAL_CALL) {
2842  EmitCallWithIC(expr);
2843  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2844  // Call to a lookup slot (dynamically introduced variable).
2845  VariableProxy* proxy = callee->AsVariableProxy();
2846  Label slow, done;
2847 
2848  { PreservePositionScope scope(masm()->positions_recorder());
2849  // Generate code for loading from variables potentially shadowed
2850  // by eval-introduced variables.
2851  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2852  }
2853 
2854  __ bind(&slow);
2855  // Call the runtime to find the function to call (returned in v0)
2856  // and the object holding it (returned in v1).
2857  ASSERT(!context_register().is(a2));
2858  __ li(a2, Operand(proxy->name()));
2859  __ Push(context_register(), a2);
2860  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2861  __ Push(v0, v1); // Function, receiver.
2862 
2863  // If fast case code has been generated, emit code to push the
2864  // function and receiver and have the slow path jump around this
2865  // code.
2866  if (done.is_linked()) {
2867  Label call;
2868  __ Branch(&call);
2869  __ bind(&done);
2870  // Push function.
2871  __ push(v0);
2872  // The receiver is implicitly the global receiver. Indicate this
2873  // by passing undefined to the call function stub.
2874  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2875  __ push(a1);
2876  __ bind(&call);
2877  }
2878 
2879  // The receiver is either the global receiver or an object found
2880  // by LoadContextSlot.
2881  EmitCallWithStub(expr);
2882  } else if (call_type == Call::PROPERTY_CALL) {
2883  Property* property = callee->AsProperty();
2884  { PreservePositionScope scope(masm()->positions_recorder());
2885  VisitForStackValue(property->obj());
2886  }
2887  if (property->key()->IsPropertyName()) {
2888  EmitCallWithIC(expr);
2889  } else {
2890  EmitKeyedCallWithIC(expr, property->key());
2891  }
2892  } else {
2893  ASSERT(call_type == Call::OTHER_CALL);
2894  // Call to an arbitrary expression not handled specially above.
2895  { PreservePositionScope scope(masm()->positions_recorder());
2896  VisitForStackValue(callee);
2897  }
2898  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2899  __ push(a1);
2900  // Emit function call.
2901  EmitCallWithStub(expr);
2902  }
2903 
2904 #ifdef DEBUG
2905  // RecordJSReturnSite should have been called.
2906  ASSERT(expr->return_is_recorded_);
2907 #endif
2908 }
2909 
2910 
2911 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2912  Comment cmnt(masm_, "[ CallNew");
2913  // According to ECMA-262, section 11.2.2, page 44, the function
2914  // expression in new calls must be evaluated before the
2915  // arguments.
2916 
2917  // Push constructor on the stack. If it's not a function it's used as
2918  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2919  // ignored.
2920  VisitForStackValue(expr->expression());
2921 
2922  // Push the arguments ("left-to-right") on the stack.
2923  ZoneList<Expression*>* args = expr->arguments();
2924  int arg_count = args->length();
2925  for (int i = 0; i < arg_count; i++) {
2926  VisitForStackValue(args->at(i));
2927  }
2928 
2929  // Call the construct call builtin that handles allocation and
2930  // constructor invocation.
2931  SetSourcePosition(expr->position());
2932 
2933  // Load function and argument count into a1 and a0.
2934  __ li(a0, Operand(arg_count));
2935  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2936 
2937  // Record call targets in unoptimized code.
2938  Handle<Object> uninitialized =
2940  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2941  if (FLAG_pretenuring_call_new) {
2942  StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2943  isolate()->factory()->NewAllocationSite());
2944  ASSERT(expr->AllocationSiteFeedbackSlot() ==
2945  expr->CallNewFeedbackSlot() + 1);
2946  }
2947 
2948  __ li(a2, FeedbackVector());
2949  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2950 
2951  CallConstructStub stub(RECORD_CALL_TARGET);
2952  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2953  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2954  context()->Plug(v0);
2955 }
2956 
2957 
2958 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2959  ZoneList<Expression*>* args = expr->arguments();
2960  ASSERT(args->length() == 1);
2961 
2962  VisitForAccumulatorValue(args->at(0));
2963 
2964  Label materialize_true, materialize_false;
2965  Label* if_true = NULL;
2966  Label* if_false = NULL;
2967  Label* fall_through = NULL;
2968  context()->PrepareTest(&materialize_true, &materialize_false,
2969  &if_true, &if_false, &fall_through);
2970 
2971  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2972  __ SmiTst(v0, t0);
2973  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2974 
2975  context()->Plug(if_true, if_false);
2976 }
2977 
2978 
2979 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2980  ZoneList<Expression*>* args = expr->arguments();
2981  ASSERT(args->length() == 1);
2982 
2983  VisitForAccumulatorValue(args->at(0));
2984 
2985  Label materialize_true, materialize_false;
2986  Label* if_true = NULL;
2987  Label* if_false = NULL;
2988  Label* fall_through = NULL;
2989  context()->PrepareTest(&materialize_true, &materialize_false,
2990  &if_true, &if_false, &fall_through);
2991 
2992  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2993  __ NonNegativeSmiTst(v0, at);
2994  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2995 
2996  context()->Plug(if_true, if_false);
2997 }
2998 
2999 
3000 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3001  ZoneList<Expression*>* args = expr->arguments();
3002  ASSERT(args->length() == 1);
3003 
3004  VisitForAccumulatorValue(args->at(0));
3005 
3006  Label materialize_true, materialize_false;
3007  Label* if_true = NULL;
3008  Label* if_false = NULL;
3009  Label* fall_through = NULL;
3010  context()->PrepareTest(&materialize_true, &materialize_false,
3011  &if_true, &if_false, &fall_through);
3012 
3013  __ JumpIfSmi(v0, if_false);
3014  __ LoadRoot(at, Heap::kNullValueRootIndex);
3015  __ Branch(if_true, eq, v0, Operand(at));
3017  // Undetectable objects behave like undefined when tested with typeof.
3018  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3019  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3020  __ Branch(if_false, ne, at, Operand(zero_reg));
3022  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3023  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3024  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3025  if_true, if_false, fall_through);
3026 
3027  context()->Plug(if_true, if_false);
3028 }
3029 
3030 
3031 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3032  ZoneList<Expression*>* args = expr->arguments();
3033  ASSERT(args->length() == 1);
3034 
3035  VisitForAccumulatorValue(args->at(0));
3036 
3037  Label materialize_true, materialize_false;
3038  Label* if_true = NULL;
3039  Label* if_false = NULL;
3040  Label* fall_through = NULL;
3041  context()->PrepareTest(&materialize_true, &materialize_false,
3042  &if_true, &if_false, &fall_through);
3043 
3044  __ JumpIfSmi(v0, if_false);
3045  __ GetObjectType(v0, a1, a1);
3046  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3047  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3048  if_true, if_false, fall_through);
3049 
3050  context()->Plug(if_true, if_false);
3051 }
3052 
3053 
3054 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3055  ZoneList<Expression*>* args = expr->arguments();
3056  ASSERT(args->length() == 1);
3057 
3058  VisitForAccumulatorValue(args->at(0));
3059 
3060  Label materialize_true, materialize_false;
3061  Label* if_true = NULL;
3062  Label* if_false = NULL;
3063  Label* fall_through = NULL;
3064  context()->PrepareTest(&materialize_true, &materialize_false,
3065  &if_true, &if_false, &fall_through);
3066 
3067  __ JumpIfSmi(v0, if_false);
3069  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3070  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3071  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3072  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3073 
3074  context()->Plug(if_true, if_false);
3075 }
3076 
3077 
3078 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3079  CallRuntime* expr) {
3080  ZoneList<Expression*>* args = expr->arguments();
3081  ASSERT(args->length() == 1);
3082 
3083  VisitForAccumulatorValue(args->at(0));
3084 
3085  Label materialize_true, materialize_false, skip_lookup;
3086  Label* if_true = NULL;
3087  Label* if_false = NULL;
3088  Label* fall_through = NULL;
3089  context()->PrepareTest(&materialize_true, &materialize_false,
3090  &if_true, &if_false, &fall_through);
3091 
3092  __ AssertNotSmi(v0);
3093 
3096  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3097  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3098 
3099  // Check for fast case object. Generate false result for slow case object.
3102  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3103  __ Branch(if_false, eq, a2, Operand(t0));
3104 
3105  // Look for valueOf name in the descriptor array, and indicate false if
3106  // found. Since we omit an enumeration index check, if it is added via a
3107  // transition that shares its descriptor array, this is a false positive.
3108  Label entry, loop, done;
3109 
3110  // Skip loop if no descriptors are valid.
3111  __ NumberOfOwnDescriptors(a3, a1);
3112  __ Branch(&done, eq, a3, Operand(zero_reg));
3113 
3114  __ LoadInstanceDescriptors(a1, t0);
3115  // t0: descriptor array.
3116  // a3: valid entries in the descriptor array.
3117  STATIC_ASSERT(kSmiTag == 0);
3118  STATIC_ASSERT(kSmiTagSize == 1);
3119  STATIC_ASSERT(kPointerSize == 4);
3120  __ li(at, Operand(DescriptorArray::kDescriptorSize));
3121  __ Mul(a3, a3, at);
3122  // Calculate location of the first key name.
3123  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3124  // Calculate the end of the descriptor array.
3125  __ mov(a2, t0);
3126  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
3127  __ Addu(a2, a2, t1);
3128 
3129  // Loop through all the keys in the descriptor array. If one of these is the
3130  // string "valueOf" the result is false.
3131  // The use of t2 to store the valueOf string assumes that it is not otherwise
3132  // used in the loop below.
3133  __ li(t2, Operand(isolate()->factory()->value_of_string()));
3134  __ jmp(&entry);
3135  __ bind(&loop);
3136  __ lw(a3, MemOperand(t0, 0));
3137  __ Branch(if_false, eq, a3, Operand(t2));
3138  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3139  __ bind(&entry);
3140  __ Branch(&loop, ne, t0, Operand(a2));
3141 
3142  __ bind(&done);
3143 
3144  // Set the bit in the map to indicate that there is no local valueOf field.
3146  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3148 
3149  __ bind(&skip_lookup);
3150 
3151  // If a valueOf property is not found on the object, check that its
3152  // prototype is the unmodified String prototype. If not, the result is false.
3154  __ JumpIfSmi(a2, if_false);
3159  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3160  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3161 
3162  context()->Plug(if_true, if_false);
3163 }
3164 
3165 
3166 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3167  ZoneList<Expression*>* args = expr->arguments();
3168  ASSERT(args->length() == 1);
3169 
3170  VisitForAccumulatorValue(args->at(0));
3171 
3172  Label materialize_true, materialize_false;
3173  Label* if_true = NULL;
3174  Label* if_false = NULL;
3175  Label* fall_through = NULL;
3176  context()->PrepareTest(&materialize_true, &materialize_false,
3177  &if_true, &if_false, &fall_through);
3178 
3179  __ JumpIfSmi(v0, if_false);
3180  __ GetObjectType(v0, a1, a2);
3181  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3182  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3183  __ Branch(if_false);
3184 
3185  context()->Plug(if_true, if_false);
3186 }
3187 
3188 
3189 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3190  ZoneList<Expression*>* args = expr->arguments();
3191  ASSERT(args->length() == 1);
3192 
3193  VisitForAccumulatorValue(args->at(0));
3194 
3195  Label materialize_true, materialize_false;
3196  Label* if_true = NULL;
3197  Label* if_false = NULL;
3198  Label* fall_through = NULL;
3199  context()->PrepareTest(&materialize_true, &materialize_false,
3200  &if_true, &if_false, &fall_through);
3201 
3202  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3205  __ li(t0, 0x80000000);
3206  Label not_nan;
3207  __ Branch(&not_nan, ne, a2, Operand(t0));
3208  __ mov(t0, zero_reg);
3209  __ mov(a2, a1);
3210  __ bind(&not_nan);
3211 
3212  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3213  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
3214 
3215  context()->Plug(if_true, if_false);
3216 }
3217 
3218 
3219 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3220  ZoneList<Expression*>* args = expr->arguments();
3221  ASSERT(args->length() == 1);
3222 
3223  VisitForAccumulatorValue(args->at(0));
3224 
3225  Label materialize_true, materialize_false;
3226  Label* if_true = NULL;
3227  Label* if_false = NULL;
3228  Label* fall_through = NULL;
3229  context()->PrepareTest(&materialize_true, &materialize_false,
3230  &if_true, &if_false, &fall_through);
3231 
3232  __ JumpIfSmi(v0, if_false);
3233  __ GetObjectType(v0, a1, a1);
3234  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3235  Split(eq, a1, Operand(JS_ARRAY_TYPE),
3236  if_true, if_false, fall_through);
3237 
3238  context()->Plug(if_true, if_false);
3239 }
3240 
3241 
3242 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3243  ZoneList<Expression*>* args = expr->arguments();
3244  ASSERT(args->length() == 1);
3245 
3246  VisitForAccumulatorValue(args->at(0));
3247 
3248  Label materialize_true, materialize_false;
3249  Label* if_true = NULL;
3250  Label* if_false = NULL;
3251  Label* fall_through = NULL;
3252  context()->PrepareTest(&materialize_true, &materialize_false,
3253  &if_true, &if_false, &fall_through);
3254 
3255  __ JumpIfSmi(v0, if_false);
3256  __ GetObjectType(v0, a1, a1);
3257  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3258  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3259 
3260  context()->Plug(if_true, if_false);
3261 }
3262 
3263 
3264 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3265  ASSERT(expr->arguments()->length() == 0);
3266 
3267  Label materialize_true, materialize_false;
3268  Label* if_true = NULL;
3269  Label* if_false = NULL;
3270  Label* fall_through = NULL;
3271  context()->PrepareTest(&materialize_true, &materialize_false,
3272  &if_true, &if_false, &fall_through);
3273 
3274  // Get the frame pointer for the calling frame.
3276 
3277  // Skip the arguments adaptor frame if it exists.
3278  Label check_frame_marker;
3280  __ Branch(&check_frame_marker, ne,
3283 
3284  // Check the marker in the calling frame.
3285  __ bind(&check_frame_marker);
3287  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3288  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3289  if_true, if_false, fall_through);
3290 
3291  context()->Plug(if_true, if_false);
3292 }
3293 
3294 
3295 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3296  ZoneList<Expression*>* args = expr->arguments();
3297  ASSERT(args->length() == 2);
3298 
3299  // Load the two objects into registers and perform the comparison.
3300  VisitForStackValue(args->at(0));
3301  VisitForAccumulatorValue(args->at(1));
3302 
3303  Label materialize_true, materialize_false;
3304  Label* if_true = NULL;
3305  Label* if_false = NULL;
3306  Label* fall_through = NULL;
3307  context()->PrepareTest(&materialize_true, &materialize_false,
3308  &if_true, &if_false, &fall_through);
3309 
3310  __ pop(a1);
3311  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3312  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3313 
3314  context()->Plug(if_true, if_false);
3315 }
3316 
3317 
3318 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3319  ZoneList<Expression*>* args = expr->arguments();
3320  ASSERT(args->length() == 1);
3321 
3322  // ArgumentsAccessStub expects the key in a1 and the formal
3323  // parameter count in a0.
3324  VisitForAccumulatorValue(args->at(0));
3325  __ mov(a1, v0);
3326  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3327  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3328  __ CallStub(&stub);
3329  context()->Plug(v0);
3330 }
3331 
3332 
3333 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3334  ASSERT(expr->arguments()->length() == 0);
3335  Label exit;
3336  // Get the number of formal parameters.
3337  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3338 
3339  // Check if the calling frame is an arguments adaptor frame.
3342  __ Branch(&exit, ne, a3,
3344 
3345  // Arguments adaptor case: Read the arguments length from the
3346  // adaptor frame.
3348 
3349  __ bind(&exit);
3350  context()->Plug(v0);
3351 }
3352 
3353 
3354 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3355  ZoneList<Expression*>* args = expr->arguments();
3356  ASSERT(args->length() == 1);
3357  Label done, null, function, non_function_constructor;
3358 
3359  VisitForAccumulatorValue(args->at(0));
3360 
3361  // If the object is a smi, we return null.
3362  __ JumpIfSmi(v0, &null);
3363 
3364  // Check that the object is a JS object but take special care of JS
3365  // functions to make sure they have 'Function' as their class.
3366  // Assume that there are only two callable types, and one of them is at
3367  // either end of the type range for JS object types. Saves extra comparisons.
3369  __ GetObjectType(v0, v0, a1); // Map is now in v0.
3370  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3371 
3374  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3375 
3377  LAST_SPEC_OBJECT_TYPE - 1);
3378  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3379  // Assume that there is no larger type.
3381 
3382  // Check if the constructor in the map is a JS function.
3384  __ GetObjectType(v0, a1, a1);
3385  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3386 
3387  // v0 now contains the constructor function. Grab the
3388  // instance class name from there.
3391  __ Branch(&done);
3392 
3393  // Functions have class 'Function'.
3394  __ bind(&function);
3395  __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
3396  __ jmp(&done);
3397 
3398  // Objects with a non-function constructor have class 'Object'.
3399  __ bind(&non_function_constructor);
3400  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3401  __ jmp(&done);
3402 
3403  // Non-JS objects have class null.
3404  __ bind(&null);
3405  __ LoadRoot(v0, Heap::kNullValueRootIndex);
3406 
3407  // All done.
3408  __ bind(&done);
3409 
3410  context()->Plug(v0);
3411 }
3412 
3413 
3414 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3415  // Conditionally generate a log call.
3416  // Args:
3417  // 0 (literal string): The type of logging (corresponds to the flags).
3418  // This is used to determine whether or not to generate the log call.
3419  // 1 (string): Format string. Access the string at argument index 2
3420  // with '%2s' (see Logger::LogRuntime for all the formats).
3421  // 2 (array): Arguments to the format string.
3422  ZoneList<Expression*>* args = expr->arguments();
3423  ASSERT_EQ(args->length(), 3);
3424  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3425  VisitForStackValue(args->at(1));
3426  VisitForStackValue(args->at(2));
3427  __ CallRuntime(Runtime::kHiddenLog, 2);
3428  }
3429 
3430  // Finally, we're expected to leave a value on the top of the stack.
3431  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3432  context()->Plug(v0);
3433 }
3434 
3435 
3436 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3437  // Load the arguments on the stack and call the stub.
3438  SubStringStub stub;
3439  ZoneList<Expression*>* args = expr->arguments();
3440  ASSERT(args->length() == 3);
3441  VisitForStackValue(args->at(0));
3442  VisitForStackValue(args->at(1));
3443  VisitForStackValue(args->at(2));
3444  __ CallStub(&stub);
3445  context()->Plug(v0);
3446 }
3447 
3448 
3449 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3450  // Load the arguments on the stack and call the stub.
3451  RegExpExecStub stub;
3452  ZoneList<Expression*>* args = expr->arguments();
3453  ASSERT(args->length() == 4);
3454  VisitForStackValue(args->at(0));
3455  VisitForStackValue(args->at(1));
3456  VisitForStackValue(args->at(2));
3457  VisitForStackValue(args->at(3));
3458  __ CallStub(&stub);
3459  context()->Plug(v0);
3460 }
3461 
3462 
3463 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3464  ZoneList<Expression*>* args = expr->arguments();
3465  ASSERT(args->length() == 1);
3466 
3467  VisitForAccumulatorValue(args->at(0)); // Load the object.
3468 
3469  Label done;
3470  // If the object is a smi, return the object.
3471  __ JumpIfSmi(v0, &done);
3472  // If the object is not a value type, return the object.
3473  __ GetObjectType(v0, a1, a1);
3474  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3475 
3477 
3478  __ bind(&done);
3479  context()->Plug(v0);
3480 }
3481 
3482 
3483 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3484  ZoneList<Expression*>* args = expr->arguments();
3485  ASSERT(args->length() == 2);
3486  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3487  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3488 
3489  VisitForAccumulatorValue(args->at(0)); // Load the object.
3490 
3491  Label runtime, done, not_date_object;
3492  Register object = v0;
3493  Register result = v0;
3494  Register scratch0 = t5;
3495  Register scratch1 = a1;
3496 
3497  __ JumpIfSmi(object, &not_date_object);
3498  __ GetObjectType(object, scratch1, scratch1);
3499  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3500 
3501  if (index->value() == 0) {
3502  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3503  __ jmp(&done);
3504  } else {
3505  if (index->value() < JSDate::kFirstUncachedField) {
3506  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3507  __ li(scratch1, Operand(stamp));
3508  __ lw(scratch1, MemOperand(scratch1));
3509  __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3510  __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3511  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3512  kPointerSize * index->value()));
3513  __ jmp(&done);
3514  }
3515  __ bind(&runtime);
3516  __ PrepareCallCFunction(2, scratch1);
3517  __ li(a1, Operand(index));
3518  __ Move(a0, object);
3519  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3520  __ jmp(&done);
3521  }
3522 
3523  __ bind(&not_date_object);
3524  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3525  __ bind(&done);
3526  context()->Plug(v0);
3527 }
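The stamp comparison above is a cache-invalidation check: date fields below JSDate::kFirstUncachedField are cached on the JSDate object and stay valid only while the object's stamp matches an isolate-wide stamp that the runtime bumps when the cached data may have gone stale. A hedged standalone sketch of the pattern (names are illustrative, not V8's actual API):

    #include <cstdint>

    struct CachedDate {
      int64_t value;             // the raw time value (field index 0)
      int32_t cache_stamp;       // copied from the global stamp when the cache was filled
      double  cached_fields[8];  // year, month, day, ... (illustrative layout)
    };

    extern int32_t g_date_cache_stamp;                  // bumped when caches go stale
    double GetDateFieldSlow(CachedDate* d, int index);  // recomputes and refills the cache

    double GetDateField(CachedDate* d, int index, int first_uncached_field) {
      if (index == 0) return static_cast<double>(d->value);
      if (index < first_uncached_field &&
          d->cache_stamp == g_date_cache_stamp) {        // cache still valid?
        return d->cached_fields[index];                  // fast path: no call-out
      }
      return GetDateFieldSlow(d, index);                 // slow path via the C function
    }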
3528 
3529 
3530 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3531  ZoneList<Expression*>* args = expr->arguments();
3532  ASSERT_EQ(3, args->length());
3533 
3534  Register string = v0;
3535  Register index = a1;
3536  Register value = a2;
3537 
3538  VisitForStackValue(args->at(1)); // index
3539  VisitForStackValue(args->at(2)); // value
3540  VisitForAccumulatorValue(args->at(0)); // string
3541  __ Pop(index, value);
3542 
3543  if (FLAG_debug_code) {
3544  __ SmiTst(value, at);
3545  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3546  __ SmiTst(index, at);
3547  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3548  __ SmiUntag(index, index);
3549  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3550  Register scratch = t5;
3551  __ EmitSeqStringSetCharCheck(
3552  string, index, value, scratch, one_byte_seq_type);
3553  __ SmiTag(index, index);
3554  }
3555 
3556  __ SmiUntag(value, value);
3557  __ Addu(at,
3558  string,
3560  __ SmiUntag(index);
3561  __ Addu(at, at, index);
3562  __ sb(value, MemOperand(at));
3563  context()->Plug(string);
3564 }
3565 
3566 
3567 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3568  ZoneList<Expression*>* args = expr->arguments();
3569  ASSERT_EQ(3, args->length());
3570 
3571  Register string = v0;
3572  Register index = a1;
3573  Register value = a2;
3574 
3575  VisitForStackValue(args->at(1)); // index
3576  VisitForStackValue(args->at(2)); // value
3577  VisitForAccumulatorValue(args->at(0)); // string
3578  __ Pop(index, value);
3579 
3580  if (FLAG_debug_code) {
3581  __ SmiTst(value, at);
3582  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3583  __ SmiTst(index, at);
3584  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3585  __ SmiUntag(index, index);
3586  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3587  Register scratch = t5;
3588  __ EmitSeqStringSetCharCheck(
3589  string, index, value, scratch, two_byte_seq_type);
3590  __ SmiTag(index, index);
3591  }
3592 
3593  __ SmiUntag(value, value);
3594  __ Addu(at,
3595  string,
3597  __ Addu(at, at, index);
3598  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3599  __ sh(value, MemOperand(at));
3600  context()->Plug(string);
3601 }
3602 
3603 
3604 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3605  // Load the arguments on the stack and call the runtime function.
3606  ZoneList<Expression*>* args = expr->arguments();
3607  ASSERT(args->length() == 2);
3608  VisitForStackValue(args->at(0));
3609  VisitForStackValue(args->at(1));
3610  MathPowStub stub(MathPowStub::ON_STACK);
3611  __ CallStub(&stub);
3612  context()->Plug(v0);
3613 }
3614 
3615 
3616 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3617  ZoneList<Expression*>* args = expr->arguments();
3618  ASSERT(args->length() == 2);
3619 
3620  VisitForStackValue(args->at(0)); // Load the object.
3621  VisitForAccumulatorValue(args->at(1)); // Load the value.
3622  __ pop(a1); // v0 = value. a1 = object.
3623 
3624  Label done;
3625  // If the object is a smi, return the value.
3626  __ JumpIfSmi(a1, &done);
3627 
3628  // If the object is not a value type, return the value.
3629  __ GetObjectType(a1, a2, a2);
3630  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3631 
3632  // Store the value.
3634  // Update the write barrier. Save the value as it will be
3635  // overwritten by the write barrier code and is needed afterward.
3636  __ mov(a2, v0);
3637  __ RecordWriteField(
3639 
3640  __ bind(&done);
3641  context()->Plug(v0);
3642 }
3643 
3644 
3645 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3646  ZoneList<Expression*>* args = expr->arguments();
3647  ASSERT_EQ(args->length(), 1);
3648 
3649  // Load the argument into a0 and call the stub.
3650  VisitForAccumulatorValue(args->at(0));
3651  __ mov(a0, result_register());
3652 
3653  NumberToStringStub stub;
3654  __ CallStub(&stub);
3655  context()->Plug(v0);
3656 }
3657 
3658 
3659 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3660  ZoneList<Expression*>* args = expr->arguments();
3661  ASSERT(args->length() == 1);
3662 
3663  VisitForAccumulatorValue(args->at(0));
3664 
3665  Label done;
3666  StringCharFromCodeGenerator generator(v0, a1);
3667  generator.GenerateFast(masm_);
3668  __ jmp(&done);
3669 
3670  NopRuntimeCallHelper call_helper;
3671  generator.GenerateSlow(masm_, call_helper);
3672 
3673  __ bind(&done);
3674  context()->Plug(a1);
3675 }
3676 
3677 
3678 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3679  ZoneList<Expression*>* args = expr->arguments();
3680  ASSERT(args->length() == 2);
3681 
3682  VisitForStackValue(args->at(0));
3683  VisitForAccumulatorValue(args->at(1));
3684  __ mov(a0, result_register());
3685 
3686  Register object = a1;
3687  Register index = a0;
3688  Register result = v0;
3689 
3690  __ pop(object);
3691 
3692  Label need_conversion;
3693  Label index_out_of_range;
3694  Label done;
3695  StringCharCodeAtGenerator generator(object,
3696  index,
3697  result,
3698  &need_conversion,
3699  &need_conversion,
3700  &index_out_of_range,
3702  generator.GenerateFast(masm_);
3703  __ jmp(&done);
3704 
3705  __ bind(&index_out_of_range);
3706  // When the index is out of range, the spec requires us to return
3707  // NaN.
3708  __ LoadRoot(result, Heap::kNanValueRootIndex);
3709  __ jmp(&done);
3710 
3711  __ bind(&need_conversion);
3712  // Load the undefined value into the result register, which will
3713  // trigger conversion.
3714  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3715  __ jmp(&done);
3716 
3717  NopRuntimeCallHelper call_helper;
3718  generator.GenerateSlow(masm_, call_helper);
3719 
3720  __ bind(&done);
3721  context()->Plug(result);
3722 }
3723 
3724 
3725 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3726  ZoneList<Expression*>* args = expr->arguments();
3727  ASSERT(args->length() == 2);
3728 
3729  VisitForStackValue(args->at(0));
3730  VisitForAccumulatorValue(args->at(1));
3731  __ mov(a0, result_register());
3732 
3733  Register object = a1;
3734  Register index = a0;
3735  Register scratch = a3;
3736  Register result = v0;
3737 
3738  __ pop(object);
3739 
3740  Label need_conversion;
3741  Label index_out_of_range;
3742  Label done;
3743  StringCharAtGenerator generator(object,
3744  index,
3745  scratch,
3746  result,
3747  &need_conversion,
3748  &need_conversion,
3749  &index_out_of_range,
3751  generator.GenerateFast(masm_);
3752  __ jmp(&done);
3753 
3754  __ bind(&index_out_of_range);
3755  // When the index is out of range, the spec requires us to return
3756  // the empty string.
3757  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3758  __ jmp(&done);
3759 
3760  __ bind(&need_conversion);
3761  // Move smi zero into the result register, which will trigger
3762  // conversion.
3763  __ li(result, Operand(Smi::FromInt(0)));
3764  __ jmp(&done);
3765 
3766  NopRuntimeCallHelper call_helper;
3767  generator.GenerateSlow(masm_, call_helper);
3768 
3769  __ bind(&done);
3770  context()->Plug(result);
3771 }
3772 
3773 
3774 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3775  ZoneList<Expression*>* args = expr->arguments();
3776  ASSERT_EQ(2, args->length());
3777  VisitForStackValue(args->at(0));
3778  VisitForAccumulatorValue(args->at(1));
3779 
3780  __ pop(a1);
3781  __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3782  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3783  __ CallStub(&stub);
3784  context()->Plug(v0);
3785 }
3786 
3787 
3788 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3789  ZoneList<Expression*>* args = expr->arguments();
3790  ASSERT_EQ(2, args->length());
3791 
3792  VisitForStackValue(args->at(0));
3793  VisitForStackValue(args->at(1));
3794 
3795  StringCompareStub stub;
3796  __ CallStub(&stub);
3797  context()->Plug(v0);
3798 }
3799 
3800 
3801 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3802  // Load the argument on the stack and call the runtime function.
3803  ZoneList<Expression*>* args = expr->arguments();
3804  ASSERT(args->length() == 1);
3805  VisitForStackValue(args->at(0));
3806  __ CallRuntime(Runtime::kMath_log, 1);
3807  context()->Plug(v0);
3808 }
3809 
3810 
3811 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3812  // Load the argument on the stack and call the runtime function.
3813  ZoneList<Expression*>* args = expr->arguments();
3814  ASSERT(args->length() == 1);
3815  VisitForStackValue(args->at(0));
3816  __ CallRuntime(Runtime::kMath_sqrt, 1);
3817  context()->Plug(v0);
3818 }
3819 
3820 
3821 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3822  ZoneList<Expression*>* args = expr->arguments();
3823  ASSERT(args->length() >= 2);
3824 
3825  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3826  for (int i = 0; i < arg_count + 1; i++) {
3827  VisitForStackValue(args->at(i));
3828  }
3829  VisitForAccumulatorValue(args->last()); // Function.
3830 
3831  Label runtime, done;
3832  // Check for non-function argument (including proxy).
3833  __ JumpIfSmi(v0, &runtime);
3834  __ GetObjectType(v0, a1, a1);
3835  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3836 
3837  // InvokeFunction requires the function in a1. Move it in there.
3838  __ mov(a1, result_register());
3839  ParameterCount count(arg_count);
3840  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
3842  __ jmp(&done);
3843 
3844  __ bind(&runtime);
3845  __ push(v0);
3846  __ CallRuntime(Runtime::kCall, args->length());
3847  __ bind(&done);
3848 
3849  context()->Plug(v0);
3850 }
3851 
3852 
3853 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3854  RegExpConstructResultStub stub;
3855  ZoneList<Expression*>* args = expr->arguments();
3856  ASSERT(args->length() == 3);
3857  VisitForStackValue(args->at(0));
3858  VisitForStackValue(args->at(1));
3859  VisitForAccumulatorValue(args->at(2));
3860  __ mov(a0, result_register());
3861  __ pop(a1);
3862  __ pop(a2);
3863  __ CallStub(&stub);
3864  context()->Plug(v0);
3865 }
3866 
3867 
3868 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3869  ZoneList<Expression*>* args = expr->arguments();
3870  ASSERT_EQ(2, args->length());
3871 
3872  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3873  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3874 
3875  Handle<FixedArray> jsfunction_result_caches(
3876  isolate()->native_context()->jsfunction_result_caches());
3877  if (jsfunction_result_caches->length() <= cache_id) {
3878  __ Abort(kAttemptToUseUndefinedCache);
3879  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3880  context()->Plug(v0);
3881  return;
3882  }
3883 
3884  VisitForAccumulatorValue(args->at(1));
3885 
3886  Register key = v0;
3887  Register cache = a1;
3890  __ lw(cache,
3893  __ lw(cache,
3895 
3896 
3897  Label done, not_found;
3898  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3900  // a2 now holds finger offset as a smi.
3901  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3902  // a3 now points to the start of fixed array elements.
3903  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3904  __ addu(a3, a3, at);
3905  // a3 now points to key of indexed element of cache.
3906  __ lw(a2, MemOperand(a3));
3907  __ Branch(&not_found, ne, key, Operand(a2));
3908 
3909  __ lw(v0, MemOperand(a3, kPointerSize));
3910  __ Branch(&done);
3911 
3912  __ bind(&not_found);
3913  // Call runtime to perform the lookup.
3914  __ Push(cache, key);
3915  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3916 
3917  __ bind(&done);
3918  context()->Plug(v0);
3919 }
3920 
3921 
3922 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3923  ZoneList<Expression*>* args = expr->arguments();
3924  VisitForAccumulatorValue(args->at(0));
3925 
3926  Label materialize_true, materialize_false;
3927  Label* if_true = NULL;
3928  Label* if_false = NULL;
3929  Label* fall_through = NULL;
3930  context()->PrepareTest(&materialize_true, &materialize_false,
3931  &if_true, &if_false, &fall_through);
3932 
3934  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3935 
3936  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3937  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3938 
3939  context()->Plug(if_true, if_false);
3940 }
3941 
3942 
3943 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3944  ZoneList<Expression*>* args = expr->arguments();
3945  ASSERT(args->length() == 1);
3946  VisitForAccumulatorValue(args->at(0));
3947 
3948  __ AssertString(v0);
3949 
3951  __ IndexFromHash(v0, v0);
3952 
3953  context()->Plug(v0);
3954 }
3955 
3956 
3957 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3958  Label bailout, done, one_char_separator, long_separator,
3959  non_trivial_array, not_size_one_array, loop,
3960  empty_separator_loop, one_char_separator_loop,
3961  one_char_separator_loop_entry, long_separator_loop;
3962  ZoneList<Expression*>* args = expr->arguments();
3963  ASSERT(args->length() == 2);
3964  VisitForStackValue(args->at(1));
3965  VisitForAccumulatorValue(args->at(0));
3966 
3967  // All aliases of the same register have disjoint lifetimes.
3968  Register array = v0;
3969  Register elements = no_reg; // Will be v0.
3970  Register result = no_reg; // Will be v0.
3971  Register separator = a1;
3972  Register array_length = a2;
3973  Register result_pos = no_reg; // Will be a2.
3974  Register string_length = a3;
3975  Register string = t0;
3976  Register element = t1;
3977  Register elements_end = t2;
3978  Register scratch1 = t3;
3979  Register scratch2 = t5;
3980  Register scratch3 = t4;
3981 
3982  // Separator operand is on the stack.
3983  __ pop(separator);
3984 
3985  // Check that the array is a JSArray.
3986  __ JumpIfSmi(array, &bailout);
3987  __ GetObjectType(array, scratch1, scratch2);
3988  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3989 
3990  // Check that the array has fast elements.
3991  __ CheckFastElements(scratch1, scratch2, &bailout);
3992 
3993  // If the array has length zero, return the empty string.
3994  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3995  __ SmiUntag(array_length);
3996  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3997  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
3998  __ Branch(&done);
3999 
4000  __ bind(&non_trivial_array);
4001 
4002  // Get the FixedArray containing array's elements.
4003  elements = array;
4004  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4005  array = no_reg; // End of array's live range.
4006 
4007  // Check that all array elements are sequential ASCII strings, and
4008  // accumulate the sum of their lengths, as a smi-encoded value.
4009  __ mov(string_length, zero_reg);
4010  __ Addu(element,
4011  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4012  __ sll(elements_end, array_length, kPointerSizeLog2);
4013  __ Addu(elements_end, element, elements_end);
4014  // Loop condition: while (element < elements_end).
4015  // Live values in registers:
4016  // elements: Fixed array of strings.
4017  // array_length: Length of the fixed array of strings (not smi)
4018  // separator: Separator string
4019  // string_length: Accumulated sum of string lengths (smi).
4020  // element: Current array element.
4021  // elements_end: Array end.
4022  if (generate_debug_code_) {
4023  __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
4024  array_length, Operand(zero_reg));
4025  }
4026  __ bind(&loop);
4027  __ lw(string, MemOperand(element));
4028  __ Addu(element, element, kPointerSize);
4029  __ JumpIfSmi(string, &bailout);
4030  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4031  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4032  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4033  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4034  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4035  __ BranchOnOverflow(&bailout, scratch3);
4036  __ Branch(&loop, lt, element, Operand(elements_end));
4037 
4038  // If array_length is 1, return elements[0], a string.
4039  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4040  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4041  __ Branch(&done);
4042 
4043  __ bind(&not_size_one_array);
4044 
4045  // Live values in registers:
4046  // separator: Separator string
4047  // array_length: Length of the array.
4048  // string_length: Sum of string lengths (smi).
4049  // elements: FixedArray of strings.
4050 
4051  // Check that the separator is a flat ASCII string.
4052  __ JumpIfSmi(separator, &bailout);
4053  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4054  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4055  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4056 
4057  // Add (separator length times array_length) - separator length to the
4058  // string_length to get the length of the result string. array_length is not
4059  // smi but the other values are, so the result is a smi.
4060  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4061  __ Subu(string_length, string_length, Operand(scratch1));
4062  __ Mult(array_length, scratch1);
4063  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4064  // zero.
4065  __ mfhi(scratch2);
4066  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
4067  __ mflo(scratch2);
4068  __ And(scratch3, scratch2, Operand(0x80000000));
4069  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4070  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4071  __ BranchOnOverflow(&bailout, scratch3);
4072  __ SmiUntag(string_length);
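// Aside (illustrative, not part of the original file): the mfhi/mflo checks above verify
// that the 64-bit product (smi-encoded separator length) * array_length still fits in a
// non-negative 32-bit smi. With 64-bit arithmetic the same test would be:
//   int64_t product = static_cast<int64_t>(separator_smi_length) * array_length;
//   bool fits_in_smi = (product >> 31) == 0;  // top 33 bits zero => valid non-negative smi
// mfhi exposes the upper 32 of those bits and the 0x80000000 mask tests the remaining one.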
4073 
4074  // Get first element in the array to free up the elements register to be used
4075  // for the result.
4076  __ Addu(element,
4077  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4078  result = elements; // End of live range for elements.
4079  elements = no_reg;
4080  // Live values in registers:
4081  // element: First array element
4082  // separator: Separator string
4083  // string_length: Length of result string (not smi)
4084  // array_length: Length of the array.
4085  __ AllocateAsciiString(result,
4086  string_length,
4087  scratch1,
4088  scratch2,
4089  elements_end,
4090  &bailout);
4091  // Prepare for looping. Set up elements_end to the end of the array. Set
4092  // result_pos to the position in the result where the first character
4093  // will be written.
4094  __ sll(elements_end, array_length, kPointerSizeLog2);
4095  __ Addu(elements_end, element, elements_end);
4096  result_pos = array_length; // End of live range for array_length.
4097  array_length = no_reg;
4098  __ Addu(result_pos,
4099  result,
4101 
4102  // Check the length of the separator.
4103  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4104  __ li(at, Operand(Smi::FromInt(1)));
4105  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4106  __ Branch(&long_separator, gt, scratch1, Operand(at));
4107 
4108  // Empty separator case.
4109  __ bind(&empty_separator_loop);
4110  // Live values in registers:
4111  // result_pos: the position to which we are currently copying characters.
4112  // element: Current array element.
4113  // elements_end: Array end.
4114 
4115  // Copy next array element to the result.
4116  __ lw(string, MemOperand(element));
4117  __ Addu(element, element, kPointerSize);
4118  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4119  __ SmiUntag(string_length);
4120  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4121  __ CopyBytes(string, result_pos, string_length, scratch1);
4122  // End while (element < elements_end).
4123  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4124  ASSERT(result.is(v0));
4125  __ Branch(&done);
4126 
4127  // One-character separator case.
4128  __ bind(&one_char_separator);
4129  // Replace separator with its ASCII character value.
4130  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4131  // Jump into the loop after the code that copies the separator, so the first
4132  // element is not preceded by a separator.
4133  __ jmp(&one_char_separator_loop_entry);
4134 
4135  __ bind(&one_char_separator_loop);
4136  // Live values in registers:
4137  // result_pos: the position to which we are currently copying characters.
4138  // element: Current array element.
4139  // elements_end: Array end.
4140  // separator: Single separator ASCII char (in lower byte).
4141 
4142  // Copy the separator character to the result.
4143  __ sb(separator, MemOperand(result_pos));
4144  __ Addu(result_pos, result_pos, 1);
4145 
4146  // Copy next array element to the result.
4147  __ bind(&one_char_separator_loop_entry);
4148  __ lw(string, MemOperand(element));
4149  __ Addu(element, element, kPointerSize);
4150  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4151  __ SmiUntag(string_length);
4152  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4153  __ CopyBytes(string, result_pos, string_length, scratch1);
4154  // End while (element < elements_end).
4155  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4156  ASSERT(result.is(v0));
4157  __ Branch(&done);
4158 
4159  // Long separator case (separator is more than one character). Entry is at the
4160  // label long_separator below.
4161  __ bind(&long_separator_loop);
4162  // Live values in registers:
4163  // result_pos: the position to which we are currently copying characters.
4164  // element: Current array element.
4165  // elements_end: Array end.
4166  // separator: Separator string.
4167 
4168  // Copy the separator to the result.
4169  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
4170  __ SmiUntag(string_length);
4171  __ Addu(string,
4172  separator,
4174  __ CopyBytes(string, result_pos, string_length, scratch1);
4175 
4176  __ bind(&long_separator);
4177  __ lw(string, MemOperand(element));
4178  __ Addu(element, element, kPointerSize);
4179  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4180  __ SmiUntag(string_length);
4181  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4182  __ CopyBytes(string, result_pos, string_length, scratch1);
4183  // End while (element < elements_end).
4184  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4185  ASSERT(result.is(v0));
4186  __ Branch(&done);
4187 
4188  __ bind(&bailout);
4189  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4190  __ bind(&done);
4191  context()->Plug(v0);
4192 }
4193 
4194 
4195 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4196  if (expr->function() != NULL &&
4197  expr->function()->intrinsic_type == Runtime::INLINE) {
4198  Comment cmnt(masm_, "[ InlineRuntimeCall");
4199  EmitInlineRuntimeCall(expr);
4200  return;
4201  }
4202 
4203  Comment cmnt(masm_, "[ CallRuntime");
4204  ZoneList<Expression*>* args = expr->arguments();
4205  int arg_count = args->length();
4206 
4207  if (expr->is_jsruntime()) {
4208  // Push the builtins object as the receiver.
4209  __ lw(a0, GlobalObjectOperand());
4211  __ push(a0);
4212  // Load the function from the receiver.
4213  __ li(a2, Operand(expr->name()));
4214  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4215 
4216  // Push the target function under the receiver.
4217  __ lw(at, MemOperand(sp, 0));
4218  __ push(at);
4219  __ sw(v0, MemOperand(sp, kPointerSize));
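// Aside (not part of the original file): after the two stores above the stack layout is
//   sp + 0            : receiver (the builtins object, duplicated copy)
//   sp + kPointerSize : target function (from v0), written over the original receiver slot
// so the CallFunctionStub below can load the function from under the receiver and the
// pushed arguments.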
4220 
4221  // Push the arguments ("left-to-right").
4222  int arg_count = args->length();
4223  for (int i = 0; i < arg_count; i++) {
4224  VisitForStackValue(args->at(i));
4225  }
4226 
4227  // Record source position of the IC call.
4228  SetSourcePosition(expr->position());
4229  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
4230  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4231  __ CallStub(&stub);
4232 
4233  // Restore context register.
4235 
4236  context()->DropAndPlug(1, v0);
4237  } else {
4238  // Push the arguments ("left-to-right").
4239  for (int i = 0; i < arg_count; i++) {
4240  VisitForStackValue(args->at(i));
4241  }
4242 
4243  // Call the C runtime function.
4244  __ CallRuntime(expr->function(), arg_count);
4245  context()->Plug(v0);
4246  }
4247 }
4248 
4249 
4250 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4251  switch (expr->op()) {
4252  case Token::DELETE: {
4253  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4254  Property* property = expr->expression()->AsProperty();
4255  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4256 
4257  if (property != NULL) {
4258  VisitForStackValue(property->obj());
4259  VisitForStackValue(property->key());
4260  __ li(a1, Operand(Smi::FromInt(strict_mode())));
4261  __ push(a1);
4262  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4263  context()->Plug(v0);
4264  } else if (proxy != NULL) {
4265  Variable* var = proxy->var();
4266  // Delete of an unqualified identifier is disallowed in strict mode
4267  // but "delete this" is allowed.
4268  ASSERT(strict_mode() == SLOPPY || var->is_this());
4269  if (var->IsUnallocated()) {
4270  __ lw(a2, GlobalObjectOperand());
4271  __ li(a1, Operand(var->name()));
4272  __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4273  __ Push(a2, a1, a0);
4274  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4275  context()->Plug(v0);
4276  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4277  // Result of deleting non-global, non-dynamic variables is false.
4278  // The subexpression does not have side effects.
4279  context()->Plug(var->is_this());
4280  } else {
4281  // Non-global variable. Call the runtime to try to delete from the
4282  // context where the variable was introduced.
4283  ASSERT(!context_register().is(a2));
4284  __ li(a2, Operand(var->name()));
4285  __ Push(context_register(), a2);
4286  __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4287  context()->Plug(v0);
4288  }
4289  } else {
4290  // Result of deleting non-property, non-variable reference is true.
4291  // The subexpression may have side effects.
4292  VisitForEffect(expr->expression());
4293  context()->Plug(true);
4294  }
4295  break;
4296  }
4297 
4298  case Token::VOID: {
4299  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4300  VisitForEffect(expr->expression());
4301  context()->Plug(Heap::kUndefinedValueRootIndex);
4302  break;
4303  }
4304 
4305  case Token::NOT: {
4306  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4307  if (context()->IsEffect()) {
4308  // Unary NOT has no side effects so it's only necessary to visit the
4309  // subexpression. Match the optimizing compiler by not branching.
4310  VisitForEffect(expr->expression());
4311  } else if (context()->IsTest()) {
4312  const TestContext* test = TestContext::cast(context());
4313  // The labels are swapped for the recursive call.
4314  VisitForControl(expr->expression(),
4315  test->false_label(),
4316  test->true_label(),
4317  test->fall_through());
4318  context()->Plug(test->true_label(), test->false_label());
4319  } else {
4320  // We handle value contexts explicitly rather than simply visiting
4321  // for control and plugging the control flow into the context,
4322  // because we need to prepare a pair of extra administrative AST ids
4323  // for the optimizing compiler.
4324  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4325  Label materialize_true, materialize_false, done;
4326  VisitForControl(expr->expression(),
4327  &materialize_false,
4328  &materialize_true,
4329  &materialize_true);
4330  __ bind(&materialize_true);
4331  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4332  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4333  if (context()->IsStackValue()) __ push(v0);
4334  __ jmp(&done);
4335  __ bind(&materialize_false);
4336  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4337  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4338  if (context()->IsStackValue()) __ push(v0);
4339  __ bind(&done);
4340  }
4341  break;
4342  }
4343 
4344  case Token::TYPEOF: {
4345  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4346  { StackValueContext context(this);
4347  VisitForTypeofValue(expr->expression());
4348  }
4349  __ CallRuntime(Runtime::kTypeof, 1);
4350  context()->Plug(v0);
4351  break;
4352  }
4353 
4354  default:
4355  UNREACHABLE();
4356  }
4357 }
4358 
4359 
4360 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4361  ASSERT(expr->expression()->IsValidLeftHandSide());
4362 
4363  Comment cmnt(masm_, "[ CountOperation");
4364  SetSourcePosition(expr->position());
4365 
4366  // Expression can only be a property, a global or a (parameter or local)
4367  // slot.
4368  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4369  LhsKind assign_type = VARIABLE;
4370  Property* prop = expr->expression()->AsProperty();
4371  // In case of a property we use the uninitialized expression context
4372  // of the key to detect a named property.
4373  if (prop != NULL) {
4374  assign_type =
4375  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4376  }
4377 
4378  // Evaluate expression and get value.
4379  if (assign_type == VARIABLE) {
4380  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4381  AccumulatorValueContext context(this);
4382  EmitVariableLoad(expr->expression()->AsVariableProxy());
4383  } else {
4384  // Reserve space for result of postfix operation.
4385  if (expr->is_postfix() && !context()->IsEffect()) {
4386  __ li(at, Operand(Smi::FromInt(0)));
4387  __ push(at);
4388  }
4389  if (assign_type == NAMED_PROPERTY) {
4390  // Put the object both on the stack and in the accumulator.
4391  VisitForAccumulatorValue(prop->obj());
4392  __ push(v0);
4393  EmitNamedPropertyLoad(prop);
4394  } else {
4395  VisitForStackValue(prop->obj());
4396  VisitForAccumulatorValue(prop->key());
4397  __ lw(a1, MemOperand(sp, 0));
4398  __ push(v0);
4399  EmitKeyedPropertyLoad(prop);
4400  }
4401  }
4402 
4403  // We need a second deoptimization point after loading the value
4404  // in case evaluating the property load may have a side effect.
4405  if (assign_type == VARIABLE) {
4406  PrepareForBailout(expr->expression(), TOS_REG);
4407  } else {
4408  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4409  }
4410 
4411  // Inline smi case if we are in a loop.
4412  Label stub_call, done;
4413  JumpPatchSite patch_site(masm_);
4414 
4415  int count_value = expr->op() == Token::INC ? 1 : -1;
4416  __ mov(a0, v0);
4417  if (ShouldInlineSmiCase(expr->op())) {
4418  Label slow;
4419  patch_site.EmitJumpIfNotSmi(v0, &slow);
4420 
4421  // Save result for postfix expressions.
4422  if (expr->is_postfix()) {
4423  if (!context()->IsEffect()) {
4424  // Save the result on the stack. If we have a named or keyed property
4425  // we store the result under the receiver that is currently on top
4426  // of the stack.
4427  switch (assign_type) {
4428  case VARIABLE:
4429  __ push(v0);
4430  break;
4431  case NAMED_PROPERTY:
4432  __ sw(v0, MemOperand(sp, kPointerSize));
4433  break;
4434  case KEYED_PROPERTY:
4435  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4436  break;
4437  }
4438  }
4439  }
4440 
4441  Register scratch1 = a1;
4442  Register scratch2 = t0;
4443  __ li(scratch1, Operand(Smi::FromInt(count_value)));
4444  __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4445  __ BranchOnNoOverflow(&done, scratch2);
4446  // Call stub. Undo operation first.
4447  __ Move(v0, a0);
4448  __ jmp(&stub_call);
4449  __ bind(&slow);
4450  }
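// Aside (illustrative, not part of the original file): the inline fast path above adds the
// tagged values directly. On 32-bit V8 a smi is the integer shifted left by one with a zero
// tag bit, so for smi-range integers a and b:
//   (a << 1) + (b << 1) == (a + b) << 1   // the raw add of two smis is already a tagged smi
// AdduAndCheckForOverflow flags the case where a + b no longer fits in 31 signed bits; the
// code then restores the original operand and falls through to the generic BinaryOpICStub.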
4451  ToNumberStub convert_stub;
4452  __ CallStub(&convert_stub);
4453 
4454  // Save result for postfix expressions.
4455  if (expr->is_postfix()) {
4456  if (!context()->IsEffect()) {
4457  // Save the result on the stack. If we have a named or keyed property
4458  // we store the result under the receiver that is currently on top
4459  // of the stack.
4460  switch (assign_type) {
4461  case VARIABLE:
4462  __ push(v0);
4463  break;
4464  case NAMED_PROPERTY:
4465  __ sw(v0, MemOperand(sp, kPointerSize));
4466  break;
4467  case KEYED_PROPERTY:
4468  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4469  break;
4470  }
4471  }
4472  }
4473 
4474  __ bind(&stub_call);
4475  __ mov(a1, v0);
4476  __ li(a0, Operand(Smi::FromInt(count_value)));
4477 
4478  // Record position before stub call.
4479  SetSourcePosition(expr->position());
4480 
4481  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4482  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4483  patch_site.EmitPatchInfo();
4484  __ bind(&done);
4485 
4486  // Store the value returned in v0.
4487  switch (assign_type) {
4488  case VARIABLE:
4489  if (expr->is_postfix()) {
4490  { EffectContext context(this);
4491  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4492  Token::ASSIGN);
4493  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4494  context.Plug(v0);
4495  }
4496  // For all contexts except EffectContext we have the result on
4497  // top of the stack.
4498  if (!context()->IsEffect()) {
4499  context()->PlugTOS();
4500  }
4501  } else {
4502  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4503  Token::ASSIGN);
4504  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4505  context()->Plug(v0);
4506  }
4507  break;
4508  case NAMED_PROPERTY: {
4509  __ mov(a0, result_register()); // Value.
4510  __ li(a2, Operand(prop->key()->AsLiteral()->value())); // Name.
4511  __ pop(a1); // Receiver.
4512  CallStoreIC(expr->CountStoreFeedbackId());
4513  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4514  if (expr->is_postfix()) {
4515  if (!context()->IsEffect()) {
4516  context()->PlugTOS();
4517  }
4518  } else {
4519  context()->Plug(v0);
4520  }
4521  break;
4522  }
4523  case KEYED_PROPERTY: {
4524  __ mov(a0, result_register()); // Value.
4525  __ Pop(a2, a1); // a1 = key, a2 = receiver.
4526  Handle<Code> ic = strict_mode() == SLOPPY
4527  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4528  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4529  CallIC(ic, expr->CountStoreFeedbackId());
4530  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4531  if (expr->is_postfix()) {
4532  if (!context()->IsEffect()) {
4533  context()->PlugTOS();
4534  }
4535  } else {
4536  context()->Plug(v0);
4537  }
4538  break;
4539  }
4540  }
4541 }
4542 
4543 
4544 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4545  ASSERT(!context()->IsEffect());
4546  ASSERT(!context()->IsTest());
4547  VariableProxy* proxy = expr->AsVariableProxy();
4548  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4549  Comment cmnt(masm_, "[ Global variable");
4550  __ lw(a0, GlobalObjectOperand());
4551  __ li(a2, Operand(proxy->name()));
4552  // Use a regular load, not a contextual load, to avoid a reference
4553  // error.
4554  CallLoadIC(NOT_CONTEXTUAL);
4555  PrepareForBailout(expr, TOS_REG);
4556  context()->Plug(v0);
4557  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4558  Comment cmnt(masm_, "[ Lookup slot");
4559  Label done, slow;
4560 
4561  // Generate code for loading from variables potentially shadowed
4562  // by eval-introduced variables.
4563  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4564 
4565  __ bind(&slow);
4566  __ li(a0, Operand(proxy->name()));
4567  __ Push(cp, a0);
4568  __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4569  PrepareForBailout(expr, TOS_REG);
4570  __ bind(&done);
4571 
4572  context()->Plug(v0);
4573  } else {
4574  // This expression cannot throw a reference error at the top level.
4575  VisitInDuplicateContext(expr);
4576  }
4577 }
4578 
4579 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4580  Expression* sub_expr,
4581  Handle<String> check) {
4582  Label materialize_true, materialize_false;
4583  Label* if_true = NULL;
4584  Label* if_false = NULL;
4585  Label* fall_through = NULL;
4586  context()->PrepareTest(&materialize_true, &materialize_false,
4587  &if_true, &if_false, &fall_through);
4588 
4589  { AccumulatorValueContext context(this);
4590  VisitForTypeofValue(sub_expr);
4591  }
4592  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4593 
4594  if (check->Equals(isolate()->heap()->number_string())) {
4595  __ JumpIfSmi(v0, if_true);
4597  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4598  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4599  } else if (check->Equals(isolate()->heap()->string_string())) {
4600  __ JumpIfSmi(v0, if_false);
4601  // Check for undetectable objects => false.
4602  __ GetObjectType(v0, v0, a1);
4603  __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4604  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4605  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4606  Split(eq, a1, Operand(zero_reg),
4607  if_true, if_false, fall_through);
4608  } else if (check->Equals(isolate()->heap()->symbol_string())) {
4609  __ JumpIfSmi(v0, if_false);
4610  __ GetObjectType(v0, v0, a1);
4611  Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4612  } else if (check->Equals(isolate()->heap()->boolean_string())) {
4613  __ LoadRoot(at, Heap::kTrueValueRootIndex);
4614  __ Branch(if_true, eq, v0, Operand(at));
4615  __ LoadRoot(at, Heap::kFalseValueRootIndex);
4616  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4617  } else if (FLAG_harmony_typeof &&
4618  check->Equals(isolate()->heap()->null_string())) {
4619  __ LoadRoot(at, Heap::kNullValueRootIndex);
4620  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4621  } else if (check->Equals(isolate()->heap()->undefined_string())) {
4622  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4623  __ Branch(if_true, eq, v0, Operand(at));
4624  __ JumpIfSmi(v0, if_false);
4625  // Check for undetectable objects => true.
4627  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4628  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4629  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4630  } else if (check->Equals(isolate()->heap()->function_string())) {
4631  __ JumpIfSmi(v0, if_false);
4633  __ GetObjectType(v0, v0, a1);
4634  __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4635  Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4636  if_true, if_false, fall_through);
4637  } else if (check->Equals(isolate()->heap()->object_string())) {
4638  __ JumpIfSmi(v0, if_false);
4639  if (!FLAG_harmony_typeof) {
4640  __ LoadRoot(at, Heap::kNullValueRootIndex);
4641  __ Branch(if_true, eq, v0, Operand(at));
4642  }
4643  // Check for JS objects => true.
4644  __ GetObjectType(v0, v0, a1);
4645  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4647  __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4648  // Check for undetectable objects => false.
4649  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4650  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4651  Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4652  } else {
4653  if (if_false != fall_through) __ jmp(if_false);
4654  }
4655  context()->Plug(if_true, if_false);
4656 }
4657 
4658 
4659 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4660  Comment cmnt(masm_, "[ CompareOperation");
4661  SetSourcePosition(expr->position());
4662 
4663  // First we try a fast inlined version of the compare when one of
4664  // the operands is a literal.
4665  if (TryLiteralCompare(expr)) return;
4666 
4667  // Always perform the comparison for its control flow. Pack the result
4668  // into the expression's context after the comparison is performed.
4669  Label materialize_true, materialize_false;
4670  Label* if_true = NULL;
4671  Label* if_false = NULL;
4672  Label* fall_through = NULL;
4673  context()->PrepareTest(&materialize_true, &materialize_false,
4674  &if_true, &if_false, &fall_through);
4675 
4676  Token::Value op = expr->op();
4677  VisitForStackValue(expr->left());
4678  switch (op) {
4679  case Token::IN:
4680  VisitForStackValue(expr->right());
4681  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4682  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4683  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4684  Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4685  break;
4686 
4687  case Token::INSTANCEOF: {
4688  VisitForStackValue(expr->right());
4689  InstanceofStub stub(InstanceofStub::kNoFlags);
4690  __ CallStub(&stub);
4691  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4692  // The stub returns 0 for true.
4693  Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4694  break;
4695  }
4696 
4697  default: {
4698  VisitForAccumulatorValue(expr->right());
4700  __ mov(a0, result_register());
4701  __ pop(a1);
4702 
4703  bool inline_smi_code = ShouldInlineSmiCase(op);
4704  JumpPatchSite patch_site(masm_);
4705  if (inline_smi_code) {
4706  Label slow_case;
4707  __ Or(a2, a0, Operand(a1));
4708  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4709  Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4710  __ bind(&slow_case);
4711  }
4712  // Record position and call the compare IC.
4713  SetSourcePosition(expr->position());
4714  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4715  CallIC(ic, expr->CompareOperationFeedbackId());
4716  patch_site.EmitPatchInfo();
4717  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4718  Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4719  }
4720  }
4721 
4722  // Convert the result of the comparison into one expected for this
4723  // expression's context.
4724  context()->Plug(if_true, if_false);
4725 }
4726 
4727 
4728 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4729  Expression* sub_expr,
4730  NilValue nil) {
4731  Label materialize_true, materialize_false;
4732  Label* if_true = NULL;
4733  Label* if_false = NULL;
4734  Label* fall_through = NULL;
4735  context()->PrepareTest(&materialize_true, &materialize_false,
4736  &if_true, &if_false, &fall_through);
4737 
4738  VisitForAccumulatorValue(sub_expr);
4739  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4740  __ mov(a0, result_register());
4741  if (expr->op() == Token::EQ_STRICT) {
4742  Heap::RootListIndex nil_value = nil == kNullValue ?
4743  Heap::kNullValueRootIndex :
4744  Heap::kUndefinedValueRootIndex;
4745  __ LoadRoot(a1, nil_value);
4746  Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4747  } else {
4748  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4749  CallIC(ic, expr->CompareOperationFeedbackId());
4750  Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4751  }
4752  context()->Plug(if_true, if_false);
4753 }
4754 
4755 
4756 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4758  context()->Plug(v0);
4759 }
4760 
4761 
4762 Register FullCodeGenerator::result_register() {
4763  return v0;
4764 }
4765 
4766 
4767 Register FullCodeGenerator::context_register() {
4768  return cp;
4769 }
4770 
4771 
4772 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4773  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4774  __ sw(value, MemOperand(fp, frame_offset));
4775 }
4776 
4777 
4778 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4779  __ lw(dst, ContextOperand(cp, context_index));
4780 }
4781 
4782 
4783 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4784  Scope* declaration_scope = scope()->DeclarationScope();
4785  if (declaration_scope->is_global_scope() ||
4786  declaration_scope->is_module_scope()) {
4787  // Contexts nested in the native context have a canonical empty function
4788  // as their closure, not the anonymous closure containing the global
4789  // code. Pass a smi sentinel and let the runtime look up the empty
4790  // function.
4791  __ li(at, Operand(Smi::FromInt(0)));
4792  } else if (declaration_scope->is_eval_scope()) {
4793  // Contexts created by a call to eval have the same closure as the
4794  // context calling eval, not the anonymous closure containing the eval
4795  // code. Fetch it from the context.
4797  } else {
4798  ASSERT(declaration_scope->is_function_scope());
4800  }
4801  __ push(at);
4802 }
4803 
4804 
4805 // ----------------------------------------------------------------------------
4806 // Non-local control flow support.
4807 
4808 void FullCodeGenerator::EnterFinallyBlock() {
4809  ASSERT(!result_register().is(a1));
4810  // Store result register while executing finally block.
4811  __ push(result_register());
4812  // Cook the return address in the link register for storing on the stack
4813  __ Subu(a1, ra, Operand(masm_->CodeObject()));
4815  STATIC_ASSERT(0 == kSmiTag);
4816  __ Addu(a1, a1, Operand(a1)); // Convert to smi.
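// Aside (not part of the original file): adding a1 to itself is the 32-bit smi-tagging trick
// (value * 2, leaving the tag bit clear), so the pushed slot holds a smi offset rather than
// a raw return address into code that could be moved:
//   int32_t delta  = ra - code_object_start;  // position-independent offset into the code
//   int32_t cooked = delta << 1;              // smi-encode: low (tag) bit is 0
// ExitFinallyBlock below reverses this with an arithmetic shift right by one before adding
// the code object back in and jumping.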
4817 
4818  // Store the cooked return address while executing the finally block.
4819  __ push(a1);
4820 
4821  // Store pending message while executing finally block.
4822  ExternalReference pending_message_obj =
4823  ExternalReference::address_of_pending_message_obj(isolate());
4824  __ li(at, Operand(pending_message_obj));
4825  __ lw(a1, MemOperand(at));
4826  __ push(a1);
4827 
4828  ExternalReference has_pending_message =
4829  ExternalReference::address_of_has_pending_message(isolate());
4830  __ li(at, Operand(has_pending_message));
4831  __ lw(a1, MemOperand(at));
4832  __ SmiTag(a1);
4833  __ push(a1);
4834 
4835  ExternalReference pending_message_script =
4836  ExternalReference::address_of_pending_message_script(isolate());
4837  __ li(at, Operand(pending_message_script));
4838  __ lw(a1, MemOperand(at));
4839  __ push(a1);
4840 }
4841 
4842 
4843 void FullCodeGenerator::ExitFinallyBlock() {
4844  ASSERT(!result_register().is(a1));
4845  // Restore pending message from stack.
4846  __ pop(a1);
4847  ExternalReference pending_message_script =
4848  ExternalReference::address_of_pending_message_script(isolate());
4849  __ li(at, Operand(pending_message_script));
4850  __ sw(a1, MemOperand(at));
4851 
4852  __ pop(a1);
4853  __ SmiUntag(a1);
4854  ExternalReference has_pending_message =
4855  ExternalReference::address_of_has_pending_message(isolate());
4856  __ li(at, Operand(has_pending_message));
4857  __ sw(a1, MemOperand(at));
4858 
4859  __ pop(a1);
4860  ExternalReference pending_message_obj =
4861  ExternalReference::address_of_pending_message_obj(isolate());
4862  __ li(at, Operand(pending_message_obj));
4863  __ sw(a1, MemOperand(at));
4864 
4865  // Restore result register from stack.
4866  __ pop(a1);
4867 
4868  // Uncook return address and return.
4869  __ pop(result_register());
4871  __ sra(a1, a1, 1); // Un-smi-tag value.
4872  __ Addu(at, a1, Operand(masm_->CodeObject()));
4873  __ Jump(at);
4874 }
4875 
4876 
4877 #undef __
4878 
4879 #define __ ACCESS_MASM(masm())
4880 
4881 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4882  int* stack_depth,
4883  int* context_length) {
4884  // The macros used here must preserve the result register.
4885 
4886  // Because the handler block contains the context of the finally
4887  // code, we can restore it directly from there for the finally code
4888  // rather than iteratively unwinding contexts via their previous
4889  // links.
4890  __ Drop(*stack_depth); // Down to the handler block.
4891  if (*context_length > 0) {
4892  // Restore the context to its dedicated register and the stack.
4895  }
4896  __ PopTryHandler();
4897  __ Call(finally_entry_);
4898 
4899  *stack_depth = 0;
4900  *context_length = 0;
4901  return previous_;
4902 }
4903 
4904 
4905 #undef __
4906 
4907 
4908 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4909  Address pc,
4910  BackEdgeState target_state,
4911  Code* replacement_code) {
4912  static const int kInstrSize = Assembler::kInstrSize;
4913  Address branch_address = pc - 6 * kInstrSize;
4914  CodePatcher patcher(branch_address, 1);
4915 
4916  switch (target_state) {
4917  case INTERRUPT:
4918  // slt at, a3, zero_reg (in case of count based interrupts)
4919  // beq at, zero_reg, ok
4920  // lui t9, <interrupt stub address> upper
4921  // ori t9, <interrupt stub address> lower
4922  // jalr t9
4923  // nop
4924  // ok-label ----- pc_after points here
4925  patcher.masm()->slt(at, a3, zero_reg);
4926  break;
4927  case ON_STACK_REPLACEMENT:
4928  case OSR_AFTER_STACK_CHECK:
4929  // addiu at, zero_reg, 1
4930  // beq at, zero_reg, ok ;; Not changed
4931  // lui t9, <on-stack replacement address> upper
4932  // ori t9, <on-stack replacement address> lower
4933  // jalr t9 ;; Not changed
4934  // nop ;; Not changed
4935  // ok-label ----- pc_after points here
4936  patcher.masm()->addiu(at, zero_reg, 1);
4937  break;
4938  }
4939  Address pc_immediate_load_address = pc - 4 * kInstrSize;
4940  // Replace the stack check address in the load-immediate (lui/ori pair)
4941  // with the entry address of the replacement code.
4942  Assembler::set_target_address_at(pc_immediate_load_address,
4943  replacement_code->entry());
4944 
4945  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4946  unoptimized_code, pc_immediate_load_address, replacement_code);
4947 }
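PatchAt only ever rewrites two things in the six-instruction back-edge sequence: the first instruction (slt when the interrupt check is armed, addiu when it is disarmed for on-stack replacement) and the target address held by the lui/ori pair. A small illustrative sketch of the address arithmetic follows, not part of the original file; it assumes the 4-byte MIPS instruction size and a made-up ok-label address.

#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kInstrSize = 4;     // Assembler::kInstrSize on MIPS
  const uintptr_t ok_label = 0x2000;  // made-up value of `pc` (the ok-label)
  const uintptr_t branch_address = ok_label - 6 * kInstrSize;   // slt/addiu (patched)
  const uintptr_t beq_address = ok_label - 5 * kInstrSize;      // beq (unchanged)
  const uintptr_t immediate_load = ok_label - 4 * kInstrSize;   // lui of the lui/ori pair
  std::printf("patched instr %#lx, beq %#lx, lui/ori %#lx, jalr %#lx, ok-label %#lx\n",
              static_cast<unsigned long>(branch_address),
              static_cast<unsigned long>(beq_address),
              static_cast<unsigned long>(immediate_load),
              static_cast<unsigned long>(ok_label - 2 * kInstrSize),
              static_cast<unsigned long>(ok_label));
  return 0;
}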
4948 
4949 
4950 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4951  Isolate* isolate,
4952  Code* unoptimized_code,
4953  Address pc) {
4954  static const int kInstrSize = Assembler::kInstrSize;
4955  Address branch_address = pc - 6 * kInstrSize;
4956  Address pc_immediate_load_address = pc - 4 * kInstrSize;
4957 
4958  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
4959  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4960  ASSERT(reinterpret_cast<uint32_t>(
4961  Assembler::target_address_at(pc_immediate_load_address)) ==
4962  reinterpret_cast<uint32_t>(
4963  isolate->builtins()->InterruptCheck()->entry()));
4964  return INTERRUPT;
4965  }
4966 
4967  ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4968 
4969  if (reinterpret_cast<uint32_t>(
4970  Assembler::target_address_at(pc_immediate_load_address)) ==
4971  reinterpret_cast<uint32_t>(
4972  isolate->builtins()->OnStackReplacement()->entry())) {
4973  return ON_STACK_REPLACEMENT;
4974  }
4975 
4976  ASSERT(reinterpret_cast<uint32_t>(
4977  Assembler::target_address_at(pc_immediate_load_address)) ==
4978  reinterpret_cast<uint32_t>(
4979  isolate->builtins()->OsrAfterStackCheck()->entry()));
4980  return OSR_AFTER_STACK_CHECK;
4981 }
4982 
4983 
4984 } } // namespace v8::internal
4985 
4986 #endif // V8_TARGET_ARCH_MIPS