v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
full-codegen-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance near_jump = Label::kFar) {
63  __ testb(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, near_jump); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance near_jump = Label::kFar) {
70  __ testb(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, near_jump); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ testl(rax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, near_jump);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
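// Note (inferred from the code above): testb against kSmiTagMask moves the
// smi-tag bit into the carry flag, so EmitJumpIfNotSmi emits a "jnc" and
// EmitJumpIfSmi a "jc"; the inline-cache patcher later rewrites jc -> jz and
// jnc -> jnz. EmitPatchInfo records the distance from the IC call back to the
// patch site as the immediate of a "testl rax, imm" marker (asserted to fit in
// 8 bits), or a single nop when no inlined smi code was emitted, which is how
// the patcher finds the jump to rewrite.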
102 
103 
104 // Generate code for a JS function. On entry to the function the receiver
105 // and arguments have been pushed on the stack left to right, with the
106 // return address on top of them. The actual argument count matches the
107 // formal parameter count expected by the function.
108 //
109 // The live registers are:
110 // o rdi: the JS function object being called (i.e. ourselves)
111 // o rsi: our context
112 // o rbp: our caller's frame pointer
113 // o rsp: stack pointer (pointing to return address)
114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-x64.h for its layout.
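// Roughly, after the prologue emitted below the frame looks like this
// (offsets from rbp; frames-x64.h has the authoritative constants):
//   rbp + 16 ... : receiver and arguments, pushed left to right by the caller
//   rbp + 8      : return address
//   rbp + 0      : saved caller rbp
//   rbp - 8      : context (rsi)
//   rbp - 16     : JS function being called (rdi)
//   rbp - 24 ... : stack-allocated locals, filled with undefined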
117 void FullCodeGenerator::Generate() {
118  CompilationInfo* info = info_;
119  handler_table_ =
120  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
122  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
123  SetFunctionPosition(function());
124  Comment cmnt(masm_, "[ function compiled by full code generator");
125 
126  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
127 
128 #ifdef DEBUG
129  if (strlen(FLAG_stop_at) > 0 &&
130  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
131  __ int3();
132  }
133 #endif
134 
135  // Strict mode functions and builtins need to replace the receiver
136  // with undefined when called as functions (without an explicit
137  // receiver object). rcx is zero for method calls and non-zero for
138  // function calls.
139  if (!info->is_classic_mode() || info->is_native()) {
140  Label ok;
141  __ testq(rcx, rcx);
142  __ j(zero, &ok, Label::kNear);
143  // +1 for return address.
144  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
145  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
146  __ movq(Operand(rsp, receiver_offset), kScratchRegister);
147  __ bind(&ok);
148  }
149 
150  // Open a frame scope to indicate that there is a frame on the stack. The
151  // MANUAL indicates that the scope shouldn't actually generate code to set up
152  // the frame (that is done below).
153  FrameScope frame_scope(masm_, StackFrame::MANUAL);
154 
155  __ push(rbp); // Caller's frame pointer.
156  __ movq(rbp, rsp);
157  __ push(rsi); // Callee's context.
158  __ push(rdi); // Callee's JS Function.
159 
160  { Comment cmnt(masm_, "[ Allocate locals");
161  int locals_count = info->scope()->num_stack_slots();
162  if (locals_count == 1) {
163  __ PushRoot(Heap::kUndefinedValueRootIndex);
164  } else if (locals_count > 1) {
165  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
166  for (int i = 0; i < locals_count; i++) {
167  __ push(rdx);
168  }
169  }
170  }
171 
172  bool function_in_register = true;
173 
174  // Possibly allocate a local context.
175  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
176  if (heap_slots > 0) {
177  Comment cmnt(masm_, "[ Allocate context");
178  // Argument to NewContext is the function, which is still in rdi.
179  __ push(rdi);
180  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
181  __ Push(info->scope()->GetScopeInfo());
182  __ CallRuntime(Runtime::kNewGlobalContext, 2);
183  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
184  FastNewContextStub stub(heap_slots);
185  __ CallStub(&stub);
186  } else {
187  __ CallRuntime(Runtime::kNewFunctionContext, 1);
188  }
189  function_in_register = false;
190  // Context is returned in both rax and rsi. It replaces the context
191  // passed to us. It's saved in the stack and kept live in rsi.
192  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
193 
194  // Copy any necessary parameters into the context.
195  int num_parameters = info->scope()->num_parameters();
196  for (int i = 0; i < num_parameters; i++) {
197  Variable* var = scope()->parameter(i);
198  if (var->IsContextSlot()) {
199  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
200  (num_parameters - 1 - i) * kPointerSize;
201  // Load parameter from stack.
202  __ movq(rax, Operand(rbp, parameter_offset));
203  // Store it in the context.
204  int context_offset = Context::SlotOffset(var->index());
205  __ movq(Operand(rsi, context_offset), rax);
206  // Update the write barrier. This clobbers rax and rbx.
207  __ RecordWriteContextSlot(
208  rsi, context_offset, rax, rbx, kDontSaveFPRegs);
209  }
210  }
211  }
212 
213  // Possibly allocate an arguments object.
214  Variable* arguments = scope()->arguments();
215  if (arguments != NULL) {
216  // Arguments object must be allocated after the context object, in
217  // case the "arguments" or ".arguments" variables are in the context.
218  Comment cmnt(masm_, "[ Allocate arguments object");
219  if (function_in_register) {
220  __ push(rdi);
221  } else {
222  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
223  }
224  // The receiver is just before the parameters on the caller's stack.
225  int num_parameters = info->scope()->num_parameters();
226  int offset = num_parameters * kPointerSize;
227  __ lea(rdx,
228  Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
229  __ push(rdx);
230  __ Push(Smi::FromInt(num_parameters));
231  // Arguments to ArgumentsAccessStub:
232  // function, receiver address, parameter count.
233  // The stub will rewrite receiver and parameter count if the previous
234  // stack frame was an arguments adapter frame.
235  ArgumentsAccessStub::Type type;
236  if (!is_classic_mode()) {
237  type = ArgumentsAccessStub::NEW_STRICT;
238  } else if (function()->has_duplicate_parameters()) {
239  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
240  } else {
241  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
242  }
243  ArgumentsAccessStub stub(type);
244  __ CallStub(&stub);
245 
246  SetVar(arguments, rax, rbx, rdx);
247  }
248 
249  if (FLAG_trace) {
250  __ CallRuntime(Runtime::kTraceEnter, 0);
251  }
252 
253  // Visit the declarations and body unless there is an illegal
254  // redeclaration.
255  if (scope()->HasIllegalRedeclaration()) {
256  Comment cmnt(masm_, "[ Declarations");
257  scope()->VisitIllegalRedeclaration(this);
258 
259  } else {
260  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
261  { Comment cmnt(masm_, "[ Declarations");
262  // For named function expressions, declare the function name as a
263  // constant.
264  if (scope()->is_function_scope() && scope()->function() != NULL) {
265  VariableDeclaration* function = scope()->function();
266  ASSERT(function->proxy()->var()->mode() == CONST ||
267  function->proxy()->var()->mode() == CONST_HARMONY);
268  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
269  VisitVariableDeclaration(function);
270  }
271  VisitDeclarations(scope()->declarations());
272  }
273 
274  { Comment cmnt(masm_, "[ Stack check");
275  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
276  Label ok;
277  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
278  __ j(above_equal, &ok, Label::kNear);
279  StackCheckStub stub;
280  __ CallStub(&stub);
281  __ bind(&ok);
282  }
283 
284  { Comment cmnt(masm_, "[ Body");
285  ASSERT(loop_depth() == 0);
286  VisitStatements(function()->body());
287  ASSERT(loop_depth() == 0);
288  }
289  }
290 
291  // Always emit a 'return undefined' in case control fell off the end of
292  // the body.
293  { Comment cmnt(masm_, "[ return <undefined>;");
294  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
295  EmitReturnSequence();
296  }
297 }
298 
299 
300 void FullCodeGenerator::ClearAccumulator() {
301  __ Set(rax, 0);
302 }
303 
304 
305 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
306  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
307  __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
308  Smi::FromInt(-delta));
309 }
310 
311 
312 void FullCodeGenerator::EmitProfilingCounterReset() {
313  int reset_value = FLAG_interrupt_budget;
314  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
315  // Self-optimization is a one-off thing; if it fails, don't try again.
316  reset_value = Smi::kMaxValue;
317  }
318  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
319  __ movq(kScratchRegister,
320  reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
321  RelocInfo::NONE);
322  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
323  kScratchRegister);
324 }
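// The profiling counter is a smi stored in a JSGlobalPropertyCell. Back edges
// and returns decrement it by a weight that grows with the code distance they
// span; once it goes negative the generated code calls InterruptStub so the
// runtime can consider optimizing the function, and the counter is reset here
// (or parked at Smi::kMaxValue so a failed one-shot self-optimization is not
// retried).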
325 
326 
327 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
328  Label* back_edge_target) {
329  Comment cmnt(masm_, "[ Stack check");
330  Label ok;
331 
332  if (FLAG_count_based_interrupts) {
333  int weight = 1;
334  if (FLAG_weighted_back_edges) {
335  ASSERT(back_edge_target->is_bound());
336  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
337  weight = Min(kMaxBackEdgeWeight,
338  Max(1, distance / kBackEdgeDistanceUnit));
339  }
340  EmitProfilingCounterDecrement(weight);
341  __ j(positive, &ok, Label::kNear);
342  InterruptStub stub;
343  __ CallStub(&stub);
344  } else {
345  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
346  __ j(above_equal, &ok, Label::kNear);
347  StackCheckStub stub;
348  __ CallStub(&stub);
349  }
350 
351  // Record a mapping of this PC offset to the OSR id. This is used to find
352  // the AST id from the unoptimized code in order to use it as a key into
353  // the deoptimization input data found in the optimized code.
354  RecordStackCheck(stmt->OsrEntryId());
355 
356  // Loop stack checks can be patched to perform on-stack replacement. In
357  // order to decide whether or not to perform OSR we embed the loop depth
358  // in a test instruction after the call so we can extract it from the OSR
359  // builtin.
360  ASSERT(loop_depth() > 0);
361  __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
362 
363  if (FLAG_count_based_interrupts) {
364  EmitProfilingCounterReset();
365  }
366 
367  __ bind(&ok);
368  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
369  // Record a mapping of the OSR id to this PC. This is used if the OSR
370  // entry becomes the target of a bailout. We don't expect it to be, but
371  // we want it to work if it is.
372  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
373 }
374 
375 
376 void FullCodeGenerator::EmitReturnSequence() {
377  Comment cmnt(masm_, "[ Return sequence");
378  if (return_label_.is_bound()) {
379  __ jmp(&return_label_);
380  } else {
381  __ bind(&return_label_);
382  if (FLAG_trace) {
383  __ push(rax);
384  __ CallRuntime(Runtime::kTraceExit, 1);
385  }
386  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
387  // Pretend that the exit is a backwards jump to the entry.
388  int weight = 1;
389  if (info_->ShouldSelfOptimize()) {
390  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
391  } else if (FLAG_weighted_back_edges) {
392  int distance = masm_->pc_offset();
393  weight = Min(kMaxBackEdgeWeight,
394  Max(1, distance / kBackEdgeDistanceUnit));
395  }
396  EmitProfilingCounterDecrement(weight);
397  Label ok;
398  __ j(positive, &ok, Label::kNear);
399  __ push(rax);
400  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
401  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
402  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
403  } else {
404  InterruptStub stub;
405  __ CallStub(&stub);
406  }
407  __ pop(rax);
408  EmitProfilingCounterReset();
409  __ bind(&ok);
410  }
411 #ifdef DEBUG
412  // Add a label for checking the size of the code used for returning.
413  Label check_exit_codesize;
414  masm_->bind(&check_exit_codesize);
415 #endif
416  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
417  __ RecordJSReturn();
418  // Do not use the leave instruction here because it is too short to
419  // patch with the code required by the debugger.
420  __ movq(rsp, rbp);
421  __ pop(rbp);
422 
423  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
424  __ Ret(arguments_bytes, rcx);
425 
426 #ifdef ENABLE_DEBUGGER_SUPPORT
427  // Add padding that will be overwritten by a debugger breakpoint. We
428  // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
429  // (3 + 1 + 3).
430  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
431  for (int i = 0; i < kPadding; ++i) {
432  masm_->int3();
433  }
434  // Check that the size of the code used for returning is large enough
435  // for the debugger's requirements.
436  ASSERT(Assembler::kJSReturnSequenceLength <=
437  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
438 #endif
439  }
440 }
441 
442 
443 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
444  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
445 }
446 
447 
448 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
449  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
450  codegen()->GetVar(result_register(), var);
451 }
452 
453 
454 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
455  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
456  MemOperand operand = codegen()->VarOperand(var, result_register());
457  __ push(operand);
458 }
459 
460 
461 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
462  codegen()->GetVar(result_register(), var);
463  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
464  codegen()->DoTest(this);
465 }
466 
467 
468 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
469 }
470 
471 
472 void FullCodeGenerator::AccumulatorValueContext::Plug(
473  Heap::RootListIndex index) const {
474  __ LoadRoot(result_register(), index);
475 }
476 
477 
478 void FullCodeGenerator::StackValueContext::Plug(
479  Heap::RootListIndex index) const {
480  __ PushRoot(index);
481 }
482 
483 
484 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
485  codegen()->PrepareForBailoutBeforeSplit(condition(),
486  true,
487  true_label_,
488  false_label_);
489  if (index == Heap::kUndefinedValueRootIndex ||
490  index == Heap::kNullValueRootIndex ||
491  index == Heap::kFalseValueRootIndex) {
492  if (false_label_ != fall_through_) __ jmp(false_label_);
493  } else if (index == Heap::kTrueValueRootIndex) {
494  if (true_label_ != fall_through_) __ jmp(true_label_);
495  } else {
496  __ LoadRoot(result_register(), index);
497  codegen()->DoTest(this);
498  }
499 }
500 
501 
502 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
503 }
504 
505 
506 void FullCodeGenerator::AccumulatorValueContext::Plug(
507  Handle<Object> lit) const {
508  if (lit->IsSmi()) {
509  __ SafeMove(result_register(), Smi::cast(*lit));
510  } else {
511  __ Move(result_register(), lit);
512  }
513 }
514 
515 
516 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
517  if (lit->IsSmi()) {
518  __ SafePush(Smi::cast(*lit));
519  } else {
520  __ Push(lit);
521  }
522 }
523 
524 
525 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
526  codegen()->PrepareForBailoutBeforeSplit(condition(),
527  true,
528  true_label_,
529  false_label_);
530  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
531  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
532  if (false_label_ != fall_through_) __ jmp(false_label_);
533  } else if (lit->IsTrue() || lit->IsJSObject()) {
534  if (true_label_ != fall_through_) __ jmp(true_label_);
535  } else if (lit->IsString()) {
536  if (String::cast(*lit)->length() == 0) {
537  if (false_label_ != fall_through_) __ jmp(false_label_);
538  } else {
539  if (true_label_ != fall_through_) __ jmp(true_label_);
540  }
541  } else if (lit->IsSmi()) {
542  if (Smi::cast(*lit)->value() == 0) {
543  if (false_label_ != fall_through_) __ jmp(false_label_);
544  } else {
545  if (true_label_ != fall_through_) __ jmp(true_label_);
546  }
547  } else {
548  // For simplicity we always test the accumulator register.
549  __ Move(result_register(), lit);
550  codegen()->DoTest(this);
551  }
552 }
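// The branches above fold ToBoolean on literals at compile time: undefined,
// null, false, the empty string and the smi 0 branch to the false label;
// true, JS objects, non-empty strings and non-zero smis branch to the true
// label; anything else (heap numbers, for example) falls back to DoTest.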
553 
554 
555 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
556  Register reg) const {
557  ASSERT(count > 0);
558  __ Drop(count);
559 }
560 
561 
562 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
563  int count,
564  Register reg) const {
565  ASSERT(count > 0);
566  __ Drop(count);
567  __ Move(result_register(), reg);
568 }
569 
570 
571 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
572  Register reg) const {
573  ASSERT(count > 0);
574  if (count > 1) __ Drop(count - 1);
575  __ movq(Operand(rsp, 0), reg);
576 }
577 
578 
579 void FullCodeGenerator::TestContext::DropAndPlug(int count,
580  Register reg) const {
581  ASSERT(count > 0);
582  // For simplicity we always test the accumulator register.
583  __ Drop(count);
584  __ Move(result_register(), reg);
585  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
586  codegen()->DoTest(this);
587 }
588 
589 
590 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
591  Label* materialize_false) const {
592  ASSERT(materialize_true == materialize_false);
593  __ bind(materialize_true);
594 }
595 
596 
597 void FullCodeGenerator::AccumulatorValueContext::Plug(
598  Label* materialize_true,
599  Label* materialize_false) const {
600  Label done;
601  __ bind(materialize_true);
602  __ Move(result_register(), isolate()->factory()->true_value());
603  __ jmp(&done, Label::kNear);
604  __ bind(materialize_false);
605  __ Move(result_register(), isolate()->factory()->false_value());
606  __ bind(&done);
607 }
608 
609 
610 void FullCodeGenerator::StackValueContext::Plug(
611  Label* materialize_true,
612  Label* materialize_false) const {
613  Label done;
614  __ bind(materialize_true);
615  __ Push(isolate()->factory()->true_value());
616  __ jmp(&done, Label::kNear);
617  __ bind(materialize_false);
618  __ Push(isolate()->factory()->false_value());
619  __ bind(&done);
620 }
621 
622 
623 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
624  Label* materialize_false) const {
625  ASSERT(materialize_true == true_label_);
626  ASSERT(materialize_false == false_label_);
627 }
628 
629 
630 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
631 }
632 
633 
634 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
635  Heap::RootListIndex value_root_index =
636  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
637  __ LoadRoot(result_register(), value_root_index);
638 }
639 
640 
641 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
642  Heap::RootListIndex value_root_index =
643  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
644  __ PushRoot(value_root_index);
645 }
646 
647 
648 void FullCodeGenerator::TestContext::Plug(bool flag) const {
649  codegen()->PrepareForBailoutBeforeSplit(condition(),
650  true,
651  true_label_,
652  false_label_);
653  if (flag) {
654  if (true_label_ != fall_through_) __ jmp(true_label_);
655  } else {
656  if (false_label_ != fall_through_) __ jmp(false_label_);
657  }
658 }
659 
660 
661 void FullCodeGenerator::DoTest(Expression* condition,
662  Label* if_true,
663  Label* if_false,
664  Label* fall_through) {
665  ToBooleanStub stub(result_register());
666  __ push(result_register());
667  __ CallStub(&stub, condition->test_id());
668  __ testq(result_register(), result_register());
669  // The stub returns nonzero for true.
670  Split(not_zero, if_true, if_false, fall_through);
671 }
672 
673 
674 void FullCodeGenerator::Split(Condition cc,
675  Label* if_true,
676  Label* if_false,
677  Label* fall_through) {
678  if (if_false == fall_through) {
679  __ j(cc, if_true);
680  } else if (if_true == fall_through) {
681  __ j(NegateCondition(cc), if_false);
682  } else {
683  __ j(cc, if_true);
684  __ jmp(if_false);
685  }
686 }
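// For example, Split(equal, if_true, if_false, if_false) emits only
// "j(equal, if_true)" because the false case falls through, while a NULL
// fall-through needs both the conditional jump and an unconditional jmp.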
687 
688 
689 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
690  ASSERT(var->IsStackAllocated());
691  // Offset is negative because higher indexes are at lower addresses.
692  int offset = -var->index() * kPointerSize;
693  // Adjust by a (parameter or local) base offset.
694  if (var->IsParameter()) {
695  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
696  } else {
697  offset += JavaScriptFrameConstants::kLocal0Offset;
698  }
699  return Operand(rbp, offset);
700 }
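// With the offsets above, parameters resolve to positive rbp offsets (the
// caller's argument area above the return address) and locals to negative
// offsets below the saved context and function slots.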
701 
702 
703 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
704  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
705  if (var->IsContextSlot()) {
706  int context_chain_length = scope()->ContextChainLength(var->scope());
707  __ LoadContext(scratch, context_chain_length);
708  return ContextOperand(scratch, var->index());
709  } else {
710  return StackOperand(var);
711  }
712 }
713 
714 
715 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
716  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
717  MemOperand location = VarOperand(var, dest);
718  __ movq(dest, location);
719 }
720 
721 
722 void FullCodeGenerator::SetVar(Variable* var,
723  Register src,
724  Register scratch0,
725  Register scratch1) {
726  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
727  ASSERT(!scratch0.is(src));
728  ASSERT(!scratch0.is(scratch1));
729  ASSERT(!scratch1.is(src));
730  MemOperand location = VarOperand(var, scratch0);
731  __ movq(location, src);
732 
733  // Emit the write barrier code if the location is in the heap.
734  if (var->IsContextSlot()) {
735  int offset = Context::SlotOffset(var->index());
736  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
737  }
738 }
739 
740 
741 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
742  bool should_normalize,
743  Label* if_true,
744  Label* if_false) {
745  // Only prepare for bailouts before splits if we're in a test
746  // context. Otherwise, we let the Visit function deal with the
747  // preparation to avoid preparing with the same AST id twice.
748  if (!context()->IsTest() || !info_->IsOptimizable()) return;
749 
750  Label skip;
751  if (should_normalize) __ jmp(&skip, Label::kNear);
752  PrepareForBailout(expr, TOS_REG);
753  if (should_normalize) {
754  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
755  Split(equal, if_true, if_false, NULL);
756  __ bind(&skip);
757  }
758 }
759 
760 
761 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
762  // The variable in the declaration always resides in the current function
763  // context.
764  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
765  if (generate_debug_code_) {
766  // Check that we're not inside a with or catch context.
767  __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
768  __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
769  __ Check(not_equal, "Declaration in with context.");
770  __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
771  __ Check(not_equal, "Declaration in catch context.");
772  }
773 }
774 
775 
776 void FullCodeGenerator::VisitVariableDeclaration(
777  VariableDeclaration* declaration) {
778  // If it was not possible to allocate the variable at compile time, we
779  // need to "declare" it at runtime to make sure it actually exists in the
780  // local context.
781  VariableProxy* proxy = declaration->proxy();
782  VariableMode mode = declaration->mode();
783  Variable* variable = proxy->var();
784  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
785  switch (variable->location()) {
786  case Variable::UNALLOCATED:
787  globals_->Add(variable->name(), zone());
788  globals_->Add(variable->binding_needs_init()
789  ? isolate()->factory()->the_hole_value()
790  : isolate()->factory()->undefined_value(),
791  zone());
792  break;
793 
794  case Variable::PARAMETER:
795  case Variable::LOCAL:
796  if (hole_init) {
797  Comment cmnt(masm_, "[ VariableDeclaration");
798  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
799  __ movq(StackOperand(variable), kScratchRegister);
800  }
801  break;
802 
803  case Variable::CONTEXT:
804  if (hole_init) {
805  Comment cmnt(masm_, "[ VariableDeclaration");
806  EmitDebugCheckDeclarationContext(variable);
807  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
808  __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
809  // No write barrier since the hole value is in old space.
810  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
811  }
812  break;
813 
814  case Variable::LOOKUP: {
815  Comment cmnt(masm_, "[ VariableDeclaration");
816  __ push(rsi);
817  __ Push(variable->name());
818  // Declaration nodes are always introduced in one of four modes.
819  ASSERT(IsDeclaredVariableMode(mode));
820  PropertyAttributes attr =
821  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
822  __ Push(Smi::FromInt(attr));
823  // Push initial value, if any.
824  // Note: For variables we must not push an initial value (such as
825  // 'undefined') because we may have a (legal) redeclaration and we
826  // must not destroy the current value.
827  if (hole_init) {
828  __ PushRoot(Heap::kTheHoleValueRootIndex);
829  } else {
830  __ Push(Smi::FromInt(0)); // Indicates no initial value.
831  }
832  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
833  break;
834  }
835  }
836 }
837 
838 
839 void FullCodeGenerator::VisitFunctionDeclaration(
840  FunctionDeclaration* declaration) {
841  VariableProxy* proxy = declaration->proxy();
842  Variable* variable = proxy->var();
843  switch (variable->location()) {
844  case Variable::UNALLOCATED: {
845  globals_->Add(variable->name(), zone());
846  Handle<SharedFunctionInfo> function =
847  Compiler::BuildFunctionInfo(declaration->fun(), script());
848  // Check for stack-overflow exception.
849  if (function.is_null()) return SetStackOverflow();
850  globals_->Add(function, zone());
851  break;
852  }
853 
854  case Variable::PARAMETER:
855  case Variable::LOCAL: {
856  Comment cmnt(masm_, "[ FunctionDeclaration");
857  VisitForAccumulatorValue(declaration->fun());
858  __ movq(StackOperand(variable), result_register());
859  break;
860  }
861 
862  case Variable::CONTEXT: {
863  Comment cmnt(masm_, "[ FunctionDeclaration");
864  EmitDebugCheckDeclarationContext(variable);
865  VisitForAccumulatorValue(declaration->fun());
866  __ movq(ContextOperand(rsi, variable->index()), result_register());
867  int offset = Context::SlotOffset(variable->index());
868  // We know that we have written a function, which is not a smi.
869  __ RecordWriteContextSlot(rsi,
870  offset,
871  result_register(),
872  rcx,
873  kDontSaveFPRegs,
874  EMIT_REMEMBERED_SET,
875  OMIT_SMI_CHECK);
876  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
877  break;
878  }
879 
880  case Variable::LOOKUP: {
881  Comment cmnt(masm_, "[ FunctionDeclaration");
882  __ push(rsi);
883  __ Push(variable->name());
884  __ Push(Smi::FromInt(NONE));
885  VisitForStackValue(declaration->fun());
886  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
887  break;
888  }
889  }
890 }
891 
892 
893 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
894  VariableProxy* proxy = declaration->proxy();
895  Variable* variable = proxy->var();
896  Handle<JSModule> instance = declaration->module()->interface()->Instance();
897  ASSERT(!instance.is_null());
898 
899  switch (variable->location()) {
900  case Variable::UNALLOCATED: {
901  Comment cmnt(masm_, "[ ModuleDeclaration");
902  globals_->Add(variable->name(), zone());
903  globals_->Add(instance, zone());
904  Visit(declaration->module());
905  break;
906  }
907 
908  case Variable::CONTEXT: {
909  Comment cmnt(masm_, "[ ModuleDeclaration");
910  EmitDebugCheckDeclarationContext(variable);
911  __ Move(ContextOperand(rsi, variable->index()), instance);
912  Visit(declaration->module());
913  break;
914  }
915 
916  case Variable::PARAMETER:
917  case Variable::LOCAL:
918  case Variable::LOOKUP:
919  UNREACHABLE();
920  }
921 }
922 
923 
924 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
925  VariableProxy* proxy = declaration->proxy();
926  Variable* variable = proxy->var();
927  switch (variable->location()) {
928  case Variable::UNALLOCATED:
929  // TODO(rossberg)
930  break;
931 
932  case Variable::CONTEXT: {
933  Comment cmnt(masm_, "[ ImportDeclaration");
934  EmitDebugCheckDeclarationContext(variable);
935  // TODO(rossberg)
936  break;
937  }
938 
939  case Variable::PARAMETER:
940  case Variable::LOCAL:
941  case Variable::LOOKUP:
942  UNREACHABLE();
943  }
944 }
945 
946 
947 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
948  // TODO(rossberg)
949 }
950 
951 
952 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
953  // Call the runtime to declare the globals.
954  __ push(rsi); // The context is the first argument.
955  __ Push(pairs);
956  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
957  __ CallRuntime(Runtime::kDeclareGlobals, 3);
958  // Return value is ignored.
959 }
960 
961 
962 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
963  Comment cmnt(masm_, "[ SwitchStatement");
964  Breakable nested_statement(this, stmt);
965  SetStatementPosition(stmt);
966 
967  // Keep the switch value on the stack until a case matches.
968  VisitForStackValue(stmt->tag());
969  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
970 
971  ZoneList<CaseClause*>* clauses = stmt->cases();
972  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
973 
974  Label next_test; // Recycled for each test.
975  // Compile all the tests with branches to their bodies.
976  for (int i = 0; i < clauses->length(); i++) {
977  CaseClause* clause = clauses->at(i);
978  clause->body_target()->Unuse();
979 
980  // The default is not a test, but remember it as final fall through.
981  if (clause->is_default()) {
982  default_clause = clause;
983  continue;
984  }
985 
986  Comment cmnt(masm_, "[ Case comparison");
987  __ bind(&next_test);
988  next_test.Unuse();
989 
990  // Compile the label expression.
991  VisitForAccumulatorValue(clause->label());
992 
993  // Perform the comparison as if via '==='.
994  __ movq(rdx, Operand(rsp, 0)); // Switch value.
995  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
996  JumpPatchSite patch_site(masm_);
997  if (inline_smi_code) {
998  Label slow_case;
999  __ movq(rcx, rdx);
1000  __ or_(rcx, rax);
1001  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1002 
1003  __ cmpq(rdx, rax);
1004  __ j(not_equal, &next_test);
1005  __ Drop(1); // Switch value is no longer needed.
1006  __ jmp(clause->body_target());
1007  __ bind(&slow_case);
1008  }
1009 
1010  // Record position before stub call for type feedback.
1011  SetSourcePosition(clause->position());
1012  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1013  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1014  patch_site.EmitPatchInfo();
1015 
1016  __ testq(rax, rax);
1017  __ j(not_equal, &next_test);
1018  __ Drop(1); // Switch value is no longer needed.
1019  __ jmp(clause->body_target());
1020  }
1021 
1022  // Discard the test value and jump to the default if present, otherwise to
1023  // the end of the statement.
1024  __ bind(&next_test);
1025  __ Drop(1); // Switch value is no longer needed.
1026  if (default_clause == NULL) {
1027  __ jmp(nested_statement.break_label());
1028  } else {
1029  __ jmp(default_clause->body_target());
1030  }
1031 
1032  // Compile all the case bodies.
1033  for (int i = 0; i < clauses->length(); i++) {
1034  Comment cmnt(masm_, "[ Case body");
1035  CaseClause* clause = clauses->at(i);
1036  __ bind(clause->body_target());
1037  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1038  VisitStatements(clause->statements());
1039  }
1040 
1041  __ bind(nested_statement.break_label());
1042  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1043 }
1044 
1045 
1046 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1047  Comment cmnt(masm_, "[ ForInStatement");
1048  SetStatementPosition(stmt);
1049 
1050  Label loop, exit;
1051  ForIn loop_statement(this, stmt);
1052  increment_loop_depth();
1053 
1054  // Get the object to enumerate over. Both SpiderMonkey and JSC
1055  // ignore null and undefined in contrast to the specification; see
1056  // ECMA-262 section 12.6.4.
1057  VisitForAccumulatorValue(stmt->enumerable());
1058  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1059  __ j(equal, &exit);
1060  Register null_value = rdi;
1061  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1062  __ cmpq(rax, null_value);
1063  __ j(equal, &exit);
1064 
1065  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1066 
1067  // Convert the object to a JS object.
1068  Label convert, done_convert;
1069  __ JumpIfSmi(rax, &convert);
1070  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1071  __ j(above_equal, &done_convert);
1072  __ bind(&convert);
1073  __ push(rax);
1074  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1075  __ bind(&done_convert);
1076  __ push(rax);
1077 
1078  // Check for proxies.
1079  Label call_runtime;
1080  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1081  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1082  __ j(below_equal, &call_runtime);
1083 
1084  // Check cache validity in generated code. This is a fast case for
1085  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1086  // guarantee cache validity, call the runtime system to check cache
1087  // validity or get the property names in a fixed array.
1088  __ CheckEnumCache(null_value, &call_runtime);
1089 
1090  // The enum cache is valid. Load the map of the object being
1091  // iterated over and use the cache for the iteration.
1092  Label use_cache;
1093  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
1094  __ jmp(&use_cache, Label::kNear);
1095 
1096  // Get the set of properties to enumerate.
1097  __ bind(&call_runtime);
1098  __ push(rax); // Duplicate the enumerable object on the stack.
1099  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1100 
1101  // If we got a map from the runtime call, we can do a fast
1102  // modification check. Otherwise, we got a fixed array, and we have
1103  // to do a slow check.
1104  Label fixed_array;
1105  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1106  Heap::kMetaMapRootIndex);
1107  __ j(not_equal, &fixed_array);
1108 
1109  // We got a map in register rax. Get the enumeration cache from it.
1110  __ bind(&use_cache);
1111 
1112  Label no_descriptors;
1113 
1114  __ EnumLength(rdx, rax);
1115  __ Cmp(rdx, Smi::FromInt(0));
1116  __ j(equal, &no_descriptors);
1117 
1118  __ LoadInstanceDescriptors(rax, rcx);
1119  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1120  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1121 
1122  // Set up the four remaining stack slots.
1123  __ push(rax); // Map.
1124  __ push(rcx); // Enumeration cache.
1125  __ push(rdx); // Number of valid entries for the map in the enum cache.
1126  __ Push(Smi::FromInt(0)); // Initial index.
1127  __ jmp(&loop);
1128 
1129  __ bind(&no_descriptors);
1130  __ addq(rsp, Immediate(kPointerSize));
1131  __ jmp(&exit);
1132 
1133  // We got a fixed array in register rax. Iterate through that.
1134  Label non_proxy;
1135  __ bind(&fixed_array);
1136 
1137  Handle<JSGlobalPropertyCell> cell =
1138  isolate()->factory()->NewJSGlobalPropertyCell(
1139  Handle<Object>(
1140  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1141  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1142  __ LoadHeapObject(rbx, cell);
1143  __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
1144  Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
1145 
1146  __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1147  __ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1148  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1149  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1150  __ j(above, &non_proxy);
1151  __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1152  __ bind(&non_proxy);
1153  __ push(rbx); // Smi
1154  __ push(rax); // Array
1155  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1156  __ push(rax); // Fixed array length (as smi).
1157  __ Push(Smi::FromInt(0)); // Initial index.
1158 
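// Both setup paths leave the same five stack slots for the loop below
// (top of stack first):
//   rsp + 0  : current index (smi)
//   rsp + 8  : length of the enum cache / fixed array (smi)
//   rsp + 16 : enum cache array or fixed array of keys
//   rsp + 24 : map of the enumerable, or a smi marker (0 = proxy, 1 = slow case)
//   rsp + 32 : the enumerable object itself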
1159  // Generate code for doing the condition check.
1160  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1161  __ bind(&loop);
1162  __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1163  __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1164  __ j(above_equal, loop_statement.break_label());
1165 
1166  // Get the current entry of the array into register rbx.
1167  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
1168  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1169  __ movq(rbx, FieldOperand(rbx,
1170  index.reg,
1171  index.scale,
1172  FixedArray::kHeaderSize));
1173 
1174  // Get the expected map from the stack or a smi in the
1175  // permanent slow case into register rdx.
1176  __ movq(rdx, Operand(rsp, 3 * kPointerSize));
1177 
1178  // Check if the expected map still matches that of the enumerable.
1179  // If not, we may have to filter the key.
1180  Label update_each;
1181  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
1182  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1183  __ j(equal, &update_each, Label::kNear);
1184 
1185  // For proxies, no filtering is done.
1186  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1187  __ Cmp(rdx, Smi::FromInt(0));
1188  __ j(equal, &update_each, Label::kNear);
1189 
1190  // Convert the entry to a string or null if it isn't a property
1191  // anymore. If the property has been removed while iterating, we
1192  // just skip it.
1193  __ push(rcx); // Enumerable.
1194  __ push(rbx); // Current entry.
1195  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1196  __ Cmp(rax, Smi::FromInt(0));
1197  __ j(equal, loop_statement.continue_label());
1198  __ movq(rbx, rax);
1199 
1200  // Update the 'each' property or variable from the possibly filtered
1201  // entry in register rbx.
1202  __ bind(&update_each);
1203  __ movq(result_register(), rbx);
1204  // Perform the assignment as if via '='.
1205  { EffectContext context(this);
1206  EmitAssignment(stmt->each());
1207  }
1208 
1209  // Generate code for the body of the loop.
1210  Visit(stmt->body());
1211 
1212  // Generate code for going to the next element by incrementing the
1213  // index (smi) stored on top of the stack.
1214  __ bind(loop_statement.continue_label());
1215  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1216 
1217  EmitStackCheck(stmt, &loop);
1218  __ jmp(&loop);
1219 
1220  // Remove the pointers stored on the stack.
1221  __ bind(loop_statement.break_label());
1222  __ addq(rsp, Immediate(5 * kPointerSize));
1223 
1224  // Exit and decrement the loop depth.
1225  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1226  __ bind(&exit);
1227  decrement_loop_depth();
1228 }
1229 
1230 
1231 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1232  bool pretenure) {
1233  // Use the fast case closure allocation code that allocates in new
1234  // space for nested functions that don't need literals cloning. If
1235  // we're running with the --always-opt or the --prepare-always-opt
1236  // flag, we need to use the runtime function so that the new function
1237  // we are creating here gets a chance to have its code optimized and
1238  // doesn't just get a copy of the existing unoptimized code.
1239  if (!FLAG_always_opt &&
1240  !FLAG_prepare_always_opt &&
1241  !pretenure &&
1242  scope()->is_function_scope() &&
1243  info->num_literals() == 0) {
1244  FastNewClosureStub stub(info->language_mode());
1245  __ Push(info);
1246  __ CallStub(&stub);
1247  } else {
1248  __ push(rsi);
1249  __ Push(info);
1250  __ Push(pretenure
1251  ? isolate()->factory()->true_value()
1252  : isolate()->factory()->false_value());
1253  __ CallRuntime(Runtime::kNewClosure, 3);
1254  }
1255  context()->Plug(rax);
1256 }
1257 
1258 
1259 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1260  Comment cmnt(masm_, "[ VariableProxy");
1261  EmitVariableLoad(expr);
1262 }
1263 
1264 
1265 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1266  TypeofState typeof_state,
1267  Label* slow) {
1268  Register context = rsi;
1269  Register temp = rdx;
1270 
1271  Scope* s = scope();
1272  while (s != NULL) {
1273  if (s->num_heap_slots() > 0) {
1274  if (s->calls_non_strict_eval()) {
1275  // Check that extension is NULL.
1276  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1277  Immediate(0));
1278  __ j(not_equal, slow);
1279  }
1280  // Load next context in chain.
1281  __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1282  // Walk the rest of the chain without clobbering rsi.
1283  context = temp;
1284  }
1285  // If no outer scope calls eval, we do not need to check more
1286  // context extensions. If we have reached an eval scope, we check
1287  // all extensions from this point.
1288  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1289  s = s->outer_scope();
1290  }
1291 
1292  if (s != NULL && s->is_eval_scope()) {
1293  // Loop up the context chain. There is no frame effect so it is
1294  // safe to use raw labels here.
1295  Label next, fast;
1296  if (!context.is(temp)) {
1297  __ movq(temp, context);
1298  }
1299  // Load map for comparison into register, outside loop.
1300  __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1301  __ bind(&next);
1302  // Terminate at native context.
1303  __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1304  __ j(equal, &fast, Label::kNear);
1305  // Check that extension is NULL.
1306  __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1307  __ j(not_equal, slow);
1308  // Load next context in chain.
1309  __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1310  __ jmp(&next);
1311  __ bind(&fast);
1312  }
1313 
1314  // All extension objects were empty and it is safe to use a global
1315  // load IC call.
1316  __ movq(rax, GlobalObjectOperand());
1317  __ Move(rcx, var->name());
1318  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1319  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1320  ? RelocInfo::CODE_TARGET
1321  : RelocInfo::CODE_TARGET_CONTEXT;
1322  CallIC(ic, mode);
1323 }
1324 
1325 
1326 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1327  Label* slow) {
1328  ASSERT(var->IsContextSlot());
1329  Register context = rsi;
1330  Register temp = rbx;
1331 
1332  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1333  if (s->num_heap_slots() > 0) {
1334  if (s->calls_non_strict_eval()) {
1335  // Check that extension is NULL.
1336  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1337  Immediate(0));
1338  __ j(not_equal, slow);
1339  }
1340  __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1341  // Walk the rest of the chain without clobbering rsi.
1342  context = temp;
1343  }
1344  }
1345  // Check that last extension is NULL.
1346  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1347  __ j(not_equal, slow);
1348 
1349  // This function is used only for loads, not stores, so it's safe to
1350  // return an rsi-based operand (the write barrier cannot be allowed to
1351  // destroy the rsi register).
1352  return ContextOperand(context, var->index());
1353 }
1354 
1355 
1356 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1357  TypeofState typeof_state,
1358  Label* slow,
1359  Label* done) {
1360  // Generate fast-case code for variables that might be shadowed by
1361  // eval-introduced variables. Eval is used a lot without
1362  // introducing variables. In those cases, we do not want to
1363  // perform a runtime call for all variables in the scope
1364  // containing the eval.
1365  if (var->mode() == DYNAMIC_GLOBAL) {
1366  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1367  __ jmp(done);
1368  } else if (var->mode() == DYNAMIC_LOCAL) {
1369  Variable* local = var->local_if_not_shadowed();
1370  __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
1371  if (local->mode() == CONST ||
1372  local->mode() == CONST_HARMONY ||
1373  local->mode() == LET) {
1374  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1375  __ j(not_equal, done);
1376  if (local->mode() == CONST) {
1377  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1378  } else { // LET || CONST_HARMONY
1379  __ Push(var->name());
1380  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1381  }
1382  }
1383  __ jmp(done);
1384  }
1385 }
1386 
1387 
1388 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1389  // Record position before possible IC call.
1390  SetSourcePosition(proxy->position());
1391  Variable* var = proxy->var();
1392 
1393  // Three cases: global variables, lookup variables, and all other types of
1394  // variables.
1395  switch (var->location()) {
1396  case Variable::UNALLOCATED: {
1397  Comment cmnt(masm_, "Global variable");
1398  // Use inline caching. Variable name is passed in rcx and the global
1399  // object on the stack.
1400  __ Move(rcx, var->name());
1401  __ movq(rax, GlobalObjectOperand());
1402  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1403  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1404  context()->Plug(rax);
1405  break;
1406  }
1407 
1408  case Variable::PARAMETER:
1409  case Variable::LOCAL:
1410  case Variable::CONTEXT: {
1411  Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1412  if (var->binding_needs_init()) {
1413  // var->scope() may be NULL when the proxy is located in eval code and
1414  // refers to a potential outside binding. Currently those bindings are
1415  // always looked up dynamically, i.e. in that case
1416  // var->location() == LOOKUP.
1417  // always holds.
1418  ASSERT(var->scope() != NULL);
1419 
1420  // Check if the binding really needs an initialization check. The check
1421  // can be skipped in the following situation: we have a LET or CONST
1422  // binding in harmony mode, both the Variable and the VariableProxy have
1423  // the same declaration scope (i.e. they are both in global code, in the
1424  // same function or in the same eval code) and the VariableProxy is in
1425  // the source physically located after the initializer of the variable.
1426  //
1427  // We cannot skip any initialization checks for CONST in non-harmony
1428  // mode because const variables may be declared but never initialized:
1429  // if (false) { const x; }; var y = x;
1430  //
1431  // The condition on the declaration scopes is a conservative check for
1432  // nested functions that access a binding and are called before the
1433  // binding is initialized:
1434  // function() { f(); let x = 1; function f() { x = 2; } }
1435  //
1436  bool skip_init_check;
1437  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1438  skip_init_check = false;
1439  } else {
1440  // Check that we always have valid source position.
1441  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1442  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1443  skip_init_check = var->mode() != CONST &&
1444  var->initializer_position() < proxy->position();
1445  }
1446 
1447  if (!skip_init_check) {
1448  // Let and const need a read barrier.
1449  Label done;
1450  GetVar(rax, var);
1451  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1452  __ j(not_equal, &done, Label::kNear);
1453  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1454  // Throw a reference error when using an uninitialized let/const
1455  // binding in harmony mode.
1456  __ Push(var->name());
1457  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1458  } else {
1459  // Uninitialized const bindings outside of harmony mode are unholed.
1460  ASSERT(var->mode() == CONST);
1461  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1462  }
1463  __ bind(&done);
1464  context()->Plug(rax);
1465  break;
1466  }
1467  }
1468  context()->Plug(var);
1469  break;
1470  }
1471 
1472  case Variable::LOOKUP: {
1473  Label done, slow;
1474  // Generate code for loading from variables potentially shadowed
1475  // by eval-introduced variables.
1476  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1477  __ bind(&slow);
1478  Comment cmnt(masm_, "Lookup slot");
1479  __ push(rsi); // Context.
1480  __ Push(var->name());
1481  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1482  __ bind(&done);
1483  context()->Plug(rax);
1484  break;
1485  }
1486  }
1487 }
1488 
1489 
1490 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1491  Comment cmnt(masm_, "[ RegExpLiteral");
1492  Label materialized;
1493  // Registers will be used as follows:
1494  // rdi = JS function.
1495  // rcx = literals array.
1496  // rbx = regexp literal.
1497  // rax = regexp literal clone.
1498  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1499  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1500  int literal_offset =
1501  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1502  __ movq(rbx, FieldOperand(rcx, literal_offset));
1503  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1504  __ j(not_equal, &materialized, Label::kNear);
1505 
1506  // Create regexp literal using runtime function
1507  // Result will be in rax.
1508  __ push(rcx);
1509  __ Push(Smi::FromInt(expr->literal_index()));
1510  __ Push(expr->pattern());
1511  __ Push(expr->flags());
1512  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1513  __ movq(rbx, rax);
1514 
1515  __ bind(&materialized);
1516  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1517  Label allocated, runtime_allocate;
1518  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1519  __ jmp(&allocated);
1520 
1521  __ bind(&runtime_allocate);
1522  __ push(rbx);
1523  __ Push(Smi::FromInt(size));
1524  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1525  __ pop(rbx);
1526 
1527  __ bind(&allocated);
1528  // Copy the content into the newly allocated memory.
1529  // (Unroll copy loop once for better throughput).
1530  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1531  __ movq(rdx, FieldOperand(rbx, i));
1532  __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1533  __ movq(FieldOperand(rax, i), rdx);
1534  __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1535  }
1536  if ((size % (2 * kPointerSize)) != 0) {
1537  __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1538  __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1539  }
1540  context()->Plug(rax);
1541 }
1542 
1543 
1544 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1545  if (expression == NULL) {
1546  __ PushRoot(Heap::kNullValueRootIndex);
1547  } else {
1548  VisitForStackValue(expression);
1549  }
1550 }
1551 
1552 
1553 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1554  Comment cmnt(masm_, "[ ObjectLiteral");
1555  Handle<FixedArray> constant_properties = expr->constant_properties();
1556  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1557  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1558  __ Push(Smi::FromInt(expr->literal_index()));
1559  __ Push(constant_properties);
1560  int flags = expr->fast_elements()
1561  ? ObjectLiteral::kFastElements
1562  : ObjectLiteral::kNoFlags;
1563  flags |= expr->has_function()
1564  ? ObjectLiteral::kHasFunction
1565  : ObjectLiteral::kNoFlags;
1566  __ Push(Smi::FromInt(flags));
1567  int properties_count = constant_properties->length() / 2;
1568  if (expr->depth() > 1) {
1569  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1570  } else if (flags != ObjectLiteral::kFastElements ||
1571  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1572  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1573  } else {
1574  FastCloneShallowObjectStub stub(properties_count);
1575  __ CallStub(&stub);
1576  }
1577 
1578  // If result_saved is true the result is on top of the stack. If
1579  // result_saved is false the result is in rax.
1580  bool result_saved = false;
1581 
1582  // Mark all computed expressions that are bound to a key that
1583  // is shadowed by a later occurrence of the same key. For the
1584  // marked expressions, no store code is emitted.
1585  expr->CalculateEmitStore(zone());
1586 
1587  AccessorTable accessor_table(zone());
1588  for (int i = 0; i < expr->properties()->length(); i++) {
1589  ObjectLiteral::Property* property = expr->properties()->at(i);
1590  if (property->IsCompileTimeValue()) continue;
1591 
1592  Literal* key = property->key();
1593  Expression* value = property->value();
1594  if (!result_saved) {
1595  __ push(rax); // Save result on the stack
1596  result_saved = true;
1597  }
1598  switch (property->kind()) {
1599  case ObjectLiteral::Property::CONSTANT:
1600  UNREACHABLE();
1601  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1602  ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1603  // Fall through.
1604  case ObjectLiteral::Property::COMPUTED:
1605  if (key->handle()->IsSymbol()) {
1606  if (property->emit_store()) {
1607  VisitForAccumulatorValue(value);
1608  __ Move(rcx, key->handle());
1609  __ movq(rdx, Operand(rsp, 0));
1610  Handle<Code> ic = is_classic_mode()
1611  ? isolate()->builtins()->StoreIC_Initialize()
1612  : isolate()->builtins()->StoreIC_Initialize_Strict();
1613  CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1614  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1615  } else {
1616  VisitForEffect(value);
1617  }
1618  break;
1619  }
1620  // Fall through.
1621  case ObjectLiteral::Property::PROTOTYPE:
1622  __ push(Operand(rsp, 0)); // Duplicate receiver.
1623  VisitForStackValue(key);
1624  VisitForStackValue(value);
1625  if (property->emit_store()) {
1626  __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1627  __ CallRuntime(Runtime::kSetProperty, 4);
1628  } else {
1629  __ Drop(3);
1630  }
1631  break;
1632  case ObjectLiteral::Property::GETTER:
1633  accessor_table.lookup(key)->second->getter = value;
1634  break;
1635  case ObjectLiteral::Property::SETTER:
1636  accessor_table.lookup(key)->second->setter = value;
1637  break;
1638  }
1639  }
1640 
1641  // Emit code to define accessors, using only a single call to the runtime for
1642  // each pair of corresponding getters and setters.
1643  for (AccessorTable::Iterator it = accessor_table.begin();
1644  it != accessor_table.end();
1645  ++it) {
1646  __ push(Operand(rsp, 0)); // Duplicate receiver.
1647  VisitForStackValue(it->first);
1648  EmitAccessor(it->second->getter);
1649  EmitAccessor(it->second->setter);
1650  __ Push(Smi::FromInt(NONE));
1651  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1652  }
1653 
1654  if (expr->has_function()) {
1655  ASSERT(result_saved);
1656  __ push(Operand(rsp, 0));
1657  __ CallRuntime(Runtime::kToFastProperties, 1);
1658  }
1659 
1660  if (result_saved) {
1661  context()->PlugTOS();
1662  } else {
1663  context()->Plug(rax);
1664  }
1665 }
1666 
1667 
1668 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1669  Comment cmnt(masm_, "[ ArrayLiteral");
1670 
1671  ZoneList<Expression*>* subexprs = expr->values();
1672  int length = subexprs->length();
1673  Handle<FixedArray> constant_elements = expr->constant_elements();
1674  ASSERT_EQ(2, constant_elements->length());
1675  ElementsKind constant_elements_kind =
1676  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1677  bool has_constant_fast_elements =
1678  IsFastObjectElementsKind(constant_elements_kind);
1679  Handle<FixedArrayBase> constant_elements_values(
1680  FixedArrayBase::cast(constant_elements->get(1)));
1681 
1682  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1683  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1684  __ Push(Smi::FromInt(expr->literal_index()));
1685  __ Push(constant_elements);
1686  Heap* heap = isolate()->heap();
1687  if (has_constant_fast_elements &&
1688  constant_elements_values->map() == heap->fixed_cow_array_map()) {
1689  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1690  // change, so it's possible to specialize the stub in advance.
1691  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1692  FastCloneShallowArrayStub stub(
1693  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1694  length);
1695  __ CallStub(&stub);
1696  } else if (expr->depth() > 1) {
1697  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1698  } else if (Serializer::enabled() || length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1699  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1700  } else {
1701  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1702  FLAG_smi_only_arrays);
1703  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1704  // change, so it's possible to specialize the stub in advance.
1705  FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
1706  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1707  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1708  FastCloneShallowArrayStub stub(mode, length);
1709  __ CallStub(&stub);
1710  }
1711 
1712  bool result_saved = false; // Is the result saved to the stack?
1713 
1714  // Emit code to evaluate all the non-constant subexpressions and to store
1715  // them into the newly cloned array.
1716  for (int i = 0; i < length; i++) {
1717  Expression* subexpr = subexprs->at(i);
1718  // If the subexpression is a literal or a simple materialized literal it
1719  // is already set in the cloned array.
1720  if (subexpr->AsLiteral() != NULL ||
1721  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1722  continue;
1723  }
1724 
1725  if (!result_saved) {
1726  __ push(rax);
1727  result_saved = true;
1728  }
1729  VisitForAccumulatorValue(subexpr);
1730 
1731  if (IsFastObjectElementsKind(constant_elements_kind)) {
1732  // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: these
1733  // elements cannot transition, so there is no need to call the runtime stub.
1734  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1735  __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1736  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1737  // Store the subexpression value in the array's elements.
1738  __ movq(FieldOperand(rbx, offset), result_register());
1739  // Update the write barrier for the array store.
1740  __ RecordWriteField(rbx, offset, result_register(), rcx,
1741  kDontSaveFPRegs,
1742  EMIT_REMEMBERED_SET,
1743  INLINE_SMI_CHECK);
1744  } else {
1745  // Store the subexpression value in the array's elements.
1746  __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1747  __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
1748  __ Move(rcx, Smi::FromInt(i));
1749  __ Move(rdx, Smi::FromInt(expr->literal_index()));
1750  StoreArrayLiteralElementStub stub;
1751  __ CallStub(&stub);
1752  }
1753 
1754  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1755  }
1756 
1757  if (result_saved) {
1758  context()->PlugTOS();
1759  } else {
1760  context()->Plug(rax);
1761  }
1762 }
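
An illustrative aside (not part of this file): the fast store above addresses element i at FixedArray::kHeaderSize + i * kPointerSize from the tagged elements pointer, and FieldOperand removes the heap-object tag. A small sketch of that offset arithmetic; the constant values below are assumptions about the usual x64 layout, not taken from this listing.

#include <cstdint>
#include <iostream>

// Assumed x64 layout constants (illustrative only).
const int kPointerSize = 8;
const int kHeapObjectTag = 1;                        // tagged pointers are off by one
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length fields

// Byte offset of element i relative to the raw (untagged) FixedArray address,
// i.e. what FieldOperand(rbx, offset) ends up addressing.
int64_t ElementOffset(int i) {
  return kFixedArrayHeaderSize + static_cast<int64_t>(i) * kPointerSize - kHeapObjectTag;
}

int main() {
  for (int i = 0; i < 3; i++)
    std::cout << "element " << i << " -> byte offset " << ElementOffset(i) << "\n";
}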
1763 
1764 
1765 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1766  Comment cmnt(masm_, "[ Assignment");
1767  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1768  // on the left-hand side.
1769  if (!expr->target()->IsValidLeftHandSide()) {
1770  VisitForEffect(expr->target());
1771  return;
1772  }
1773 
1774  // Left-hand side can only be a property, a global or a (parameter or local)
1775  // slot.
1776  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1777  LhsKind assign_type = VARIABLE;
1778  Property* property = expr->target()->AsProperty();
1779  if (property != NULL) {
1780  assign_type = (property->key()->IsPropertyName())
1781  ? NAMED_PROPERTY
1782  : KEYED_PROPERTY;
1783  }
1784 
1785  // Evaluate LHS expression.
1786  switch (assign_type) {
1787  case VARIABLE:
1788  // Nothing to do here.
1789  break;
1790  case NAMED_PROPERTY:
1791  if (expr->is_compound()) {
1792  // We need the receiver both on the stack and in the accumulator.
1793  VisitForAccumulatorValue(property->obj());
1794  __ push(result_register());
1795  } else {
1796  VisitForStackValue(property->obj());
1797  }
1798  break;
1799  case KEYED_PROPERTY: {
1800  if (expr->is_compound()) {
1801  VisitForStackValue(property->obj());
1802  VisitForAccumulatorValue(property->key());
1803  __ movq(rdx, Operand(rsp, 0));
1804  __ push(rax);
1805  } else {
1806  VisitForStackValue(property->obj());
1807  VisitForStackValue(property->key());
1808  }
1809  break;
1810  }
1811  }
1812 
1813  // For compound assignments we need another deoptimization point after the
1814  // variable/property load.
1815  if (expr->is_compound()) {
1816  { AccumulatorValueContext context(this);
1817  switch (assign_type) {
1818  case VARIABLE:
1819  EmitVariableLoad(expr->target()->AsVariableProxy());
1820  PrepareForBailout(expr->target(), TOS_REG);
1821  break;
1822  case NAMED_PROPERTY:
1823  EmitNamedPropertyLoad(property);
1824  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1825  break;
1826  case KEYED_PROPERTY:
1827  EmitKeyedPropertyLoad(property);
1828  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1829  break;
1830  }
1831  }
1832 
1833  Token::Value op = expr->binary_op();
1834  __ push(rax); // Left operand goes on the stack.
1835  VisitForAccumulatorValue(expr->value());
1836 
1837  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1838  ? OVERWRITE_RIGHT
1839  : NO_OVERWRITE;
1840  SetSourcePosition(expr->position() + 1);
1841  AccumulatorValueContext context(this);
1842  if (ShouldInlineSmiCase(op)) {
1843  EmitInlineSmiBinaryOp(expr->binary_operation(),
1844  op,
1845  mode,
1846  expr->target(),
1847  expr->value());
1848  } else {
1849  EmitBinaryOp(expr->binary_operation(), op, mode);
1850  }
1851  // Deoptimization point in case the binary operation may have side effects.
1852  PrepareForBailout(expr->binary_operation(), TOS_REG);
1853  } else {
1854  VisitForAccumulatorValue(expr->value());
1855  }
1856 
1857  // Record source position before possible IC call.
1858  SetSourcePosition(expr->position());
1859 
1860  // Store the value.
1861  switch (assign_type) {
1862  case VARIABLE:
1863  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1864  expr->op());
1865  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1866  context()->Plug(rax);
1867  break;
1868  case NAMED_PROPERTY:
1869  EmitNamedPropertyAssignment(expr);
1870  break;
1871  case KEYED_PROPERTY:
1872  EmitKeyedPropertyAssignment(expr);
1873  break;
1874  }
1875 }
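
An illustrative aside (not part of this file): everything in VisitAssignment hangs off the three-way assign_type classification at the top. A simplified C++ sketch of the same split; the Target struct is a hypothetical stand-in for the real AST classes.

#include <iostream>

// Simplified stand-in for the assignment target used above.
struct Target {
  bool is_property;   // obj.name or obj[key]
  bool key_is_name;   // true for obj.name, false for obj[key]
};

enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };

// Mirrors the classification at the top of VisitAssignment.
LhsKind Classify(const Target& t) {
  if (!t.is_property) return VARIABLE;
  return t.key_is_name ? NAMED_PROPERTY : KEYED_PROPERTY;
}

int main() {
  std::cout << Classify({false, false}) << "\n";  // x = v    -> VARIABLE (0)
  std::cout << Classify({true, true}) << "\n";    // o.x = v  -> NAMED_PROPERTY (1)
  std::cout << Classify({true, false}) << "\n";   // o[k] = v -> KEYED_PROPERTY (2)
}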
1876 
1877 
1878 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1879  SetSourcePosition(prop->position());
1880  Literal* key = prop->key()->AsLiteral();
1881  __ Move(rcx, key->handle());
1882  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1883  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1884 }
1885 
1886 
1887 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1888  SetSourcePosition(prop->position());
1889  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1890  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1891 }
1892 
1893 
1894 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1895  Token::Value op,
1896  OverwriteMode mode,
1897  Expression* left,
1898  Expression* right) {
1899  // Do combined smi check of the operands. Left operand is on the
1900  // stack (popped into rdx). Right operand is in rax but moved into
1901  // rcx to make the shifts easier.
1902  Label done, stub_call, smi_case;
1903  __ pop(rdx);
1904  __ movq(rcx, rax);
1905  __ or_(rax, rdx);
1906  JumpPatchSite patch_site(masm_);
1907  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1908 
1909  __ bind(&stub_call);
1910  __ movq(rax, rcx);
1911  BinaryOpStub stub(op, mode);
1912  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1913  expr->BinaryOperationFeedbackId());
1914  patch_site.EmitPatchInfo();
1915  __ jmp(&done, Label::kNear);
1916 
1917  __ bind(&smi_case);
1918  switch (op) {
1919  case Token::SAR:
1920  __ SmiShiftArithmeticRight(rax, rdx, rcx);
1921  break;
1922  case Token::SHL:
1923  __ SmiShiftLeft(rax, rdx, rcx);
1924  break;
1925  case Token::SHR:
1926  __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1927  break;
1928  case Token::ADD:
1929  __ SmiAdd(rax, rdx, rcx, &stub_call);
1930  break;
1931  case Token::SUB:
1932  __ SmiSub(rax, rdx, rcx, &stub_call);
1933  break;
1934  case Token::MUL:
1935  __ SmiMul(rax, rdx, rcx, &stub_call);
1936  break;
1937  case Token::BIT_OR:
1938  __ SmiOr(rax, rdx, rcx);
1939  break;
1940  case Token::BIT_AND:
1941  __ SmiAnd(rax, rdx, rcx);
1942  break;
1943  case Token::BIT_XOR:
1944  __ SmiXor(rax, rdx, rcx);
1945  break;
1946  default:
1947  UNREACHABLE();
1948  break;
1949  }
1950 
1951  __ bind(&done);
1952  context()->Plug(rax);
1953 }
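
An illustrative aside (not part of this file): the "combined smi check" above works because the smi tag is the cleared low bit, so OR-ing both operands and testing the tag mask rejects the inline path as soon as either value is a heap object. A sketch of the same test on plain 64-bit words; the tagging below is simplified for illustration, not V8's actual Smi encoding.

#include <cstdint>
#include <iostream>

const uint64_t kTagMask = 1;  // low bit: 0 = small integer, 1 = heap object pointer

// True when *both* tagged words are small integers: the OR has a clear tag
// bit only if each operand does, which is what `or rax, rdx; testb rax, #1`
// checks before taking the smi_case path above.
bool BothSmi(uint64_t a, uint64_t b) {
  return ((a | b) & kTagMask) == 0;
}

int main() {
  uint64_t smi_three = 3u << 1;      // illustrative tagged integer (tag bit clear)
  uint64_t heap_object = 0x1001;     // illustrative tagged pointer (tag bit set)
  std::cout << BothSmi(smi_three, smi_three) << "\n";    // 1: inline smi path
  std::cout << BothSmi(smi_three, heap_object) << "\n";  // 0: fall back to the stub
}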
1954 
1955 
1956 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1957  Token::Value op,
1958  OverwriteMode mode) {
1959  __ pop(rdx);
1960  BinaryOpStub stub(op, mode);
1961  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1962  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1963  expr->BinaryOperationFeedbackId());
1964  patch_site.EmitPatchInfo();
1965  context()->Plug(rax);
1966 }
1967 
1968 
1969 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1970  // Invalid left-hand sides are rewritten to have a 'throw
1971  // ReferenceError' on the left-hand side.
1972  if (!expr->IsValidLeftHandSide()) {
1973  VisitForEffect(expr);
1974  return;
1975  }
1976 
1977  // Left-hand side can only be a property, a global or a (parameter or local)
1978  // slot.
1979  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1980  LhsKind assign_type = VARIABLE;
1981  Property* prop = expr->AsProperty();
1982  if (prop != NULL) {
1983  assign_type = (prop->key()->IsPropertyName())
1984  ? NAMED_PROPERTY
1985  : KEYED_PROPERTY;
1986  }
1987 
1988  switch (assign_type) {
1989  case VARIABLE: {
1990  Variable* var = expr->AsVariableProxy()->var();
1991  EffectContext context(this);
1992  EmitVariableAssignment(var, Token::ASSIGN);
1993  break;
1994  }
1995  case NAMED_PROPERTY: {
1996  __ push(rax); // Preserve value.
1997  VisitForAccumulatorValue(prop->obj());
1998  __ movq(rdx, rax);
1999  __ pop(rax); // Restore value.
2000  __ Move(rcx, prop->key()->AsLiteral()->handle());
2001  Handle<Code> ic = is_classic_mode()
2002  ? isolate()->builtins()->StoreIC_Initialize()
2003  : isolate()->builtins()->StoreIC_Initialize_Strict();
2004  CallIC(ic);
2005  break;
2006  }
2007  case KEYED_PROPERTY: {
2008  __ push(rax); // Preserve value.
2009  VisitForStackValue(prop->obj());
2010  VisitForAccumulatorValue(prop->key());
2011  __ movq(rcx, rax);
2012  __ pop(rdx);
2013  __ pop(rax); // Restore value.
2014  Handle<Code> ic = is_classic_mode()
2015  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2016  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2017  CallIC(ic);
2018  break;
2019  }
2020  }
2021  context()->Plug(rax);
2022 }
2023 
2024 
2025 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2026  Token::Value op) {
2027  if (var->IsUnallocated()) {
2028  // Global var, const, or let.
2029  __ Move(rcx, var->name());
2030  __ movq(rdx, GlobalObjectOperand());
2031  Handle<Code> ic = is_classic_mode()
2032  ? isolate()->builtins()->StoreIC_Initialize()
2033  : isolate()->builtins()->StoreIC_Initialize_Strict();
2034  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2035  } else if (op == Token::INIT_CONST) {
2036  // Const initializers need a write barrier.
2037  ASSERT(!var->IsParameter()); // No const parameters.
2038  if (var->IsStackLocal()) {
2039  Label skip;
2040  __ movq(rdx, StackOperand(var));
2041  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2042  __ j(not_equal, &skip);
2043  __ movq(StackOperand(var), rax);
2044  __ bind(&skip);
2045  } else {
2046  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2047  // Like var declarations, const declarations are hoisted to function
2048  // scope. However, unlike var initializers, const initializers are
2049  // able to drill a hole to that function context, even from inside a
2050  // 'with' context. We thus bypass the normal static scope lookup for
2051  // var->IsContextSlot().
2052  __ push(rax);
2053  __ push(rsi);
2054  __ Push(var->name());
2055  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2056  }
2057 
2058  } else if (var->mode() == LET && op != Token::INIT_LET) {
2059  // Non-initializing assignment to let variable needs a write barrier.
2060  if (var->IsLookupSlot()) {
2061  __ push(rax); // Value.
2062  __ push(rsi); // Context.
2063  __ Push(var->name());
2064  __ Push(Smi::FromInt(language_mode()));
2065  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2066  } else {
2067  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2068  Label assign;
2069  MemOperand location = VarOperand(var, rcx);
2070  __ movq(rdx, location);
2071  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2072  __ j(not_equal, &assign, Label::kNear);
2073  __ Push(var->name());
2074  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2075  __ bind(&assign);
2076  __ movq(location, rax);
2077  if (var->IsContextSlot()) {
2078  __ movq(rdx, rax);
2079  __ RecordWriteContextSlot(
2080  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2081  }
2082  }
2083 
2084  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2085  // Assignment to var or initializing assignment to let/const
2086  // in harmony mode.
2087  if (var->IsStackAllocated() || var->IsContextSlot()) {
2088  MemOperand location = VarOperand(var, rcx);
2089  if (generate_debug_code_ && op == Token::INIT_LET) {
2090  // Check for an uninitialized let binding.
2091  __ movq(rdx, location);
2092  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2093  __ Check(equal, "Let binding re-initialization.");
2094  }
2095  // Perform the assignment.
2096  __ movq(location, rax);
2097  if (var->IsContextSlot()) {
2098  __ movq(rdx, rax);
2099  __ RecordWriteContextSlot(
2100  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2101  }
2102  } else {
2103  ASSERT(var->IsLookupSlot());
2104  __ push(rax); // Value.
2105  __ push(rsi); // Context.
2106  __ Push(var->name());
2107  __ Push(Smi::FromInt(language_mode()));
2108  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2109  }
2110  }
2111  // Non-initializing assignments to consts are ignored.
2112 }
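
An illustrative aside (not part of this file): both the const and the let paths above compare the slot against the-hole to distinguish "declared but not yet initialized" from a real value, throwing a ReferenceError for a non-initializing let store. A sketch of that sentinel check; the Slot type and helper names are hypothetical.

#include <iostream>
#include <stdexcept>

// A unique sentinel plays the role of Heap::kTheHoleValue.
static const char kTheHole[] = "<the hole>";

struct Slot { const char* value = kTheHole; };

// Non-initializing assignment to a `let` slot: if it still holds the hole the
// binding was never initialized, so throw, mirroring Runtime::kThrowReferenceError.
void AssignLet(Slot* slot, const char* value) {
  if (slot->value == kTheHole) throw std::runtime_error("ReferenceError");
  slot->value = value;
}

int main() {
  Slot x;
  try { AssignLet(&x, "1"); } catch (const std::exception& e) {
    std::cout << e.what() << " (assignment before initialization)\n";
  }
  x.value = "init";    // the initializing store (Token::INIT_LET path)
  AssignLet(&x, "2");  // now allowed
  std::cout << x.value << "\n";
}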
2113 
2114 
2115 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2116  // Assignment to a property, using a named store IC.
2117  Property* prop = expr->target()->AsProperty();
2118  ASSERT(prop != NULL);
2119  ASSERT(prop->key()->AsLiteral() != NULL);
2120 
2121  // Record source code position before IC call.
2122  SetSourcePosition(expr->position());
2123  __ Move(rcx, prop->key()->AsLiteral()->handle());
2124  __ pop(rdx);
2125  Handle<Code> ic = is_classic_mode()
2126  ? isolate()->builtins()->StoreIC_Initialize()
2127  : isolate()->builtins()->StoreIC_Initialize_Strict();
2128  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2129 
2130  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2131  context()->Plug(rax);
2132 }
2133 
2134 
2135 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2136  // Assignment to a property, using a keyed store IC.
2137 
2138  __ pop(rcx);
2139  __ pop(rdx);
2140  // Record source code position before IC call.
2141  SetSourcePosition(expr->position());
2142  Handle<Code> ic = is_classic_mode()
2143  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2144  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2145  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2146 
2147  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2148  context()->Plug(rax);
2149 }
2150 
2151 
2152 void FullCodeGenerator::VisitProperty(Property* expr) {
2153  Comment cmnt(masm_, "[ Property");
2154  Expression* key = expr->key();
2155 
2156  if (key->IsPropertyName()) {
2157  VisitForAccumulatorValue(expr->obj());
2158  EmitNamedPropertyLoad(expr);
2159  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2160  context()->Plug(rax);
2161  } else {
2162  VisitForStackValue(expr->obj());
2163  VisitForAccumulatorValue(expr->key());
2164  __ pop(rdx);
2165  EmitKeyedPropertyLoad(expr);
2166  context()->Plug(rax);
2167  }
2168 }
2169 
2170 
2171 void FullCodeGenerator::CallIC(Handle<Code> code,
2172  RelocInfo::Mode rmode,
2173  TypeFeedbackId ast_id) {
2174  ic_total_count_++;
2175  __ call(code, rmode, ast_id);
2176 }
2177 
2178 
2179 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2180  Handle<Object> name,
2181  RelocInfo::Mode mode) {
2182  // Code common for calls using the IC.
2183  ZoneList<Expression*>* args = expr->arguments();
2184  int arg_count = args->length();
2185  { PreservePositionScope scope(masm()->positions_recorder());
2186  for (int i = 0; i < arg_count; i++) {
2187  VisitForStackValue(args->at(i));
2188  }
2189  __ Move(rcx, name);
2190  }
2191  // Record source position for debugger.
2192  SetSourcePosition(expr->position());
2193  // Call the IC initialization code.
2194  Handle<Code> ic =
2195  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2196  CallIC(ic, mode, expr->CallFeedbackId());
2197  RecordJSReturnSite(expr);
2198  // Restore context register.
2199  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2200  context()->Plug(rax);
2201 }
2202 
2203 
2204 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2205  Expression* key) {
2206  // Load the key.
2207  VisitForAccumulatorValue(key);
2208 
2209  // Swap the name of the function and the receiver on the stack to follow
2210  // the calling convention for call ICs.
2211  __ pop(rcx);
2212  __ push(rax);
2213  __ push(rcx);
2214 
2215  // Load the arguments.
2216  ZoneList<Expression*>* args = expr->arguments();
2217  int arg_count = args->length();
2218  { PreservePositionScope scope(masm()->positions_recorder());
2219  for (int i = 0; i < arg_count; i++) {
2220  VisitForStackValue(args->at(i));
2221  }
2222  }
2223  // Record source position for debugger.
2224  SetSourcePosition(expr->position());
2225  // Call the IC initialization code.
2226  Handle<Code> ic =
2227  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2228  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2229  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2230  RecordJSReturnSite(expr);
2231  // Restore context register.
2232  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2233  context()->DropAndPlug(1, rax); // Drop the key still on the stack.
2234 }
2235 
2236 
2237 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2238  // Code common for calls using the call stub.
2239  ZoneList<Expression*>* args = expr->arguments();
2240  int arg_count = args->length();
2241  { PreservePositionScope scope(masm()->positions_recorder());
2242  for (int i = 0; i < arg_count; i++) {
2243  VisitForStackValue(args->at(i));
2244  }
2245  }
2246  // Record source position for debugger.
2247  SetSourcePosition(expr->position());
2248 
2249  // Record call targets in unoptimized code.
2250  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2251  Handle<Object> uninitialized =
2252  TypeFeedbackCells::UninitializedSentinel(isolate());
2253  Handle<JSGlobalPropertyCell> cell =
2254  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2255  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2256  __ Move(rbx, cell);
2257 
2258  CallFunctionStub stub(arg_count, flags);
2259  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2260  __ CallStub(&stub, expr->CallFeedbackId());
2261  RecordJSReturnSite(expr);
2262  // Restore context register.
2263  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2264  // Discard the function left on TOS.
2265  context()->DropAndPlug(1, rax);
2266 }
2267 
2268 
2269 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2270  // Push copy of the first argument or undefined if it doesn't exist.
2271  if (arg_count > 0) {
2272  __ push(Operand(rsp, arg_count * kPointerSize));
2273  } else {
2274  __ PushRoot(Heap::kUndefinedValueRootIndex);
2275  }
2276 
2277  // Push the receiver of the enclosing function and do runtime call.
2278  __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2279 
2280  // Push the language mode.
2281  __ Push(Smi::FromInt(language_mode()));
2282 
2283  // Push the start position of the scope the call resides in.
2284  __ Push(Smi::FromInt(scope()->start_position()));
2285 
2286  // Do the runtime call.
2287  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2288 }
2289 
2290 
2291 void FullCodeGenerator::VisitCall(Call* expr) {
2292 #ifdef DEBUG
2293  // We want to verify that RecordJSReturnSite gets called on all paths
2294  // through this function. Avoid early returns.
2295  expr->return_is_recorded_ = false;
2296 #endif
2297 
2298  Comment cmnt(masm_, "[ Call");
2299  Expression* callee = expr->expression();
2300  VariableProxy* proxy = callee->AsVariableProxy();
2301  Property* property = callee->AsProperty();
2302 
2303  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2304  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2305  // resolve the function we need to call and the receiver of the call.
2306  // Then we call the resolved function using the given arguments.
2307  ZoneList<Expression*>* args = expr->arguments();
2308  int arg_count = args->length();
2309  { PreservePositionScope pos_scope(masm()->positions_recorder());
2310  VisitForStackValue(callee);
2311  __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2312 
2313  // Push the arguments.
2314  for (int i = 0; i < arg_count; i++) {
2315  VisitForStackValue(args->at(i));
2316  }
2317 
2318  // Push a copy of the function (found below the arguments) and resolve
2319  // eval.
2320  __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2321  EmitResolvePossiblyDirectEval(arg_count);
2322 
2323  // The runtime call returns a pair of values in rax (function) and
2324  // rdx (receiver). Touch up the stack with the right values.
2325  __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2326  __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2327  }
2328  // Record source position for debugger.
2329  SetSourcePosition(expr->position());
2330  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2331  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2332  __ CallStub(&stub);
2333  RecordJSReturnSite(expr);
2334  // Restore context register.
2335  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2336  context()->DropAndPlug(1, rax);
2337  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2338  // Call to a global variable. Push global object as receiver for the
2339  // call IC lookup.
2340  __ push(GlobalObjectOperand());
2341  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2342  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2343  // Call to a lookup slot (dynamically introduced variable).
2344  Label slow, done;
2345 
2346  { PreservePositionScope scope(masm()->positions_recorder());
2347  // Generate code for loading from variables potentially shadowed by
2348  // eval-introduced variables.
2349  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2350  }
2351  __ bind(&slow);
2352  // Call the runtime to find the function to call (returned in rax) and
2353  // the object holding it (returned in rdx).
2354  __ push(context_register());
2355  __ Push(proxy->name());
2356  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2357  __ push(rax); // Function.
2358  __ push(rdx); // Receiver.
2359 
2360  // If fast case code has been generated, emit code to push the function
2361  // and receiver and have the slow path jump around this code.
2362  if (done.is_linked()) {
2363  Label call;
2364  __ jmp(&call, Label::kNear);
2365  __ bind(&done);
2366  // Push function.
2367  __ push(rax);
2368  // The receiver is implicitly the global receiver. Indicate this by
2369  // passing the hole to the call function stub.
2370  __ PushRoot(Heap::kTheHoleValueRootIndex);
2371  __ bind(&call);
2372  }
2373 
2374  // The receiver is either the global receiver or an object found by
2375  // LoadContextSlot. That object could be the hole if the receiver is
2376  // implicitly the global object.
2377  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2378  } else if (property != NULL) {
2379  { PreservePositionScope scope(masm()->positions_recorder());
2380  VisitForStackValue(property->obj());
2381  }
2382  if (property->key()->IsPropertyName()) {
2383  EmitCallWithIC(expr,
2384  property->key()->AsLiteral()->handle(),
2385  RelocInfo::CODE_TARGET);
2386  } else {
2387  EmitKeyedCallWithIC(expr, property->key());
2388  }
2389  } else {
2390  // Call to an arbitrary expression not handled specially above.
2391  { PreservePositionScope scope(masm()->positions_recorder());
2392  VisitForStackValue(callee);
2393  }
2394  // Load global receiver object.
2395  __ movq(rbx, GlobalObjectOperand());
2396  __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2397  // Emit function call.
2398  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2399  }
2400 
2401 #ifdef DEBUG
2402  // RecordJSReturnSite should have been called.
2403  ASSERT(expr->return_is_recorded_);
2404 #endif
2405 }
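
An illustrative aside (not part of this file): VisitCall above is one long dispatch on what the callee expression is. A simplified sketch of that decision order; the Callee struct and the returned strings are hypothetical summaries of the branches, not V8 API.

#include <iostream>
#include <string>

// Simplified view of the callee, used only for this illustration.
struct Callee {
  bool is_variable = false;
  bool possibly_eval = false;  // direct `eval(...)`
  bool unallocated = false;    // global variable
  bool lookup_slot = false;    // dynamically introduced (with/eval) variable
  bool is_property = false;    // o.f(...) or o[k](...)
};

// Mirrors the if/else chain in VisitCall.
std::string DispatchCall(const Callee& c) {
  if (c.is_variable && c.possibly_eval) return "resolve possibly-direct eval, then stub call";
  if (c.is_variable && c.unallocated) return "global call through the call IC";
  if (c.is_variable && c.lookup_slot) return "lookup slot: LoadContextSlot, then stub call";
  if (c.is_property) return "property call through the (keyed) call IC";
  return "arbitrary expression: stub call with the global receiver";
}

int main() {
  Callee eval_call; eval_call.is_variable = true; eval_call.possibly_eval = true;
  Callee named; named.is_property = true;
  std::cout << DispatchCall(eval_call) << "\n";
  std::cout << DispatchCall(named) << "\n";
}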
2406 
2407 
2408 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2409  Comment cmnt(masm_, "[ CallNew");
2410  // According to ECMA-262, section 11.2.2, page 44, the function
2411  // expression in new calls must be evaluated before the
2412  // arguments.
2413 
2414  // Push constructor on the stack. If it's not a function it's used as
2415  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2416  // ignored.
2417  VisitForStackValue(expr->expression());
2418 
2419  // Push the arguments ("left-to-right") on the stack.
2420  ZoneList<Expression*>* args = expr->arguments();
2421  int arg_count = args->length();
2422  for (int i = 0; i < arg_count; i++) {
2423  VisitForStackValue(args->at(i));
2424  }
2425 
2426  // Call the construct call builtin that handles allocation and
2427  // constructor invocation.
2428  SetSourcePosition(expr->position());
2429 
2430  // Load function and argument count into rdi and rax.
2431  __ Set(rax, arg_count);
2432  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2433 
2434  // Record call targets in unoptimized code, but not in the snapshot.
2435  Handle<Object> uninitialized =
2437  Handle<JSGlobalPropertyCell> cell =
2438  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2439  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2440  __ Move(rbx, cell);
2441 
2442  CallConstructStub stub(RECORD_CALL_TARGET);
2443  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2444  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2445  context()->Plug(rax);
2446 }
2447 
2448 
2449 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2450  ZoneList<Expression*>* args = expr->arguments();
2451  ASSERT(args->length() == 1);
2452 
2453  VisitForAccumulatorValue(args->at(0));
2454 
2455  Label materialize_true, materialize_false;
2456  Label* if_true = NULL;
2457  Label* if_false = NULL;
2458  Label* fall_through = NULL;
2459  context()->PrepareTest(&materialize_true, &materialize_false,
2460  &if_true, &if_false, &fall_through);
2461 
2462  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2463  __ JumpIfSmi(rax, if_true);
2464  __ jmp(if_false);
2465 
2466  context()->Plug(if_true, if_false);
2467 }
2468 
2469 
2470 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2471  ZoneList<Expression*>* args = expr->arguments();
2472  ASSERT(args->length() == 1);
2473 
2474  VisitForAccumulatorValue(args->at(0));
2475 
2476  Label materialize_true, materialize_false;
2477  Label* if_true = NULL;
2478  Label* if_false = NULL;
2479  Label* fall_through = NULL;
2480  context()->PrepareTest(&materialize_true, &materialize_false,
2481  &if_true, &if_false, &fall_through);
2482 
2483  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2484  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2485  Split(non_negative_smi, if_true, if_false, fall_through);
2486 
2487  context()->Plug(if_true, if_false);
2488 }
2489 
2490 
2491 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2492  ZoneList<Expression*>* args = expr->arguments();
2493  ASSERT(args->length() == 1);
2494 
2495  VisitForAccumulatorValue(args->at(0));
2496 
2497  Label materialize_true, materialize_false;
2498  Label* if_true = NULL;
2499  Label* if_false = NULL;
2500  Label* fall_through = NULL;
2501  context()->PrepareTest(&materialize_true, &materialize_false,
2502  &if_true, &if_false, &fall_through);
2503 
2504  __ JumpIfSmi(rax, if_false);
2505  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2506  __ j(equal, if_true);
2507  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2508  // Undetectable objects behave like undefined when tested with typeof.
2509  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2510  Immediate(1 << Map::kIsUndetectable));
2511  __ j(not_zero, if_false);
2512  __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2513  __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2514  __ j(below, if_false);
2515  __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2516  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2517  Split(below_equal, if_true, if_false, fall_through);
2518 
2519  context()->Plug(if_true, if_false);
2520 }
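
An illustrative aside (not part of this file): the pair of comparisons above (below -> false, then below_equal -> true) is an inclusive range check on the object's instance type. A sketch of the equivalent predicate; the bounds below are placeholders, since the real FIRST_/LAST_NONCALLABLE_SPEC_OBJECT_TYPE values live in V8's instance-type enum.

#include <iostream>

// Placeholder bounds for illustration only.
const int kFirstNonCallable = 100;
const int kLastNonCallable = 110;

// Same effect as `cmp; j(below, if_false); cmp; Split(below_equal, ...)`:
// true exactly when first <= type <= last.
bool IsNonCallableSpecObject(int instance_type) {
  return instance_type >= kFirstNonCallable && instance_type <= kLastNonCallable;
}

int main() {
  std::cout << IsNonCallableSpecObject(99) << " "
            << IsNonCallableSpecObject(105) << " "
            << IsNonCallableSpecObject(111) << "\n";  // 0 1 0
}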
2521 
2522 
2523 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2524  ZoneList<Expression*>* args = expr->arguments();
2525  ASSERT(args->length() == 1);
2526 
2527  VisitForAccumulatorValue(args->at(0));
2528 
2529  Label materialize_true, materialize_false;
2530  Label* if_true = NULL;
2531  Label* if_false = NULL;
2532  Label* fall_through = NULL;
2533  context()->PrepareTest(&materialize_true, &materialize_false,
2534  &if_true, &if_false, &fall_through);
2535 
2536  __ JumpIfSmi(rax, if_false);
2537  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2538  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2539  Split(above_equal, if_true, if_false, fall_through);
2540 
2541  context()->Plug(if_true, if_false);
2542 }
2543 
2544 
2545 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2546  ZoneList<Expression*>* args = expr->arguments();
2547  ASSERT(args->length() == 1);
2548 
2549  VisitForAccumulatorValue(args->at(0));
2550 
2551  Label materialize_true, materialize_false;
2552  Label* if_true = NULL;
2553  Label* if_false = NULL;
2554  Label* fall_through = NULL;
2555  context()->PrepareTest(&materialize_true, &materialize_false,
2556  &if_true, &if_false, &fall_through);
2557 
2558  __ JumpIfSmi(rax, if_false);
2559  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2560  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2561  Immediate(1 << Map::kIsUndetectable));
2562  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2563  Split(not_zero, if_true, if_false, fall_through);
2564 
2565  context()->Plug(if_true, if_false);
2566 }
2567 
2568 
2569 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2570  CallRuntime* expr) {
2571  ZoneList<Expression*>* args = expr->arguments();
2572  ASSERT(args->length() == 1);
2573 
2574  VisitForAccumulatorValue(args->at(0));
2575 
2576  Label materialize_true, materialize_false;
2577  Label* if_true = NULL;
2578  Label* if_false = NULL;
2579  Label* fall_through = NULL;
2580  context()->PrepareTest(&materialize_true, &materialize_false,
2581  &if_true, &if_false, &fall_through);
2582 
2583  __ AssertNotSmi(rax);
2584 
2585  // Check whether this map has already been checked to be safe for default
2586  // valueOf.
2587  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2588  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2589  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2590  __ j(not_zero, if_true);
2591 
2592  // Check for fast case object. Generate false result for slow case object.
2593  __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2594  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2595  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2596  __ j(equal, if_false);
2597 
2598  // Look for valueOf symbol in the descriptor array, and indicate false if
2599  // found. Since we omit an enumeration index check, if it is added via a
2600  // transition that shares its descriptor array, this is a false positive.
2601  Label entry, loop, done;
2602 
2603  // Skip loop if no descriptors are valid.
2604  __ NumberOfOwnDescriptors(rcx, rbx);
2605  __ cmpq(rcx, Immediate(0));
2606  __ j(equal, &done);
2607 
2608  __ LoadInstanceDescriptors(rbx, rbx);
2609  // rbx: descriptor array.
2610  // rcx: valid entries in the descriptor array.
2611  // Calculate the end of the descriptor array.
2612  __ imul(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
2613  SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2614  __ lea(rcx,
2615  Operand(
2616  rbx, index.reg, index.scale, DescriptorArray::kFirstOffset));
2617  // Calculate location of the first key name.
2618  __ addq(rbx, Immediate(DescriptorArray::kFirstOffset));
2619  // Loop through all the keys in the descriptor array. If one of these is the
2620  // symbol valueOf the result is false.
2621  __ jmp(&entry);
2622  __ bind(&loop);
2623  __ movq(rdx, FieldOperand(rbx, 0));
2624  __ Cmp(rdx, FACTORY->value_of_symbol());
2625  __ j(equal, if_false);
2626  __ addq(rbx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
2627  __ bind(&entry);
2628  __ cmpq(rbx, rcx);
2629  __ j(not_equal, &loop);
2630 
2631  __ bind(&done);
2632  // Reload map as register rbx was used as temporary above.
2633  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2634 
2635  // If a valueOf property is not found on the object check that its
2636  // prototype is the un-modified String prototype. If not result is false.
2637  __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2638  __ testq(rcx, Immediate(kSmiTagMask));
2639  __ j(zero, if_false);
2640  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2641  __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2642  __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
2643  __ cmpq(rcx,
2644  ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2645  __ j(not_equal, if_false);
2646  // Set the bit in the map to indicate that it has been checked safe for
2647  // default valueOf and set true result.
2648  __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2649  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2650  __ jmp(if_true);
2651 
2652  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2653  context()->Plug(if_true, if_false);
2654 }
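
An illustrative aside (not part of this file): the loop above walks the map's descriptor array and answers false as soon as a key named "valueOf" appears; as the comment notes, a descriptor array shared via a transition can make this a false positive. A sketch of that linear scan over plain strings; the types are hypothetical stand-ins.

#include <iostream>
#include <string>
#include <vector>

// Stand-in for the descriptor-array key scan: report "unsafe" as soon as a
// property named "valueOf" is found, otherwise "safe".
bool SafeForDefaultValueOf(const std::vector<std::string>& descriptor_keys) {
  for (const std::string& key : descriptor_keys) {
    if (key == "valueOf") return false;  // own valueOf shadows the default one
  }
  return true;
}

int main() {
  std::cout << SafeForDefaultValueOf({"length", "charAt"}) << "\n";   // 1
  std::cout << SafeForDefaultValueOf({"valueOf", "charAt"}) << "\n";  // 0
}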
2655 
2656 
2657 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2658  ZoneList<Expression*>* args = expr->arguments();
2659  ASSERT(args->length() == 1);
2660 
2661  VisitForAccumulatorValue(args->at(0));
2662 
2663  Label materialize_true, materialize_false;
2664  Label* if_true = NULL;
2665  Label* if_false = NULL;
2666  Label* fall_through = NULL;
2667  context()->PrepareTest(&materialize_true, &materialize_false,
2668  &if_true, &if_false, &fall_through);
2669 
2670  __ JumpIfSmi(rax, if_false);
2671  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2672  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2673  Split(equal, if_true, if_false, fall_through);
2674 
2675  context()->Plug(if_true, if_false);
2676 }
2677 
2678 
2679 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2680  ZoneList<Expression*>* args = expr->arguments();
2681  ASSERT(args->length() == 1);
2682 
2683  VisitForAccumulatorValue(args->at(0));
2684 
2685  Label materialize_true, materialize_false;
2686  Label* if_true = NULL;
2687  Label* if_false = NULL;
2688  Label* fall_through = NULL;
2689  context()->PrepareTest(&materialize_true, &materialize_false,
2690  &if_true, &if_false, &fall_through);
2691 
2692  __ JumpIfSmi(rax, if_false);
2693  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2694  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2695  Split(equal, if_true, if_false, fall_through);
2696 
2697  context()->Plug(if_true, if_false);
2698 }
2699 
2700 
2701 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2702  ZoneList<Expression*>* args = expr->arguments();
2703  ASSERT(args->length() == 1);
2704 
2705  VisitForAccumulatorValue(args->at(0));
2706 
2707  Label materialize_true, materialize_false;
2708  Label* if_true = NULL;
2709  Label* if_false = NULL;
2710  Label* fall_through = NULL;
2711  context()->PrepareTest(&materialize_true, &materialize_false,
2712  &if_true, &if_false, &fall_through);
2713 
2714  __ JumpIfSmi(rax, if_false);
2715  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2716  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2717  Split(equal, if_true, if_false, fall_through);
2718 
2719  context()->Plug(if_true, if_false);
2720 }
2721 
2722 
2723 
2724 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2725  ASSERT(expr->arguments()->length() == 0);
2726 
2727  Label materialize_true, materialize_false;
2728  Label* if_true = NULL;
2729  Label* if_false = NULL;
2730  Label* fall_through = NULL;
2731  context()->PrepareTest(&materialize_true, &materialize_false,
2732  &if_true, &if_false, &fall_through);
2733 
2734  // Get the frame pointer for the calling frame.
2735  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2736 
2737  // Skip the arguments adaptor frame if it exists.
2738  Label check_frame_marker;
2739  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2740  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2741  __ j(not_equal, &check_frame_marker);
2742  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2743 
2744  // Check the marker in the calling frame.
2745  __ bind(&check_frame_marker);
2746  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2747  Smi::FromInt(StackFrame::CONSTRUCT));
2748  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2749  Split(equal, if_true, if_false, fall_through);
2750 
2751  context()->Plug(if_true, if_false);
2752 }
2753 
2754 
2755 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2756  ZoneList<Expression*>* args = expr->arguments();
2757  ASSERT(args->length() == 2);
2758 
2759  // Load the two objects into registers and perform the comparison.
2760  VisitForStackValue(args->at(0));
2761  VisitForAccumulatorValue(args->at(1));
2762 
2763  Label materialize_true, materialize_false;
2764  Label* if_true = NULL;
2765  Label* if_false = NULL;
2766  Label* fall_through = NULL;
2767  context()->PrepareTest(&materialize_true, &materialize_false,
2768  &if_true, &if_false, &fall_through);
2769 
2770  __ pop(rbx);
2771  __ cmpq(rax, rbx);
2772  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2773  Split(equal, if_true, if_false, fall_through);
2774 
2775  context()->Plug(if_true, if_false);
2776 }
2777 
2778 
2779 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2780  ZoneList<Expression*>* args = expr->arguments();
2781  ASSERT(args->length() == 1);
2782 
2783  // ArgumentsAccessStub expects the key in rdx and the formal
2784  // parameter count in rax.
2785  VisitForAccumulatorValue(args->at(0));
2786  __ movq(rdx, rax);
2787  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2788  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2789  __ CallStub(&stub);
2790  context()->Plug(rax);
2791 }
2792 
2793 
2794 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2795  ASSERT(expr->arguments()->length() == 0);
2796 
2797  Label exit;
2798  // Get the number of formal parameters.
2799  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2800 
2801  // Check if the calling frame is an arguments adaptor frame.
2802  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2803  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2804  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2805  __ j(not_equal, &exit, Label::kNear);
2806 
2807  // Arguments adaptor case: Read the arguments length from the
2808  // adaptor frame.
2809  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2810 
2811  __ bind(&exit);
2812  __ AssertSmi(rax);
2813  context()->Plug(rax);
2814 }
2815 
2816 
2817 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2818  ZoneList<Expression*>* args = expr->arguments();
2819  ASSERT(args->length() == 1);
2820  Label done, null, function, non_function_constructor;
2821 
2822  VisitForAccumulatorValue(args->at(0));
2823 
2824  // If the object is a smi, we return null.
2825  __ JumpIfSmi(rax, &null);
2826 
2827  // Check that the object is a JS object but take special care of JS
2828  // functions to make sure they have 'Function' as their class.
2829  // Assume that there are only two callable types, and one of them is at
2830  // either end of the type range for JS object types. Saves extra comparisons.
2831  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2832  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2833  // Map is now in rax.
2834  __ j(below, &null);
2835  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2836  FIRST_SPEC_OBJECT_TYPE + 1);
2837  __ j(equal, &function);
2838 
2839  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
2840  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2841  LAST_SPEC_OBJECT_TYPE - 1);
2842  __ j(equal, &function);
2843  // Assume that there is no larger type.
2844  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2845 
2846  // Check if the constructor in the map is a JS function.
2847  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2848  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2849  __ j(not_equal, &non_function_constructor);
2850 
2851  // rax now contains the constructor function. Grab the
2852  // instance class name from there.
2853  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2854  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2855  __ jmp(&done);
2856 
2857  // Functions have class 'Function'.
2858  __ bind(&function);
2859  __ Move(rax, isolate()->factory()->function_class_symbol());
2860  __ jmp(&done);
2861 
2862  // Objects with a non-function constructor have class 'Object'.
2863  __ bind(&non_function_constructor);
2864  __ Move(rax, isolate()->factory()->Object_symbol());
2865  __ jmp(&done);
2866 
2867  // Non-JS objects have class null.
2868  __ bind(&null);
2869  __ LoadRoot(rax, Heap::kNullValueRootIndex);
2870 
2871  // All done.
2872  __ bind(&done);
2873 
2874  context()->Plug(rax);
2875 }
2876 
2877 
2878 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2879  // Conditionally generate a log call.
2880  // Args:
2881  // 0 (literal string): The type of logging (corresponds to the flags).
2882  // This is used to determine whether or not to generate the log call.
2883  // 1 (string): Format string. Access the string at argument index 2
2884  // with '%2s' (see Logger::LogRuntime for all the formats).
2885  // 2 (array): Arguments to the format string.
2886  ZoneList<Expression*>* args = expr->arguments();
2887  ASSERT_EQ(args->length(), 3);
2888  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2889  VisitForStackValue(args->at(1));
2890  VisitForStackValue(args->at(2));
2891  __ CallRuntime(Runtime::kLog, 2);
2892  }
2893  // Finally, we're expected to leave a value on the top of the stack.
2894  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2895  context()->Plug(rax);
2896 }
2897 
2898 
2899 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2900  ASSERT(expr->arguments()->length() == 0);
2901 
2902  Label slow_allocate_heapnumber;
2903  Label heapnumber_allocated;
2904 
2905  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2906  __ jmp(&heapnumber_allocated);
2907 
2908  __ bind(&slow_allocate_heapnumber);
2909  // Allocate a heap number.
2910  __ CallRuntime(Runtime::kNumberAlloc, 0);
2911  __ movq(rbx, rax);
2912 
2913  __ bind(&heapnumber_allocated);
2914 
2915  // Return a random uint32 number in rax.
2916  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2917  __ PrepareCallCFunction(1);
2918 #ifdef _WIN64
2919  __ movq(rcx,
2920  ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
2921  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
2922 
2923 #else
2924  __ movq(rdi,
2925  ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
2926  __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
2927 #endif
2928  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2929 
2930  // Convert 32 random bits in rax to 0.(32 random bits) in a double
2931  // by computing:
2932  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
2933  __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2934  __ movd(xmm1, rcx);
2935  __ movd(xmm0, rax);
2936  __ cvtss2sd(xmm1, xmm1);
2937  __ xorps(xmm0, xmm1);
2938  __ subsd(xmm0, xmm1);
2939  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2940 
2941  __ movq(rax, rbx);
2942  context()->Plug(rax);
2943 }
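
An illustrative aside (not part of this file): the xorps/subsd sequence above turns 32 random bits into a double in [0, 1): the bits become the low mantissa bits of 1.x * 2^20, and subtracting 1.0 * 2^20 leaves bits * 2^-32. A portable C++ sketch of the same bit manipulation, using memcpy for the type pun.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Plant 32 random bits in the low mantissa bits of the double 2^20, then
// subtract 2^20. The remainder is bits * 2^-32, a value in [0, 1).
double RandomBitsToDouble(uint32_t bits) {
  const uint64_t kTwoPow20Bits = 0x4130000000000000ULL;  // bit pattern of 1.0 x 2^20
  uint64_t pattern = kTwoPow20Bits ^ static_cast<uint64_t>(bits);  // 1.(20 0s)(bits) x 2^20
  double d;
  std::memcpy(&d, &pattern, sizeof d);
  return d - 1048576.0;  // subtract 1.0 x 2^20
}

int main() {
  std::printf("%.10f\n", RandomBitsToDouble(0));            // 0.0000000000
  std::printf("%.10f\n", RandomBitsToDouble(0x80000000u));  // 0.5000000000
  std::printf("%.10f\n", RandomBitsToDouble(0xFFFFFFFFu));  // just below 1.0
}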
2944 
2945 
2946 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2947  // Load the arguments on the stack and call the stub.
2948  SubStringStub stub;
2949  ZoneList<Expression*>* args = expr->arguments();
2950  ASSERT(args->length() == 3);
2951  VisitForStackValue(args->at(0));
2952  VisitForStackValue(args->at(1));
2953  VisitForStackValue(args->at(2));
2954  __ CallStub(&stub);
2955  context()->Plug(rax);
2956 }
2957 
2958 
2959 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2960  // Load the arguments on the stack and call the stub.
2961  RegExpExecStub stub;
2962  ZoneList<Expression*>* args = expr->arguments();
2963  ASSERT(args->length() == 4);
2964  VisitForStackValue(args->at(0));
2965  VisitForStackValue(args->at(1));
2966  VisitForStackValue(args->at(2));
2967  VisitForStackValue(args->at(3));
2968  __ CallStub(&stub);
2969  context()->Plug(rax);
2970 }
2971 
2972 
2973 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2974  ZoneList<Expression*>* args = expr->arguments();
2975  ASSERT(args->length() == 1);
2976 
2977  VisitForAccumulatorValue(args->at(0)); // Load the object.
2978 
2979  Label done;
2980  // If the object is a smi return the object.
2981  __ JumpIfSmi(rax, &done);
2982  // If the object is not a value type, return the object.
2983  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2984  __ j(not_equal, &done);
2985  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
2986 
2987  __ bind(&done);
2988  context()->Plug(rax);
2989 }
2990 
2991 
2992 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
2993  ZoneList<Expression*>* args = expr->arguments();
2994  ASSERT(args->length() == 2);
2995  ASSERT_NE(NULL, args->at(1)->AsLiteral());
2996  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
2997 
2998  VisitForAccumulatorValue(args->at(0)); // Load the object.
2999 
3000  Label runtime, done, not_date_object;
3001  Register object = rax;
3002  Register result = rax;
3003  Register scratch = rcx;
3004 
3005  __ JumpIfSmi(object, &not_date_object);
3006  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3007  __ j(not_equal, &not_date_object);
3008 
3009  if (index->value() == 0) {
3010  __ movq(result, FieldOperand(object, JSDate::kValueOffset));
3011  __ jmp(&done);
3012  } else {
3013  if (index->value() < JSDate::kFirstUncachedField) {
3014  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3015  __ movq(scratch, stamp);
3016  __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3017  __ j(not_equal, &runtime, Label::kNear);
3018  __ movq(result, FieldOperand(object, JSDate::kValueOffset +
3019  kPointerSize * index->value()));
3020  __ jmp(&done);
3021  }
3022  __ bind(&runtime);
3023  __ PrepareCallCFunction(2);
3024 #ifdef _WIN64
3025  __ movq(rcx, object);
3026  __ movq(rdx, index, RelocInfo::NONE);
3027 #else
3028  __ movq(rdi, object);
3029  __ movq(rsi, index, RelocInfo::NONE);
3030 #endif
3031  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3032  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3033  __ jmp(&done);
3034  }
3035 
3036  __ bind(&not_date_object);
3037  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3038  __ bind(&done);
3039  context()->Plug(rax);
3040 }
3041 
3042 
3043 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3044  // Load the arguments on the stack and call the runtime function.
3045  ZoneList<Expression*>* args = expr->arguments();
3046  ASSERT(args->length() == 2);
3047  VisitForStackValue(args->at(0));
3048  VisitForStackValue(args->at(1));
3049  MathPowStub stub(MathPowStub::ON_STACK);
3050  __ CallStub(&stub);
3051  context()->Plug(rax);
3052 }
3053 
3054 
3055 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3056  ZoneList<Expression*>* args = expr->arguments();
3057  ASSERT(args->length() == 2);
3058 
3059  VisitForStackValue(args->at(0)); // Load the object.
3060  VisitForAccumulatorValue(args->at(1)); // Load the value.
3061  __ pop(rbx); // rax = value. rbx = object.
3062 
3063  Label done;
3064  // If the object is a smi, return the value.
3065  __ JumpIfSmi(rbx, &done);
3066 
3067  // If the object is not a value type, return the value.
3068  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3069  __ j(not_equal, &done);
3070 
3071  // Store the value.
3072  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
3073  // Update the write barrier. Save the value as it will be
3074  // overwritten by the write barrier code and is needed afterward.
3075  __ movq(rdx, rax);
3076  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3077 
3078  __ bind(&done);
3079  context()->Plug(rax);
3080 }
3081 
3082 
3083 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3084  ZoneList<Expression*>* args = expr->arguments();
3085  ASSERT_EQ(args->length(), 1);
3086 
3087  // Load the argument on the stack and call the stub.
3088  VisitForStackValue(args->at(0));
3089 
3090  NumberToStringStub stub;
3091  __ CallStub(&stub);
3092  context()->Plug(rax);
3093 }
3094 
3095 
3096 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3097  ZoneList<Expression*>* args = expr->arguments();
3098  ASSERT(args->length() == 1);
3099 
3100  VisitForAccumulatorValue(args->at(0));
3101 
3102  Label done;
3103  StringCharFromCodeGenerator generator(rax, rbx);
3104  generator.GenerateFast(masm_);
3105  __ jmp(&done);
3106 
3107  NopRuntimeCallHelper call_helper;
3108  generator.GenerateSlow(masm_, call_helper);
3109 
3110  __ bind(&done);
3111  context()->Plug(rbx);
3112 }
3113 
3114 
3115 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3116  ZoneList<Expression*>* args = expr->arguments();
3117  ASSERT(args->length() == 2);
3118 
3119  VisitForStackValue(args->at(0));
3120  VisitForAccumulatorValue(args->at(1));
3121 
3122  Register object = rbx;
3123  Register index = rax;
3124  Register result = rdx;
3125 
3126  __ pop(object);
3127 
3128  Label need_conversion;
3129  Label index_out_of_range;
3130  Label done;
3131  StringCharCodeAtGenerator generator(object,
3132  index,
3133  result,
3134  &need_conversion,
3135  &need_conversion,
3136  &index_out_of_range,
3137  STRING_INDEX_IS_NUMBER);
3138  generator.GenerateFast(masm_);
3139  __ jmp(&done);
3140 
3141  __ bind(&index_out_of_range);
3142  // When the index is out of range, the spec requires us to return
3143  // NaN.
3144  __ LoadRoot(result, Heap::kNanValueRootIndex);
3145  __ jmp(&done);
3146 
3147  __ bind(&need_conversion);
3148  // Move the undefined value into the result register, which will
3149  // trigger conversion.
3150  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3151  __ jmp(&done);
3152 
3153  NopRuntimeCallHelper call_helper;
3154  generator.GenerateSlow(masm_, call_helper);
3155 
3156  __ bind(&done);
3157  context()->Plug(result);
3158 }
3159 
3160 
3161 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3162  ZoneList<Expression*>* args = expr->arguments();
3163  ASSERT(args->length() == 2);
3164 
3165  VisitForStackValue(args->at(0));
3166  VisitForAccumulatorValue(args->at(1));
3167 
3168  Register object = rbx;
3169  Register index = rax;
3170  Register scratch = rdx;
3171  Register result = rax;
3172 
3173  __ pop(object);
3174 
3175  Label need_conversion;
3176  Label index_out_of_range;
3177  Label done;
3178  StringCharAtGenerator generator(object,
3179  index,
3180  scratch,
3181  result,
3182  &need_conversion,
3183  &need_conversion,
3184  &index_out_of_range,
3185  STRING_INDEX_IS_NUMBER);
3186  generator.GenerateFast(masm_);
3187  __ jmp(&done);
3188 
3189  __ bind(&index_out_of_range);
3190  // When the index is out of range, the spec requires us to return
3191  // the empty string.
3192  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3193  __ jmp(&done);
3194 
3195  __ bind(&need_conversion);
3196  // Move smi zero into the result register, which will trigger
3197  // conversion.
3198  __ Move(result, Smi::FromInt(0));
3199  __ jmp(&done);
3200 
3201  NopRuntimeCallHelper call_helper;
3202  generator.GenerateSlow(masm_, call_helper);
3203 
3204  __ bind(&done);
3205  context()->Plug(result);
3206 }
3207 
3208 
3209 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3210  ZoneList<Expression*>* args = expr->arguments();
3211  ASSERT_EQ(2, args->length());
3212 
3213  VisitForStackValue(args->at(0));
3214  VisitForStackValue(args->at(1));
3215 
3216  StringAddStub stub(NO_STRING_ADD_FLAGS);
3217  __ CallStub(&stub);
3218  context()->Plug(rax);
3219 }
3220 
3221 
3222 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3223  ZoneList<Expression*>* args = expr->arguments();
3224  ASSERT_EQ(2, args->length());
3225 
3226  VisitForStackValue(args->at(0));
3227  VisitForStackValue(args->at(1));
3228 
3229  StringCompareStub stub;
3230  __ CallStub(&stub);
3231  context()->Plug(rax);
3232 }
3233 
3234 
3235 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3236  // Load the argument on the stack and call the stub.
3237  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3238  TranscendentalCacheStub::TAGGED);
3239  ZoneList<Expression*>* args = expr->arguments();
3240  ASSERT(args->length() == 1);
3241  VisitForStackValue(args->at(0));
3242  __ CallStub(&stub);
3243  context()->Plug(rax);
3244 }
3245 
3246 
3247 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3248  // Load the argument on the stack and call the stub.
3249  TranscendentalCacheStub stub(TranscendentalCache::COS,
3250  TranscendentalCacheStub::TAGGED);
3251  ZoneList<Expression*>* args = expr->arguments();
3252  ASSERT(args->length() == 1);
3253  VisitForStackValue(args->at(0));
3254  __ CallStub(&stub);
3255  context()->Plug(rax);
3256 }
3257 
3258 
3259 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3260  // Load the argument on the stack and call the stub.
3261  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3262  TranscendentalCacheStub::TAGGED);
3263  ZoneList<Expression*>* args = expr->arguments();
3264  ASSERT(args->length() == 1);
3265  VisitForStackValue(args->at(0));
3266  __ CallStub(&stub);
3267  context()->Plug(rax);
3268 }
3269 
3270 
3271 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3272  // Load the argument on the stack and call the stub.
3273  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3274  TranscendentalCacheStub::TAGGED);
3275  ZoneList<Expression*>* args = expr->arguments();
3276  ASSERT(args->length() == 1);
3277  VisitForStackValue(args->at(0));
3278  __ CallStub(&stub);
3279  context()->Plug(rax);
3280 }
3281 
3282 
3283 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3284  // Load the argument on the stack and call the runtime function.
3285  ZoneList<Expression*>* args = expr->arguments();
3286  ASSERT(args->length() == 1);
3287  VisitForStackValue(args->at(0));
3288  __ CallRuntime(Runtime::kMath_sqrt, 1);
3289  context()->Plug(rax);
3290 }
3291 
3292 
3293 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3294  ZoneList<Expression*>* args = expr->arguments();
3295  ASSERT(args->length() >= 2);
3296 
3297  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3298  for (int i = 0; i < arg_count + 1; i++) {
3299  VisitForStackValue(args->at(i));
3300  }
3301  VisitForAccumulatorValue(args->last()); // Function.
3302 
3303  Label runtime, done;
3304  // Check for non-function argument (including proxy).
3305  __ JumpIfSmi(rax, &runtime);
3306  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3307  __ j(not_equal, &runtime);
3308 
3309  // InvokeFunction requires the function in rdi. Move it in there.
3310  __ movq(rdi, result_register());
3311  ParameterCount count(arg_count);
3312  __ InvokeFunction(rdi, count, CALL_FUNCTION,
3313  NullCallWrapper(), CALL_AS_METHOD);
3314  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3315  __ jmp(&done);
3316 
3317  __ bind(&runtime);
3318  __ push(rax);
3319  __ CallRuntime(Runtime::kCall, args->length());
3320  __ bind(&done);
3321 
3322  context()->Plug(rax);
3323 }
3324 
3325 
3326 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3327  RegExpConstructResultStub stub;
3328  ZoneList<Expression*>* args = expr->arguments();
3329  ASSERT(args->length() == 3);
3330  VisitForStackValue(args->at(0));
3331  VisitForStackValue(args->at(1));
3332  VisitForStackValue(args->at(2));
3333  __ CallStub(&stub);
3334  context()->Plug(rax);
3335 }
3336 
3337 
3338 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3339  ZoneList<Expression*>* args = expr->arguments();
3340  ASSERT_EQ(2, args->length());
3341 
3342  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3343  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3344 
3345  Handle<FixedArray> jsfunction_result_caches(
3346  isolate()->native_context()->jsfunction_result_caches());
3347  if (jsfunction_result_caches->length() <= cache_id) {
3348  __ Abort("Attempt to use undefined cache.");
3349  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3350  context()->Plug(rax);
3351  return;
3352  }
3353 
3354  VisitForAccumulatorValue(args->at(1));
3355 
3356  Register key = rax;
3357  Register cache = rbx;
3358  Register tmp = rcx;
3359  __ movq(cache, GlobalObjectOperand());
3360  __ movq(cache,
3361  FieldOperand(cache, GlobalObject::kNativeContextOffset));
3362  __ movq(cache,
3363  ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3364  __ movq(cache,
3365  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3366 
3367  Label done, not_found;
3368  // tmp now holds finger offset as a smi.
3369  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3370  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3371  SmiIndex index =
3372  __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3373  __ cmpq(key, FieldOperand(cache,
3374  index.reg,
3375  index.scale,
3376  FixedArray::kHeaderSize));
3377  __ j(not_equal, &not_found, Label::kNear);
3378  __ movq(rax, FieldOperand(cache,
3379  index.reg,
3380  index.scale,
3381  FixedArray::kHeaderSize + kPointerSize));
3382  __ jmp(&done, Label::kNear);
3383 
3384  __ bind(&not_found);
3385  // Call runtime to perform the lookup.
3386  __ push(cache);
3387  __ push(key);
3388  __ CallRuntime(Runtime::kGetFromCache, 2);
3389 
3390  __ bind(&done);
3391  context()->Plug(rax);
3392 }
3393 
3394 
3395 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3396  ZoneList<Expression*>* args = expr->arguments();
3397  ASSERT_EQ(2, args->length());
3398 
3399  Register right = rax;
3400  Register left = rbx;
3401  Register tmp = rcx;
3402 
3403  VisitForStackValue(args->at(0));
3404  VisitForAccumulatorValue(args->at(1));
3405  __ pop(left);
3406 
3407  Label done, fail, ok;
3408  __ cmpq(left, right);
3409  __ j(equal, &ok, Label::kNear);
3410  // Fail if either is a non-HeapObject.
3411  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3412  __ j(either_smi, &fail, Label::kNear);
3413  __ j(zero, &fail, Label::kNear);
3414  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3415  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3416  Immediate(JS_REGEXP_TYPE));
3417  __ j(not_equal, &fail, Label::kNear);
3418  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3419  __ j(not_equal, &fail, Label::kNear);
3420  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3421  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3422  __ j(equal, &ok, Label::kNear);
3423  __ bind(&fail);
3424  __ Move(rax, isolate()->factory()->false_value());
3425  __ jmp(&done, Label::kNear);
3426  __ bind(&ok);
3427  __ Move(rax, isolate()->factory()->true_value());
3428  __ bind(&done);
3429 
3430  context()->Plug(rax);
3431 }
3432 
3433 
3434 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3435  ZoneList<Expression*>* args = expr->arguments();
3436  ASSERT(args->length() == 1);
3437 
3438  VisitForAccumulatorValue(args->at(0));
3439 
3440  Label materialize_true, materialize_false;
3441  Label* if_true = NULL;
3442  Label* if_false = NULL;
3443  Label* fall_through = NULL;
3444  context()->PrepareTest(&materialize_true, &materialize_false,
3445  &if_true, &if_false, &fall_through);
3446 
3447  __ testl(FieldOperand(rax, String::kHashFieldOffset),
3448  Immediate(String::kContainsCachedArrayIndexMask));
3449  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3450  __ j(zero, if_true);
3451  __ jmp(if_false);
3452 
3453  context()->Plug(if_true, if_false);
3454 }
3455 
3456 
3457 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3458  ZoneList<Expression*>* args = expr->arguments();
3459  ASSERT(args->length() == 1);
3460  VisitForAccumulatorValue(args->at(0));
3461 
3462  __ AssertString(rax);
3463 
3464  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3465  ASSERT(String::kHashShift >= kSmiTagSize);
3466  __ IndexFromHash(rax, rax);
3467 
3468  context()->Plug(rax);
3469 }
3470 
3471 
3472 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3473  Label bailout, return_result, done, one_char_separator, long_separator,
3474  non_trivial_array, not_size_one_array, loop,
3475  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3476  ZoneList<Expression*>* args = expr->arguments();
3477  ASSERT(args->length() == 2);
3478  // We will leave the separator on the stack until the end of the function.
3479  VisitForStackValue(args->at(1));
3480  // Load this to rax (= array)
3481  VisitForAccumulatorValue(args->at(0));
3482  // All aliases of the same register have disjoint lifetimes.
3483  Register array = rax;
3484  Register elements = no_reg; // Will be rax.
3485 
3486  Register index = rdx;
3487 
3488  Register string_length = rcx;
3489 
3490  Register string = rsi;
3491 
3492  Register scratch = rbx;
3493 
3494  Register array_length = rdi;
3495  Register result_pos = no_reg; // Will be rdi.
3496 
3497  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3498  Operand result_operand = Operand(rsp, 1 * kPointerSize);
3499  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3500  // Separator operand is already pushed. Make room for the two
3501  // other stack fields, and clear the direction flag in anticipation
3502  // of calling CopyBytes.
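  // Resulting stack layout (x64, kPointerSize == 8):
  //   rsp[0]  : array length (int32)
  //   rsp[8]  : result string
  //   rsp[16] : separator string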
3503  __ subq(rsp, Immediate(2 * kPointerSize));
3504  __ cld();
3505  // Check that the array is a JSArray
3506  __ JumpIfSmi(array, &bailout);
3507  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3508  __ j(not_equal, &bailout);
3509 
3510  // Check that the array has fast elements.
3511  __ CheckFastElements(scratch, &bailout);
3512 
3513  // Array has fast elements, so its length must be a smi.
3514  // If the array has length zero, return the empty string.
3515  __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3516  __ SmiCompare(array_length, Smi::FromInt(0));
3517  __ j(not_zero, &non_trivial_array);
3518  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3519  __ jmp(&return_result);
3520 
3521  // Save the array length on the stack.
3522  __ bind(&non_trivial_array);
3523  __ SmiToInteger32(array_length, array_length);
3524  __ movl(array_length_operand, array_length);
3525 
3526  // Save the FixedArray containing array's elements.
3527  // End of array's live range.
3528  elements = array;
3529  __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3530  array = no_reg;
3531 
3532 
3533  // Check that all array elements are sequential ASCII strings, and
3534  // accumulate the sum of their lengths, as a smi-encoded value.
3535  __ Set(index, 0);
3536  __ Set(string_length, 0);
3537  // Loop condition: while (index < array_length).
3538  // Live loop registers: index(int32), array_length(int32), string(String*),
3539  // scratch, string_length(int32), elements(FixedArray*).
3540  if (generate_debug_code_) {
3541  __ cmpq(index, array_length);
3542  __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3543  }
3544  __ bind(&loop);
3545  __ movq(string, FieldOperand(elements,
3546  index,
3547  times_pointer_size,
3548  FixedArray::kHeaderSize));
3549  __ JumpIfSmi(string, &bailout);
3550  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3551  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3552  __ andb(scratch, Immediate(
3553  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3554  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3555  __ j(not_equal, &bailout);
3556  __ AddSmiField(string_length,
3557  FieldOperand(string, SeqAsciiString::kLengthOffset));
3558  __ j(overflow, &bailout);
3559  __ incl(index);
3560  __ cmpl(index, array_length);
3561  __ j(less, &loop);
3562 
3563  // Live registers:
3564  // string_length: Sum of string lengths.
3565  // elements: FixedArray of strings.
3566  // index: Array length.
3567  // array_length: Array length.
3568 
3569  // If array_length is 1, return elements[0], a string.
3570  __ cmpl(array_length, Immediate(1));
3571  __ j(not_equal, &not_size_one_array);
3572  __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3573  __ jmp(&return_result);
3574 
3575  __ bind(&not_size_one_array);
3576 
3577  // End of array_length live range.
3578  result_pos = array_length;
3579  array_length = no_reg;
3580 
3581  // Live registers:
3582  // string_length: Sum of string lengths.
3583  // elements: FixedArray of strings.
3584  // index: Array length.
3585 
3586  // Check that the separator is a sequential ASCII string.
3587  __ movq(string, separator_operand);
3588  __ JumpIfSmi(string, &bailout);
3589  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3590  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3591  __ andb(scratch, Immediate(
3592  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3593  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3594  __ j(not_equal, &bailout);
3595 
3596  // Live registers:
3597  // string_length: Sum of string lengths.
3598  // elements: FixedArray of strings.
3599  // index: Array length.
3600  // string: Separator string.
3601 
3602  // Add (separator length times (array_length - 1)) to string_length.
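  // For example, joining three strings of total length 10 with a two-character
  // separator needs 10 + 2 * (3 - 1) = 14 characters in the result.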
3603  __ SmiToInteger32(scratch,
3604  FieldOperand(string, SeqAsciiString::kLengthOffset));
3605  __ decl(index);
3606  __ imull(scratch, index);
3607  __ j(overflow, &bailout);
3608  __ addl(string_length, scratch);
3609  __ j(overflow, &bailout);
3610 
3611  // Live registers and stack values:
3612  // string_length: Total length of result string.
3613  // elements: FixedArray of strings.
3614  __ AllocateAsciiString(result_pos, string_length, scratch,
3615  index, string, &bailout);
3616  __ movq(result_operand, result_pos);
3617  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3618 
3619  __ movq(string, separator_operand);
3620  __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3621  Smi::FromInt(1));
3622  __ j(equal, &one_char_separator);
3623  __ j(greater, &long_separator);
3624 
3625 
3626  // Empty separator case:
3627  __ Set(index, 0);
3628  __ movl(scratch, array_length_operand);
3629  __ jmp(&loop_1_condition);
3630  // Loop condition: while (index < array_length).
3631  __ bind(&loop_1);
3632  // Each iteration of the loop concatenates one string to the result.
3633  // Live values in registers:
3634  // index: which element of the elements array we are adding to the result.
3635  // result_pos: the position to which we are currently copying characters.
3636  // elements: the FixedArray of strings we are joining.
3637  // scratch: array length.
3638 
3639  // Get string = array[index].
3640  __ movq(string, FieldOperand(elements, index,
3641  times_pointer_size,
3642  FixedArray::kHeaderSize));
3643  __ SmiToInteger32(string_length,
3644  FieldOperand(string, String::kLengthOffset));
3645  __ lea(string,
3646  FieldOperand(string, SeqAsciiString::kHeaderSize));
3647  __ CopyBytes(result_pos, string, string_length);
3648  __ incl(index);
3649  __ bind(&loop_1_condition);
3650  __ cmpl(index, scratch);
3651  __ j(less, &loop_1); // Loop while (index < array_length).
3652  __ jmp(&done);
3653 
3654  // Generic bailout code used from several places.
3655  __ bind(&bailout);
3656  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3657  __ jmp(&return_result);
3658 
3659 
3660  // One-character separator case
3661  __ bind(&one_char_separator);
3662  // Get the separator ASCII character value.
3663  // Register "string" holds the separator.
3664  __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3665  __ Set(index, 0);
3666  // Jump into the loop after the code that copies the separator, so the first
3667  // element is not preceded by a separator
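  // (The joined result thus has the usual Array.prototype.join shape:
  //  element0 + separator + element1 + ... + separator + elementN-1.)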
3668  __ jmp(&loop_2_entry);
3669  // Loop condition: while (index < length).
3670  __ bind(&loop_2);
3671  // Each iteration of the loop concatenates one string to the result.
3672  // Live values in registers:
3673  // elements: The FixedArray of strings we are joining.
3674  // index: which element of the elements array we are adding to the result.
3675  // result_pos: the position to which we are currently copying characters.
3676  // scratch: Separator character.
3677 
3678  // Copy the separator character to the result.
3679  __ movb(Operand(result_pos, 0), scratch);
3680  __ incq(result_pos);
3681 
3682  __ bind(&loop_2_entry);
3683  // Get string = array[index].
3684  __ movq(string, FieldOperand(elements, index,
3685  times_pointer_size,
3686  FixedArray::kHeaderSize));
3687  __ SmiToInteger32(string_length,
3688  FieldOperand(string, String::kLengthOffset));
3689  __ lea(string,
3690  FieldOperand(string, SeqAsciiString::kHeaderSize));
3691  __ CopyBytes(result_pos, string, string_length);
3692  __ incl(index);
3693  __ cmpl(index, array_length_operand);
3694  __ j(less, &loop_2); // End while (index < length).
3695  __ jmp(&done);
3696 
3697 
3698  // Long separator case (separator is more than one character).
3699  __ bind(&long_separator);
3700 
3701  // Make elements point to end of elements array, and index
3702  // count from -array_length to zero, so we don't need to maintain
3703  // a loop limit.
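  // With a negative index, the loop below can address each element via
  // Operand(elements, index, times_pointer_size, 0) and simply stop once
  // incrementing index reaches zero, so no separate limit register is needed.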
3704  __ movl(index, array_length_operand);
3705  __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3706  FixedArray::kHeaderSize));
3707  __ neg(index);
3708 
3709  // Replace separator string with pointer to its first character, and
3710  // make scratch be its length.
3711  __ movq(string, separator_operand);
3712  __ SmiToInteger32(scratch,
3713  FieldOperand(string, SeqAsciiString::kLengthOffset));
3714  __ lea(string,
3715  FieldOperand(string, SeqAsciiString::kHeaderSize));
3716  __ movq(separator_operand, string);
3717 
3718  // Jump into the loop after the code that copies the separator, so the first
3719  // element is not preceded by a separator
3720  __ jmp(&loop_3_entry);
3721  // Loop condition: while (index < length).
3722  __ bind(&loop_3);
3723  // Each iteration of the loop concatenates one string to the result.
3724  // Live values in registers:
3725  // index: which element of the elements array we are adding to the result.
3726  // result_pos: the position to which we are currently copying characters.
3727  // scratch: Separator length.
3728  // separator_operand (rsp[0x10]): Address of first char of separator.
3729 
3730  // Copy the separator to the result.
3731  __ movq(string, separator_operand);
3732  __ movl(string_length, scratch);
3733  __ CopyBytes(result_pos, string, string_length, 2);
3734 
3735  __ bind(&loop_3_entry);
3736  // Get string = array[index].
3737  __ movq(string, Operand(elements, index, times_pointer_size, 0));
3738  __ SmiToInteger32(string_length,
3739  FieldOperand(string, String::kLengthOffset));
3740  __ lea(string,
3741  FieldOperand(string, SeqAsciiString::kHeaderSize));
3742  __ CopyBytes(result_pos, string, string_length);
3743  __ incq(index);
3744  __ j(not_equal, &loop_3); // Loop while (index < 0).
3745 
3746  __ bind(&done);
3747  __ movq(rax, result_operand);
3748 
3749  __ bind(&return_result);
3750  // Drop temp values from the stack, and restore context register.
3751  __ addq(rsp, Immediate(3 * kPointerSize));
3752  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3753  context()->Plug(rax);
3754 }
3755 
3756 
3757 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3758  Handle<String> name = expr->name();
3759  if (name->length() > 0 && name->Get(0) == '_') {
3760  Comment cmnt(masm_, "[ InlineRuntimeCall");
3761  EmitInlineRuntimeCall(expr);
3762  return;
3763  }
3764 
3765  Comment cmnt(masm_, "[ CallRuntime");
3766  ZoneList<Expression*>* args = expr->arguments();
3767 
3768  if (expr->is_jsruntime()) {
3769  // Prepare for calling JS runtime function.
3770  __ movq(rax, GlobalObjectOperand());
3771  __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3772  }
3773 
3774  // Push the arguments ("left-to-right").
3775  int arg_count = args->length();
3776  for (int i = 0; i < arg_count; i++) {
3777  VisitForStackValue(args->at(i));
3778  }
3779 
3780  if (expr->is_jsruntime()) {
3781  // Call the JS runtime function using a call IC.
3782  __ Move(rcx, expr->name());
3783  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3784  Handle<Code> ic =
3785  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3786  CallIC(ic, mode, expr->CallRuntimeFeedbackId());
3787  // Restore context register.
3788  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3789  } else {
3790  __ CallRuntime(expr->function(), arg_count);
3791  }
3792  context()->Plug(rax);
3793 }
3794 
3795 
3796 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3797  switch (expr->op()) {
3798  case Token::DELETE: {
3799  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3800  Property* property = expr->expression()->AsProperty();
3801  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3802 
3803  if (property != NULL) {
3804  VisitForStackValue(property->obj());
3805  VisitForStackValue(property->key());
3806  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3807  ? kNonStrictMode : kStrictMode;
3808  __ Push(Smi::FromInt(strict_mode_flag));
3809  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3810  context()->Plug(rax);
3811  } else if (proxy != NULL) {
3812  Variable* var = proxy->var();
3813  // Delete of an unqualified identifier is disallowed in strict mode
3814  // but "delete this" is allowed.
3815  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3816  if (var->IsUnallocated()) {
3817  __ push(GlobalObjectOperand());
3818  __ Push(var->name());
3819  __ Push(Smi::FromInt(kNonStrictMode));
3820  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3821  context()->Plug(rax);
3822  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3823  // Result of deleting non-global variables is false. 'this' is
3824  // not really a variable, though we implement it as one. The
3825  // subexpression does not have side effects.
3826  context()->Plug(var->is_this());
3827  } else {
3828  // Non-global variable. Call the runtime to try to delete from the
3829  // context where the variable was introduced.
3830  __ push(context_register());
3831  __ Push(var->name());
3832  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3833  context()->Plug(rax);
3834  }
3835  } else {
3836  // Result of deleting non-property, non-variable reference is true.
3837  // The subexpression may have side effects.
3838  VisitForEffect(expr->expression());
3839  context()->Plug(true);
3840  }
3841  break;
3842  }
3843 
3844  case Token::VOID: {
3845  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3846  VisitForEffect(expr->expression());
3847  context()->Plug(Heap::kUndefinedValueRootIndex);
3848  break;
3849  }
3850 
3851  case Token::NOT: {
3852  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3853  if (context()->IsEffect()) {
3854  // Unary NOT has no side effects so it's only necessary to visit the
3855  // subexpression. Match the optimizing compiler by not branching.
3856  VisitForEffect(expr->expression());
3857  } else if (context()->IsTest()) {
3858  const TestContext* test = TestContext::cast(context());
3859  // The labels are swapped for the recursive call.
3860  VisitForControl(expr->expression(),
3861  test->false_label(),
3862  test->true_label(),
3863  test->fall_through());
3864  context()->Plug(test->true_label(), test->false_label());
3865  } else {
3866  // We handle value contexts explicitly rather than simply visiting
3867  // for control and plugging the control flow into the context,
3868  // because we need to prepare a pair of extra administrative AST ids
3869  // for the optimizing compiler.
3870  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3871  Label materialize_true, materialize_false, done;
3872  VisitForControl(expr->expression(),
3873  &materialize_false,
3874  &materialize_true,
3875  &materialize_true);
3876  __ bind(&materialize_true);
3877  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3878  if (context()->IsAccumulatorValue()) {
3879  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
3880  } else {
3881  __ PushRoot(Heap::kTrueValueRootIndex);
3882  }
3883  __ jmp(&done, Label::kNear);
3884  __ bind(&materialize_false);
3885  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3886  if (context()->IsAccumulatorValue()) {
3887  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
3888  } else {
3889  __ PushRoot(Heap::kFalseValueRootIndex);
3890  }
3891  __ bind(&done);
3892  }
3893  break;
3894  }
3895 
3896  case Token::TYPEOF: {
3897  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3898  { StackValueContext context(this);
3899  VisitForTypeofValue(expr->expression());
3900  }
3901  __ CallRuntime(Runtime::kTypeof, 1);
3902  context()->Plug(rax);
3903  break;
3904  }
3905 
3906  case Token::ADD: {
3907  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3908  VisitForAccumulatorValue(expr->expression());
3909  Label no_conversion;
3910  __ JumpIfSmi(result_register(), &no_conversion);
3911  ToNumberStub convert_stub;
3912  __ CallStub(&convert_stub);
3913  __ bind(&no_conversion);
3914  context()->Plug(result_register());
3915  break;
3916  }
3917 
3918  case Token::SUB:
3919  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3920  break;
3921 
3922  case Token::BIT_NOT:
3923  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3924  break;
3925 
3926  default:
3927  UNREACHABLE();
3928  }
3929 }
3930 
3931 
3932 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3933  const char* comment) {
3934  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3935  Comment cmt(masm_, comment);
3936  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3937  UnaryOverwriteMode overwrite =
3938  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3939  UnaryOpStub stub(expr->op(), overwrite);
3940  // UnaryOpStub expects the argument to be in the
3941  // accumulator register rax.
3942  VisitForAccumulatorValue(expr->expression());
3943  SetSourcePosition(expr->position());
3944  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
3945  expr->UnaryOperationFeedbackId());
3946  context()->Plug(rax);
3947 }
3948 
3949 
3950 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3951  Comment cmnt(masm_, "[ CountOperation");
3952  SetSourcePosition(expr->position());
3953 
3954  // Invalid left-hand-sides are rewritten to have a 'throw
3955  // ReferenceError' as the left-hand side.
3956  if (!expr->expression()->IsValidLeftHandSide()) {
3957  VisitForEffect(expr->expression());
3958  return;
3959  }
3960 
3961  // Expression can only be a property, a global or a (parameter or local)
3962  // slot.
3963  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
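  // For example: "x++" is VARIABLE, "obj.x++" is NAMED_PROPERTY and
  // "obj[i]++" is KEYED_PROPERTY.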
3964  LhsKind assign_type = VARIABLE;
3965  Property* prop = expr->expression()->AsProperty();
3966  // In case of a property we use the uninitialized expression context
3967  // of the key to detect a named property.
3968  if (prop != NULL) {
3969  assign_type =
3970  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3971  }
3972 
3973  // Evaluate expression and get value.
3974  if (assign_type == VARIABLE) {
3975  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3976  AccumulatorValueContext context(this);
3977  EmitVariableLoad(expr->expression()->AsVariableProxy());
3978  } else {
3979  // Reserve space for result of postfix operation.
3980  if (expr->is_postfix() && !context()->IsEffect()) {
3981  __ Push(Smi::FromInt(0));
3982  }
3983  if (assign_type == NAMED_PROPERTY) {
3984  VisitForAccumulatorValue(prop->obj());
3985  __ push(rax); // Copy of receiver, needed for later store.
3986  EmitNamedPropertyLoad(prop);
3987  } else {
3988  VisitForStackValue(prop->obj());
3989  VisitForAccumulatorValue(prop->key());
3990  __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
3991  __ push(rax); // Copy of key, needed for later store.
3992  EmitKeyedPropertyLoad(prop);
3993  }
3994  }
3995 
3996  // We need a second deoptimization point after loading the value
3997  // in case evaluating the property load may have a side effect.
3998  if (assign_type == VARIABLE) {
3999  PrepareForBailout(expr->expression(), TOS_REG);
4000  } else {
4001  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4002  }
4003 
4004  // Call ToNumber only if operand is not a smi.
4005  Label no_conversion;
4006  __ JumpIfSmi(rax, &no_conversion, Label::kNear);
4007  ToNumberStub convert_stub;
4008  __ CallStub(&convert_stub);
4009  __ bind(&no_conversion);
4010 
4011  // Save result for postfix expressions.
4012  if (expr->is_postfix()) {
4013  if (!context()->IsEffect()) {
4014  // Save the result on the stack. If we have a named or keyed property
4015  // we store the result under the receiver that is currently on top
4016  // of the stack.
4017  switch (assign_type) {
4018  case VARIABLE:
4019  __ push(rax);
4020  break;
4021  case NAMED_PROPERTY:
4022  __ movq(Operand(rsp, kPointerSize), rax);
4023  break;
4024  case KEYED_PROPERTY:
4025  __ movq(Operand(rsp, 2 * kPointerSize), rax);
4026  break;
4027  }
4028  }
4029  }
4030 
4031  // Inline smi case if we are in a loop.
4032  Label done, stub_call;
4033  JumpPatchSite patch_site(masm_);
4034 
4035  if (ShouldInlineSmiCase(expr->op())) {
4036  if (expr->op() == Token::INC) {
4037  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
4038  } else {
4039  __ SmiSubConstant(rax, rax, Smi::FromInt(1));
4040  }
4041  __ j(overflow, &stub_call, Label::kNear);
4042  // We could eliminate this smi check if we split the code at
4043  // the first smi check before calling ToNumber.
4044  patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
4045 
4046  __ bind(&stub_call);
4047  // Call stub. Undo operation first.
4048  if (expr->op() == Token::INC) {
4049  __ SmiSubConstant(rax, rax, Smi::FromInt(1));
4050  } else {
4051  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
4052  }
4053  }
4054 
4055  // Record position before stub call.
4056  SetSourcePosition(expr->position());
4057 
4058  // Call stub for +1/-1.
4059  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
4060  if (expr->op() == Token::INC) {
4061  __ Move(rdx, Smi::FromInt(1));
4062  } else {
4063  __ movq(rdx, rax);
4064  __ Move(rax, Smi::FromInt(1));
4065  }
4066  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4067  patch_site.EmitPatchInfo();
4068  __ bind(&done);
4069 
4070  // Store the value returned in rax.
4071  switch (assign_type) {
4072  case VARIABLE:
4073  if (expr->is_postfix()) {
4074  // Perform the assignment as if via '='.
4075  { EffectContext context(this);
4076  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4077  Token::ASSIGN);
4078  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4079  context.Plug(rax);
4080  }
4081  // For all contexts except kEffect: We have the result on
4082  // top of the stack.
4083  if (!context()->IsEffect()) {
4084  context()->PlugTOS();
4085  }
4086  } else {
4087  // Perform the assignment as if via '='.
4088  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4089  Token::ASSIGN);
4090  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4091  context()->Plug(rax);
4092  }
4093  break;
4094  case NAMED_PROPERTY: {
4095  __ Move(rcx, prop->key()->AsLiteral()->handle());
4096  __ pop(rdx);
4097  Handle<Code> ic = is_classic_mode()
4098  ? isolate()->builtins()->StoreIC_Initialize()
4099  : isolate()->builtins()->StoreIC_Initialize_Strict();
4100  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4101  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4102  if (expr->is_postfix()) {
4103  if (!context()->IsEffect()) {
4104  context()->PlugTOS();
4105  }
4106  } else {
4107  context()->Plug(rax);
4108  }
4109  break;
4110  }
4111  case KEYED_PROPERTY: {
4112  __ pop(rcx);
4113  __ pop(rdx);
4114  Handle<Code> ic = is_classic_mode()
4115  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4116  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4117  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4118  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4119  if (expr->is_postfix()) {
4120  if (!context()->IsEffect()) {
4121  context()->PlugTOS();
4122  }
4123  } else {
4124  context()->Plug(rax);
4125  }
4126  break;
4127  }
4128  }
4129 }
4130 
4131 
4132 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4133  VariableProxy* proxy = expr->AsVariableProxy();
4134  ASSERT(!context()->IsEffect());
4135  ASSERT(!context()->IsTest());
4136 
4137  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4138  Comment cmnt(masm_, "Global variable");
4139  __ Move(rcx, proxy->name());
4140  __ movq(rax, GlobalObjectOperand());
4141  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4142  // Use a regular load, not a contextual load, to avoid a reference
4143  // error.
4144  CallIC(ic);
4145  PrepareForBailout(expr, TOS_REG);
4146  context()->Plug(rax);
4147  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4148  Label done, slow;
4149 
4150  // Generate code for loading from variables potentially shadowed
4151  // by eval-introduced variables.
4152  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4153 
4154  __ bind(&slow);
4155  __ push(rsi);
4156  __ Push(proxy->name());
4157  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4158  PrepareForBailout(expr, TOS_REG);
4159  __ bind(&done);
4160 
4161  context()->Plug(rax);
4162  } else {
4163  // This expression cannot throw a reference error at the top level.
4164  VisitInDuplicateContext(expr);
4165  }
4166 }
4167 
4168 
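// Emits the fast path for comparing a "typeof <sub_expr>" result against a
// string literal, e.g. typeof x == "number", branching on the value's type
// directly instead of materializing the typeof string.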
4169 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4170  Expression* sub_expr,
4171  Handle<String> check) {
4172  Label materialize_true, materialize_false;
4173  Label* if_true = NULL;
4174  Label* if_false = NULL;
4175  Label* fall_through = NULL;
4176  context()->PrepareTest(&materialize_true, &materialize_false,
4177  &if_true, &if_false, &fall_through);
4178 
4179  { AccumulatorValueContext context(this);
4180  VisitForTypeofValue(sub_expr);
4181  }
4182  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4183 
4184  if (check->Equals(isolate()->heap()->number_symbol())) {
4185  __ JumpIfSmi(rax, if_true);
4186  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4187  __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4188  Split(equal, if_true, if_false, fall_through);
4189  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4190  __ JumpIfSmi(rax, if_false);
4191  // Check for undetectable objects => false.
4192  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4193  __ j(above_equal, if_false);
4194  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4195  Immediate(1 << Map::kIsUndetectable));
4196  Split(zero, if_true, if_false, fall_through);
4197  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4198  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4199  __ j(equal, if_true);
4200  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4201  Split(equal, if_true, if_false, fall_through);
4202  } else if (FLAG_harmony_typeof &&
4203  check->Equals(isolate()->heap()->null_symbol())) {
4204  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4205  Split(equal, if_true, if_false, fall_through);
4206  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4207  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4208  __ j(equal, if_true);
4209  __ JumpIfSmi(rax, if_false);
4210  // Check for undetectable objects => true.
4211  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4212  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4213  Immediate(1 << Map::kIsUndetectable));
4214  Split(not_zero, if_true, if_false, fall_through);
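  // (Maps with the undetectable bit set, e.g. the map used for document.all
  //  emulation, are intended to answer "undefined" to typeof, which is why
  //  the bit test above sends them to if_true.)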
4215  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4216  __ JumpIfSmi(rax, if_false);
4217  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4218  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4219  __ j(equal, if_true);
4220  __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4221  Split(equal, if_true, if_false, fall_through);
4222  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4223  __ JumpIfSmi(rax, if_false);
4224  if (!FLAG_harmony_typeof) {
4225  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4226  __ j(equal, if_true);
4227  }
4228  __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4229  __ j(below, if_false);
4230  __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4231  __ j(above, if_false);
4232  // Check for undetectable objects => false.
4233  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4234  Immediate(1 << Map::kIsUndetectable));
4235  Split(zero, if_true, if_false, fall_through);
4236  } else {
4237  if (if_false != fall_through) __ jmp(if_false);
4238  }
4239  context()->Plug(if_true, if_false);
4240 }
4241 
4242 
4243 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4244  Comment cmnt(masm_, "[ CompareOperation");
4245  SetSourcePosition(expr->position());
4246 
4247  // First we try a fast inlined version of the compare when one of
4248  // the operands is a literal.
4249  if (TryLiteralCompare(expr)) return;
4250 
4251  // Always perform the comparison for its control flow. Pack the result
4252  // into the expression's context after the comparison is performed.
4253  Label materialize_true, materialize_false;
4254  Label* if_true = NULL;
4255  Label* if_false = NULL;
4256  Label* fall_through = NULL;
4257  context()->PrepareTest(&materialize_true, &materialize_false,
4258  &if_true, &if_false, &fall_through);
4259 
4260  Token::Value op = expr->op();
4261  VisitForStackValue(expr->left());
4262  switch (op) {
4263  case Token::IN:
4264  VisitForStackValue(expr->right());
4265  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4266  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4267  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4268  Split(equal, if_true, if_false, fall_through);
4269  break;
4270 
4271  case Token::INSTANCEOF: {
4272  VisitForStackValue(expr->right());
4273  InstanceofStub stub(InstanceofStub::kNoFlags);
4274  __ CallStub(&stub);
4275  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4276  __ testq(rax, rax);
4277  // The stub returns 0 for true.
4278  Split(zero, if_true, if_false, fall_through);
4279  break;
4280  }
4281 
4282  default: {
4283  VisitForAccumulatorValue(expr->right());
4284  Condition cc = no_condition;
4285  switch (op) {
4286  case Token::EQ_STRICT:
4287  case Token::EQ:
4288  cc = equal;
4289  break;
4290  case Token::LT:
4291  cc = less;
4292  break;
4293  case Token::GT:
4294  cc = greater;
4295  break;
4296  case Token::LTE:
4297  cc = less_equal;
4298  break;
4299  case Token::GTE:
4300  cc = greater_equal;
4301  break;
4302  case Token::IN:
4303  case Token::INSTANCEOF:
4304  default:
4305  UNREACHABLE();
4306  }
4307  __ pop(rdx);
4308 
4309  bool inline_smi_code = ShouldInlineSmiCase(op);
4310  JumpPatchSite patch_site(masm_);
4311  if (inline_smi_code) {
4312  Label slow_case;
4313  __ movq(rcx, rdx);
4314  __ or_(rcx, rax);
4315  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4316  __ cmpq(rdx, rax);
4317  Split(cc, if_true, if_false, NULL);
4318  __ bind(&slow_case);
4319  }
4320 
4321  // Record position and call the compare IC.
4322  SetSourcePosition(expr->position());
4323  Handle<Code> ic = CompareIC::GetUninitialized(op);
4324  CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4325  patch_site.EmitPatchInfo();
4326 
4327  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4328  __ testq(rax, rax);
4329  Split(cc, if_true, if_false, fall_through);
4330  }
4331  }
4332 
4333  // Convert the result of the comparison into one expected for this
4334  // expression's context.
4335  context()->Plug(if_true, if_false);
4336 }
4337 
4338 
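// Emits the fast path for comparing an expression against null or undefined,
// e.g. "x == null" (which also matches undefined and undetectable objects)
// versus "x === null" (which matches only null itself).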
4339 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4340  Expression* sub_expr,
4341  NilValue nil) {
4342  Label materialize_true, materialize_false;
4343  Label* if_true = NULL;
4344  Label* if_false = NULL;
4345  Label* fall_through = NULL;
4346  context()->PrepareTest(&materialize_true, &materialize_false,
4347  &if_true, &if_false, &fall_through);
4348 
4349  VisitForAccumulatorValue(sub_expr);
4350  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4351  Heap::RootListIndex nil_value = nil == kNullValue ?
4352  Heap::kNullValueRootIndex :
4353  Heap::kUndefinedValueRootIndex;
4354  __ CompareRoot(rax, nil_value);
4355  if (expr->op() == Token::EQ_STRICT) {
4356  Split(equal, if_true, if_false, fall_through);
4357  } else {
4358  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4359  Heap::kUndefinedValueRootIndex :
4360  Heap::kNullValueRootIndex;
4361  __ j(equal, if_true);
4362  __ CompareRoot(rax, other_nil_value);
4363  __ j(equal, if_true);
4364  __ JumpIfSmi(rax, if_false);
4365  // It can be an undetectable object.
4366  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4367  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4368  Immediate(1 << Map::kIsUndetectable));
4369  Split(not_zero, if_true, if_false, fall_through);
4370  }
4371  context()->Plug(if_true, if_false);
4372 }
4373 
4374 
4375 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4376  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4377  context()->Plug(rax);
4378 }
4379 
4380 
4381 Register FullCodeGenerator::result_register() {
4382  return rax;
4383 }
4384 
4385 
4386 Register FullCodeGenerator::context_register() {
4387  return rsi;
4388 }
4389 
4390 
4391 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4392  ASSERT(IsAligned(frame_offset, kPointerSize));
4393  __ movq(Operand(rbp, frame_offset), value);
4394 }
4395 
4396 
4397 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4398  __ movq(dst, ContextOperand(rsi, context_index));
4399 }
4400 
4401 
4402 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4403  Scope* declaration_scope = scope()->DeclarationScope();
4404  if (declaration_scope->is_global_scope() ||
4405  declaration_scope->is_module_scope()) {
4406  // Contexts nested in the native context have a canonical empty function
4407  // as their closure, not the anonymous closure containing the global
4408  // code. Pass a smi sentinel and let the runtime look up the empty
4409  // function.
4410  __ Push(Smi::FromInt(0));
4411  } else if (declaration_scope->is_eval_scope()) {
4412  // Contexts created by a call to eval have the same closure as the
4413  // context calling eval, not the anonymous closure containing the eval
4414  // code. Fetch it from the context.
4415  __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4416  } else {
4417  ASSERT(declaration_scope->is_function_scope());
4418  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4419  }
4420 }
4421 
4422 
4423 // ----------------------------------------------------------------------------
4424 // Non-local control flow support.
4425 
4426 
4427 void FullCodeGenerator::EnterFinallyBlock() {
4428  ASSERT(!result_register().is(rdx));
4429  ASSERT(!result_register().is(rcx));
4430  // Cook return address on top of stack (smi encoded Code* delta)
4431  __ pop(rdx);
4432  __ Move(rcx, masm_->CodeObject());
4433  __ subq(rdx, rcx);
4434  __ Integer32ToSmi(rdx, rdx);
4435  __ push(rdx);
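  // Keeping the return address as a smi-encoded delta from the code object,
  // rather than as a raw pointer, means a GC that moves the code object while
  // the finally block runs cannot leave a stale address on the stack;
  // ExitFinallyBlock performs the inverse transformation.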
4436 
4437  // Store result register while executing finally block.
4438  __ push(result_register());
4439 
4440  // Store pending message while executing finally block.
4441  ExternalReference pending_message_obj =
4442  ExternalReference::address_of_pending_message_obj(isolate());
4443  __ Load(rdx, pending_message_obj);
4444  __ push(rdx);
4445 
4446  ExternalReference has_pending_message =
4447  ExternalReference::address_of_has_pending_message(isolate());
4448  __ Load(rdx, has_pending_message);
4449  __ Integer32ToSmi(rdx, rdx);
4450  __ push(rdx);
4451 
4452  ExternalReference pending_message_script =
4453  ExternalReference::address_of_pending_message_script(isolate());
4454  __ Load(rdx, pending_message_script);
4455  __ push(rdx);
4456 }
4457 
4458 
4459 void FullCodeGenerator::ExitFinallyBlock() {
4460  ASSERT(!result_register().is(rdx));
4461  ASSERT(!result_register().is(rcx));
4462  // Restore pending message from stack.
4463  __ pop(rdx);
4464  ExternalReference pending_message_script =
4465  ExternalReference::address_of_pending_message_script(isolate());
4466  __ Store(pending_message_script, rdx);
4467 
4468  __ pop(rdx);
4469  __ SmiToInteger32(rdx, rdx);
4470  ExternalReference has_pending_message =
4471  ExternalReference::address_of_has_pending_message(isolate());
4472  __ Store(has_pending_message, rdx);
4473 
4474  __ pop(rdx);
4475  ExternalReference pending_message_obj =
4476  ExternalReference::address_of_pending_message_obj(isolate());
4477  __ Store(pending_message_obj, rdx);
4478 
4479  // Restore result register from stack.
4480  __ pop(result_register());
4481 
4482  // Uncook return address.
4483  __ pop(rdx);
4484  __ SmiToInteger32(rdx, rdx);
4485  __ Move(rcx, masm_->CodeObject());
4486  __ addq(rdx, rcx);
4487  __ jmp(rdx);
4488 }
4489 
4490 
4491 #undef __
4492 
4493 #define __ ACCESS_MASM(masm())
4494 
4495 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4496  int* stack_depth,
4497  int* context_length) {
4498  // The macros used here must preserve the result register.
4499 
4500  // Because the handler block contains the context of the finally
4501  // code, we can restore it directly from there for the finally code
4502  // rather than iteratively unwinding contexts via their previous
4503  // links.
4504  __ Drop(*stack_depth); // Down to the handler block.
4505  if (*context_length > 0) {
4506  // Restore the context to its dedicated register and the stack.
4507  __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4508  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4509  }
4510  __ PopTryHandler();
4511  __ call(finally_entry_);
4512 
4513  *stack_depth = 0;
4514  *context_length = 0;
4515  return previous_;
4516 }
4517 
4518 
4519 #undef __
4520 
4521 } } // namespace v8::internal
4522 
4523 #endif // V8_TARGET_ARCH_X64