v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
full-codegen-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_X64
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance near_jump = Label::kFar) {
63  __ testb(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, near_jump); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance near_jump = Label::kFar) {
70  __ testb(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, near_jump); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ testl(rax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, near_jump);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
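A rough sketch of how these patch sites behave (the patching itself happens later, in the IC machinery): `test` always clears the carry flag, so in unpatched code the jc emitted by EmitJumpIfSmi is never taken and the jnc emitted by EmitJumpIfNotSmi is always taken. Once type feedback warrants the inline smi path, jc/jnc are rewritten to jz/jnz, and the preceding testb against kSmiTagMask then makes the branch depend on whether the value is a smi. The testl(rax, delta) emitted by EmitPatchInfo does not change rax; its immediate just records how far back the patch site lies.
//   Unpatched: testb reg, kSmiTagMask ; jnc target   (always taken, CF == 0)
//   Patched:   testb reg, kSmiTagMask ; jnz target   (taken iff reg is not a smi)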
102 
103 
104 static void EmitStackCheck(MacroAssembler* masm_,
105  int pointers = 0,
106  Register scratch = rsp) {
107  Isolate* isolate = masm_->isolate();
108  Label ok;
109  ASSERT(scratch.is(rsp) == (pointers == 0));
110  if (pointers != 0) {
111  __ movq(scratch, rsp);
112  __ subq(scratch, Immediate(pointers * kPointerSize));
113  }
114  __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
115  __ j(above_equal, &ok, Label::kNear);
116  __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
117  __ bind(&ok);
118 }
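When called with a non-zero pointer count, this helper verifies that the requested number of stack slots can still be allocated without crossing the stack limit; roughly:
//   scratch = rsp - pointers * kPointerSize;
//   if (scratch < stack_limit) call the StackCheck builtin;  // otherwise fall through to ok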
119 
120 
121 // Generate code for a JS function. On entry to the function the receiver
122 // and arguments have been pushed on the stack left to right, with the
123 // return address on top of them. The actual argument count matches the
124 // formal parameter count expected by the function.
125 //
126 // The live registers are:
127 // o rdi: the JS function object being called (i.e. ourselves)
128 // o rsi: our context
129 // o rbp: our caller's frame pointer
130 // o rsp: stack pointer (pointing to return address)
131 //
132 // The function builds a JS frame. Please see JavaScriptFrameConstants in
133 // frames-x64.h for its layout.
134 void FullCodeGenerator::Generate() {
135  CompilationInfo* info = info_;
136  handler_table_ =
137  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
138 
139  InitializeFeedbackVector();
140 
141  profiling_counter_ = isolate()->factory()->NewCell(
142  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
143  SetFunctionPosition(function());
144  Comment cmnt(masm_, "[ function compiled by full code generator");
145 
146  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
147 
148 #ifdef DEBUG
149  if (strlen(FLAG_stop_at) > 0 &&
150  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
151  __ int3();
152  }
153 #endif
154 
155  // Sloppy mode functions and builtins need to replace the receiver with the
156  // global proxy when called as functions (without an explicit receiver
157  // object).
158  if (info->strict_mode() == SLOPPY && !info->is_native()) {
159  Label ok;
160  // +1 for return address.
161  StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
162  __ movp(rcx, args.GetReceiverOperand());
163 
164  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
165  __ j(not_equal, &ok, Label::kNear);
166 
167  __ movp(rcx, GlobalObjectOperand());
168  __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
169 
170  __ movp(args.GetReceiverOperand(), rcx);
171 
172  __ bind(&ok);
173  }
174 
175  // Open a frame scope to indicate that there is a frame on the stack. The
176  // MANUAL indicates that the scope shouldn't actually generate code to set up
177  // the frame (that is done below).
178  FrameScope frame_scope(masm_, StackFrame::MANUAL);
179 
180  info->set_prologue_offset(masm_->pc_offset());
181  __ Prologue(BUILD_FUNCTION_FRAME);
182  info->AddNoFrameRange(0, masm_->pc_offset());
183 
184  { Comment cmnt(masm_, "[ Allocate locals");
185  int locals_count = info->scope()->num_stack_slots();
186  // Generators allocate locals, if any, in context slots.
187  ASSERT(!info->function()->is_generator() || locals_count == 0);
188  if (locals_count == 1) {
189  __ PushRoot(Heap::kUndefinedValueRootIndex);
190  } else if (locals_count > 1) {
191  if (locals_count >= 128) {
192  EmitStackCheck(masm_, locals_count, rcx);
193  }
194  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
195  const int kMaxPushes = 32;
196  if (locals_count >= kMaxPushes) {
197  int loop_iterations = locals_count / kMaxPushes;
198  __ movq(rcx, Immediate(loop_iterations));
199  Label loop_header;
200  __ bind(&loop_header);
201  // Do pushes.
202  for (int i = 0; i < kMaxPushes; i++) {
203  __ Push(rdx);
204  }
205  // Continue loop if not done.
206  __ decq(rcx);
207  __ j(not_zero, &loop_header, Label::kNear);
208  }
209  int remaining = locals_count % kMaxPushes;
210  // Emit the remaining pushes.
211  for (int i = 0; i < remaining; i++) {
212  __ Push(rdx);
213  }
214  }
215  }
216 
217  bool function_in_register = true;
218 
219  // Possibly allocate a local context.
220  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
221  if (heap_slots > 0) {
222  Comment cmnt(masm_, "[ Allocate context");
223  // Argument to NewContext is the function, which is still in rdi.
224  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
225  __ Push(rdi);
226  __ Push(info->scope()->GetScopeInfo());
227  __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
228  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
229  FastNewContextStub stub(heap_slots);
230  __ CallStub(&stub);
231  } else {
232  __ Push(rdi);
233  __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
234  }
235  function_in_register = false;
236  // Context is returned in rax. It replaces the context passed to us.
237  // It's saved in the stack and kept live in rsi.
238  __ movp(rsi, rax);
239  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
240 
241  // Copy any necessary parameters into the context.
242  int num_parameters = info->scope()->num_parameters();
243  for (int i = 0; i < num_parameters; i++) {
244  Variable* var = scope()->parameter(i);
245  if (var->IsContextSlot()) {
246  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
247  (num_parameters - 1 - i) * kPointerSize;
248  // Load parameter from stack.
249  __ movp(rax, Operand(rbp, parameter_offset));
250  // Store it in the context.
251  int context_offset = Context::SlotOffset(var->index());
252  __ movp(Operand(rsi, context_offset), rax);
253  // Update the write barrier. This clobbers rax and rbx.
254  __ RecordWriteContextSlot(
255  rsi, context_offset, rax, rbx, kDontSaveFPRegs);
256  }
257  }
258  }
259 
260  // Possibly allocate an arguments object.
261  Variable* arguments = scope()->arguments();
262  if (arguments != NULL) {
263  // Arguments object must be allocated after the context object, in
264  // case the "arguments" or ".arguments" variables are in the context.
265  Comment cmnt(masm_, "[ Allocate arguments object");
266  if (function_in_register) {
267  __ Push(rdi);
268  } else {
269  __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
270  }
271  // The receiver is just before the parameters on the caller's stack.
272  int num_parameters = info->scope()->num_parameters();
273  int offset = num_parameters * kPointerSize;
274  __ leap(rdx,
275  Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
276  __ Push(rdx);
277  __ Push(Smi::FromInt(num_parameters));
278  // Arguments to ArgumentsAccessStub:
279  // function, receiver address, parameter count.
280  // The stub will rewrite receiver and parameter count if the previous
281  // stack frame was an arguments adapter frame.
282  ArgumentsAccessStub::Type type;
283  if (strict_mode() == STRICT) {
284  type = ArgumentsAccessStub::NEW_STRICT;
285  } else if (function()->has_duplicate_parameters()) {
286  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
287  } else {
288  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
289  }
290  ArgumentsAccessStub stub(type);
291  __ CallStub(&stub);
292 
293  SetVar(arguments, rax, rbx, rdx);
294  }
295 
296  if (FLAG_trace) {
297  __ CallRuntime(Runtime::kTraceEnter, 0);
298  }
299 
300  // Visit the declarations and body unless there is an illegal
301  // redeclaration.
302  if (scope()->HasIllegalRedeclaration()) {
303  Comment cmnt(masm_, "[ Declarations");
304  scope()->VisitIllegalRedeclaration(this);
305 
306  } else {
307  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
308  { Comment cmnt(masm_, "[ Declarations");
309  // For named function expressions, declare the function name as a
310  // constant.
311  if (scope()->is_function_scope() && scope()->function() != NULL) {
312  VariableDeclaration* function = scope()->function();
313  ASSERT(function->proxy()->var()->mode() == CONST ||
314  function->proxy()->var()->mode() == CONST_LEGACY);
315  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
316  VisitVariableDeclaration(function);
317  }
318  VisitDeclarations(scope()->declarations());
319  }
320 
321  { Comment cmnt(masm_, "[ Stack check");
322  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
323  EmitStackCheck(masm_);
324  }
325 
326  { Comment cmnt(masm_, "[ Body");
327  ASSERT(loop_depth() == 0);
328  VisitStatements(function()->body());
329  ASSERT(loop_depth() == 0);
330  }
331  }
332 
333  // Always emit a 'return undefined' in case control fell off the end of
334  // the body.
335  { Comment cmnt(masm_, "[ return <undefined>;");
336  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
337  EmitReturnSequence();
338  }
339 }
340 
341 
342 void FullCodeGenerator::ClearAccumulator() {
343  __ Set(rax, 0);
344 }
345 
346 
347 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
348  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
349  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
350  Smi::FromInt(-delta));
351 }
352 
353 
354 void FullCodeGenerator::EmitProfilingCounterReset() {
355  int reset_value = FLAG_interrupt_budget;
356  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
357  __ Move(kScratchRegister, Smi::FromInt(reset_value));
358  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
359 }
360 
361 
362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
363  Label* back_edge_target) {
364  Comment cmnt(masm_, "[ Back edge bookkeeping");
365  Label ok;
366 
367  ASSERT(back_edge_target->is_bound());
368  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
369  int weight = Min(kMaxBackEdgeWeight,
370  Max(1, distance / kCodeSizeMultiplier));
371  EmitProfilingCounterDecrement(weight);
372  __ j(positive, &ok, Label::kNear);
373  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
374 
375  // Record a mapping of this PC offset to the OSR id. This is used to find
376  // the AST id from the unoptimized code in order to use it as a key into
377  // the deoptimization input data found in the optimized code.
378  RecordBackEdge(stmt->OsrEntryId());
379 
380  EmitProfilingCounterReset();
381 
382  __ bind(&ok);
383  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
384  // Record a mapping of the OSR id to this PC. This is used if the OSR
385  // entry becomes the target of a bailout. We don't expect it to be, but
386  // we want it to work if it is.
387  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
388 }
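The profiling counter drives tiering: each back edge subtracts a weight proportional to the size of the loop body (clamped to kMaxBackEdgeWeight), and once the counter is no longer positive the InterruptCheck builtin runs, which is where on-stack replacement into optimized code can be triggered, after which the counter is reset to FLAG_interrupt_budget. In pseudocode:
//   counter -= Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
//   if (counter <= 0) { InterruptCheck(); counter = FLAG_interrupt_budget; }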
389 
390 
391 void FullCodeGenerator::EmitReturnSequence() {
392  Comment cmnt(masm_, "[ Return sequence");
393  if (return_label_.is_bound()) {
394  __ jmp(&return_label_);
395  } else {
396  __ bind(&return_label_);
397  if (FLAG_trace) {
398  __ Push(rax);
399  __ CallRuntime(Runtime::kTraceExit, 1);
400  }
401  // Pretend that the exit is a backwards jump to the entry.
402  int weight = 1;
403  if (info_->ShouldSelfOptimize()) {
404  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
405  } else {
406  int distance = masm_->pc_offset();
407  weight = Min(kMaxBackEdgeWeight,
408  Max(1, distance / kCodeSizeMultiplier));
409  }
410  EmitProfilingCounterDecrement(weight);
411  Label ok;
412  __ j(positive, &ok, Label::kNear);
413  __ Push(rax);
414  __ call(isolate()->builtins()->InterruptCheck(),
415  RelocInfo::CODE_TARGET);
416  __ Pop(rax);
417  EmitProfilingCounterReset();
418  __ bind(&ok);
419 #ifdef DEBUG
420  // Add a label for checking the size of the code used for returning.
421  Label check_exit_codesize;
422  masm_->bind(&check_exit_codesize);
423 #endif
424  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
425  __ RecordJSReturn();
426  // Do not use the leave instruction here because it is too short to
427  // patch with the code required by the debugger.
428  __ movp(rsp, rbp);
429  __ popq(rbp);
430  int no_frame_start = masm_->pc_offset();
431 
432  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
433  __ Ret(arguments_bytes, rcx);
434 
435 #ifdef ENABLE_DEBUGGER_SUPPORT
436  // Add padding that will be overwritten by a debugger breakpoint. We
437  // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
438  // (3 + 1 + 3).
439  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
440  for (int i = 0; i < kPadding; ++i) {
441  masm_->int3();
442  }
443  // Check that the size of the code used for returning is large enough
444  // for the debugger's requirements.
445  ASSERT(Assembler::kJSReturnSequenceLength <=
446  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
447 #endif
448  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
449  }
450 }
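The exit sequence is kept at a known minimum size so the debugger can later overwrite it with a call: the three instructions above take 7 bytes (3 + 1 + 3), and the int3 padding fills the sequence out to Assembler::kJSReturnSequenceLength, which the ASSERT then verifies. Schematically:
//   movq rsp, rbp   ; 3 bytes
//   pop  rbp        ; 1 byte
//   ret  k          ; 3 bytes
//   int3 ...        ; padding up to kJSReturnSequenceLength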
451 
452 
453 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
454  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
455 }
456 
457 
458 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
459  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
460  codegen()->GetVar(result_register(), var);
461 }
462 
463 
464 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
465  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
466  MemOperand operand = codegen()->VarOperand(var, result_register());
467  __ Push(operand);
468 }
469 
470 
471 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
472  codegen()->GetVar(result_register(), var);
473  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
474  codegen()->DoTest(this);
475 }
476 
477 
478 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
479 }
480 
481 
482 void FullCodeGenerator::AccumulatorValueContext::Plug(
483  Heap::RootListIndex index) const {
484  __ LoadRoot(result_register(), index);
485 }
486 
487 
488 void FullCodeGenerator::StackValueContext::Plug(
489  Heap::RootListIndex index) const {
490  __ PushRoot(index);
491 }
492 
493 
494 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
495  codegen()->PrepareForBailoutBeforeSplit(condition(),
496  true,
497  true_label_,
498  false_label_);
499  if (index == Heap::kUndefinedValueRootIndex ||
500  index == Heap::kNullValueRootIndex ||
501  index == Heap::kFalseValueRootIndex) {
502  if (false_label_ != fall_through_) __ jmp(false_label_);
503  } else if (index == Heap::kTrueValueRootIndex) {
504  if (true_label_ != fall_through_) __ jmp(true_label_);
505  } else {
506  __ LoadRoot(result_register(), index);
507  codegen()->DoTest(this);
508  }
509 }
510 
511 
512 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
513 }
514 
515 
516 void FullCodeGenerator::AccumulatorValueContext::Plug(
517  Handle<Object> lit) const {
518  if (lit->IsSmi()) {
519  __ SafeMove(result_register(), Smi::cast(*lit));
520  } else {
521  __ Move(result_register(), lit);
522  }
523 }
524 
525 
526 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
527  if (lit->IsSmi()) {
528  __ SafePush(Smi::cast(*lit));
529  } else {
530  __ Push(lit);
531  }
532 }
533 
534 
535 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
536  codegen()->PrepareForBailoutBeforeSplit(condition(),
537  true,
538  true_label_,
539  false_label_);
540  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
541  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
542  if (false_label_ != fall_through_) __ jmp(false_label_);
543  } else if (lit->IsTrue() || lit->IsJSObject()) {
544  if (true_label_ != fall_through_) __ jmp(true_label_);
545  } else if (lit->IsString()) {
546  if (String::cast(*lit)->length() == 0) {
547  if (false_label_ != fall_through_) __ jmp(false_label_);
548  } else {
549  if (true_label_ != fall_through_) __ jmp(true_label_);
550  }
551  } else if (lit->IsSmi()) {
552  if (Smi::cast(*lit)->value() == 0) {
553  if (false_label_ != fall_through_) __ jmp(false_label_);
554  } else {
555  if (true_label_ != fall_through_) __ jmp(true_label_);
556  }
557  } else {
558  // For simplicity we always test the accumulator register.
559  __ Move(result_register(), lit);
560  codegen()->DoTest(this);
561  }
562 }
563 
564 
565 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
566  Register reg) const {
567  ASSERT(count > 0);
568  __ Drop(count);
569 }
570 
571 
572 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
573  int count,
574  Register reg) const {
575  ASSERT(count > 0);
576  __ Drop(count);
577  __ Move(result_register(), reg);
578 }
579 
580 
581 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
582  Register reg) const {
583  ASSERT(count > 0);
584  if (count > 1) __ Drop(count - 1);
585  __ movp(Operand(rsp, 0), reg);
586 }
587 
588 
589 void FullCodeGenerator::TestContext::DropAndPlug(int count,
590  Register reg) const {
591  ASSERT(count > 0);
592  // For simplicity we always test the accumulator register.
593  __ Drop(count);
594  __ Move(result_register(), reg);
595  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
596  codegen()->DoTest(this);
597 }
598 
599 
600 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
601  Label* materialize_false) const {
602  ASSERT(materialize_true == materialize_false);
603  __ bind(materialize_true);
604 }
605 
606 
607 void FullCodeGenerator::AccumulatorValueContext::Plug(
608  Label* materialize_true,
609  Label* materialize_false) const {
610  Label done;
611  __ bind(materialize_true);
612  __ Move(result_register(), isolate()->factory()->true_value());
613  __ jmp(&done, Label::kNear);
614  __ bind(materialize_false);
615  __ Move(result_register(), isolate()->factory()->false_value());
616  __ bind(&done);
617 }
618 
619 
620 void FullCodeGenerator::StackValueContext::Plug(
621  Label* materialize_true,
622  Label* materialize_false) const {
623  Label done;
624  __ bind(materialize_true);
625  __ Push(isolate()->factory()->true_value());
626  __ jmp(&done, Label::kNear);
627  __ bind(materialize_false);
628  __ Push(isolate()->factory()->false_value());
629  __ bind(&done);
630 }
631 
632 
633 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
634  Label* materialize_false) const {
635  ASSERT(materialize_true == true_label_);
636  ASSERT(materialize_false == false_label_);
637 }
638 
639 
640 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
641 }
642 
643 
644 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
645  Heap::RootListIndex value_root_index =
646  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
647  __ LoadRoot(result_register(), value_root_index);
648 }
649 
650 
651 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
652  Heap::RootListIndex value_root_index =
653  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
654  __ PushRoot(value_root_index);
655 }
656 
657 
658 void FullCodeGenerator::TestContext::Plug(bool flag) const {
659  codegen()->PrepareForBailoutBeforeSplit(condition(),
660  true,
661  true_label_,
662  false_label_);
663  if (flag) {
664  if (true_label_ != fall_through_) __ jmp(true_label_);
665  } else {
666  if (false_label_ != fall_through_) __ jmp(false_label_);
667  }
668 }
669 
670 
671 void FullCodeGenerator::DoTest(Expression* condition,
672  Label* if_true,
673  Label* if_false,
674  Label* fall_through) {
675  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
676  CallIC(ic, condition->test_id());
677  __ testp(result_register(), result_register());
678  // The stub returns nonzero for true.
679  Split(not_zero, if_true, if_false, fall_through);
680 }
681 
682 
683 void FullCodeGenerator::Split(Condition cc,
684  Label* if_true,
685  Label* if_false,
686  Label* fall_through) {
687  if (if_false == fall_through) {
688  __ j(cc, if_true);
689  } else if (if_true == fall_through) {
690  __ j(NegateCondition(cc), if_false);
691  } else {
692  __ j(cc, if_true);
693  __ jmp(if_false);
694  }
695 }
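Split emits as few jumps as possible, depending on which label (if any) is the fall-through:
//   if_false == fall_through:  j(cc, if_true)                     // fall into the false case
//   if_true  == fall_through:  j(NegateCondition(cc), if_false)   // fall into the true case
//   neither:                   j(cc, if_true); jmp(if_false)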
696 
697 
698 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
699  ASSERT(var->IsStackAllocated());
700  // Offset is negative because higher indexes are at lower addresses.
701  int offset = -var->index() * kPointerSize;
702  // Adjust by a (parameter or local) base offset.
703  if (var->IsParameter()) {
704  offset += kFPOnStackSize + kPCOnStackSize +
705  (info_->scope()->num_parameters() - 1) * kPointerSize;
706  } else {
707  offset += JavaScriptFrameConstants::kLocal0Offset;
708  }
709  return Operand(rbp, offset);
710 }
711 
712 
713 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
714  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
715  if (var->IsContextSlot()) {
716  int context_chain_length = scope()->ContextChainLength(var->scope());
717  __ LoadContext(scratch, context_chain_length);
718  return ContextOperand(scratch, var->index());
719  } else {
720  return StackOperand(var);
721  }
722 }
723 
724 
725 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
726  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
727  MemOperand location = VarOperand(var, dest);
728  __ movp(dest, location);
729 }
730 
731 
732 void FullCodeGenerator::SetVar(Variable* var,
733  Register src,
734  Register scratch0,
735  Register scratch1) {
736  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
737  ASSERT(!scratch0.is(src));
738  ASSERT(!scratch0.is(scratch1));
739  ASSERT(!scratch1.is(src));
740  MemOperand location = VarOperand(var, scratch0);
741  __ movp(location, src);
742 
743  // Emit the write barrier code if the location is in the heap.
744  if (var->IsContextSlot()) {
745  int offset = Context::SlotOffset(var->index());
746  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
747  }
748 }
749 
750 
751 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
752  bool should_normalize,
753  Label* if_true,
754  Label* if_false) {
755  // Only prepare for bailouts before splits if we're in a test
756  // context. Otherwise, we let the Visit function deal with the
757  // preparation to avoid preparing with the same AST id twice.
758  if (!context()->IsTest() || !info_->IsOptimizable()) return;
759 
760  Label skip;
761  if (should_normalize) __ jmp(&skip, Label::kNear);
762  PrepareForBailout(expr, TOS_REG);
763  if (should_normalize) {
764  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
765  Split(equal, if_true, if_false, NULL);
766  __ bind(&skip);
767  }
768 }
769 
770 
771 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
772  // The variable in the declaration always resides in the current context.
773  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
774  if (generate_debug_code_) {
775  // Check that we're not inside a with or catch context.
776  __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
777  __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
778  __ Check(not_equal, kDeclarationInWithContext);
779  __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
780  __ Check(not_equal, kDeclarationInCatchContext);
781  }
782 }
783 
784 
785 void FullCodeGenerator::VisitVariableDeclaration(
786  VariableDeclaration* declaration) {
787  // If it was not possible to allocate the variable at compile time, we
788  // need to "declare" it at runtime to make sure it actually exists in the
789  // local context.
790  VariableProxy* proxy = declaration->proxy();
791  VariableMode mode = declaration->mode();
792  Variable* variable = proxy->var();
793  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
794  switch (variable->location()) {
795  case Variable::UNALLOCATED:
796  globals_->Add(variable->name(), zone());
797  globals_->Add(variable->binding_needs_init()
798  ? isolate()->factory()->the_hole_value()
799  : isolate()->factory()->undefined_value(),
800  zone());
801  break;
802 
803  case Variable::PARAMETER:
804  case Variable::LOCAL:
805  if (hole_init) {
806  Comment cmnt(masm_, "[ VariableDeclaration");
807  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
808  __ movp(StackOperand(variable), kScratchRegister);
809  }
810  break;
811 
812  case Variable::CONTEXT:
813  if (hole_init) {
814  Comment cmnt(masm_, "[ VariableDeclaration");
815  EmitDebugCheckDeclarationContext(variable);
816  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
817  __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
818  // No write barrier since the hole value is in old space.
819  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
820  }
821  break;
822 
823  case Variable::LOOKUP: {
824  Comment cmnt(masm_, "[ VariableDeclaration");
825  __ Push(rsi);
826  __ Push(variable->name());
827  // Declaration nodes are always introduced in one of four modes.
828  ASSERT(IsDeclaredVariableMode(mode));
829  PropertyAttributes attr =
830  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
831  __ Push(Smi::FromInt(attr));
832  // Push initial value, if any.
833  // Note: For variables we must not push an initial value (such as
834  // 'undefined') because we may have a (legal) redeclaration and we
835  // must not destroy the current value.
836  if (hole_init) {
837  __ PushRoot(Heap::kTheHoleValueRootIndex);
838  } else {
839  __ Push(Smi::FromInt(0)); // Indicates no initial value.
840  }
841  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
842  break;
843  }
844  }
845 }
846 
847 
848 void FullCodeGenerator::VisitFunctionDeclaration(
849  FunctionDeclaration* declaration) {
850  VariableProxy* proxy = declaration->proxy();
851  Variable* variable = proxy->var();
852  switch (variable->location()) {
853  case Variable::UNALLOCATED: {
854  globals_->Add(variable->name(), zone());
855  Handle<SharedFunctionInfo> function =
856  Compiler::BuildFunctionInfo(declaration->fun(), script());
857  // Check for stack-overflow exception.
858  if (function.is_null()) return SetStackOverflow();
859  globals_->Add(function, zone());
860  break;
861  }
862 
863  case Variable::PARAMETER:
864  case Variable::LOCAL: {
865  Comment cmnt(masm_, "[ FunctionDeclaration");
866  VisitForAccumulatorValue(declaration->fun());
867  __ movp(StackOperand(variable), result_register());
868  break;
869  }
870 
871  case Variable::CONTEXT: {
872  Comment cmnt(masm_, "[ FunctionDeclaration");
873  EmitDebugCheckDeclarationContext(variable);
874  VisitForAccumulatorValue(declaration->fun());
875  __ movp(ContextOperand(rsi, variable->index()), result_register());
876  int offset = Context::SlotOffset(variable->index());
877  // We know that we have written a function, which is not a smi.
878  __ RecordWriteContextSlot(rsi,
879  offset,
880  result_register(),
881  rcx,
882  kDontSaveFPRegs,
883  EMIT_REMEMBERED_SET,
884  OMIT_SMI_CHECK);
885  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
886  break;
887  }
888 
889  case Variable::LOOKUP: {
890  Comment cmnt(masm_, "[ FunctionDeclaration");
891  __ Push(rsi);
892  __ Push(variable->name());
893  __ Push(Smi::FromInt(NONE));
894  VisitForStackValue(declaration->fun());
895  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
896  break;
897  }
898  }
899 }
900 
901 
902 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
903  Variable* variable = declaration->proxy()->var();
904  ASSERT(variable->location() == Variable::CONTEXT);
905  ASSERT(variable->interface()->IsFrozen());
906 
907  Comment cmnt(masm_, "[ ModuleDeclaration");
908  EmitDebugCheckDeclarationContext(variable);
909 
910  // Load instance object.
911  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
912  __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
913  __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));
914 
915  // Assign it.
916  __ movp(ContextOperand(rsi, variable->index()), rax);
917  // We know that we have written a module, which is not a smi.
918  __ RecordWriteContextSlot(rsi,
919  Context::SlotOffset(variable->index()),
920  rax,
921  rcx,
922  kDontSaveFPRegs,
923  EMIT_REMEMBERED_SET,
924  OMIT_SMI_CHECK);
925  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
926 
927  // Traverse into body.
928  Visit(declaration->module());
929 }
930 
931 
932 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
933  VariableProxy* proxy = declaration->proxy();
934  Variable* variable = proxy->var();
935  switch (variable->location()) {
936  case Variable::UNALLOCATED:
937  // TODO(rossberg)
938  break;
939 
940  case Variable::CONTEXT: {
941  Comment cmnt(masm_, "[ ImportDeclaration");
942  EmitDebugCheckDeclarationContext(variable);
943  // TODO(rossberg)
944  break;
945  }
946 
947  case Variable::PARAMETER:
948  case Variable::LOCAL:
949  case Variable::LOOKUP:
950  UNREACHABLE();
951  }
952 }
953 
954 
955 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
956  // TODO(rossberg)
957 }
958 
959 
960 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
961  // Call the runtime to declare the globals.
962  __ Push(rsi); // The context is the first argument.
963  __ Push(pairs);
964  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
965  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
966  // Return value is ignored.
967 }
968 
969 
970 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
971  // Call the runtime to declare the modules.
972  __ Push(descriptions);
973  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
974  // Return value is ignored.
975 }
976 
977 
978 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
979  Comment cmnt(masm_, "[ SwitchStatement");
980  Breakable nested_statement(this, stmt);
981  SetStatementPosition(stmt);
982 
983  // Keep the switch value on the stack until a case matches.
984  VisitForStackValue(stmt->tag());
985  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
986 
987  ZoneList<CaseClause*>* clauses = stmt->cases();
988  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
989 
990  Label next_test; // Recycled for each test.
991  // Compile all the tests with branches to their bodies.
992  for (int i = 0; i < clauses->length(); i++) {
993  CaseClause* clause = clauses->at(i);
994  clause->body_target()->Unuse();
995 
996  // The default is not a test, but remember it as final fall through.
997  if (clause->is_default()) {
998  default_clause = clause;
999  continue;
1000  }
1001 
1002  Comment cmnt(masm_, "[ Case comparison");
1003  __ bind(&next_test);
1004  next_test.Unuse();
1005 
1006  // Compile the label expression.
1007  VisitForAccumulatorValue(clause->label());
1008 
1009  // Perform the comparison as if via '==='.
1010  __ movp(rdx, Operand(rsp, 0)); // Switch value.
1011  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1012  JumpPatchSite patch_site(masm_);
1013  if (inline_smi_code) {
1014  Label slow_case;
1015  __ movp(rcx, rdx);
1016  __ orp(rcx, rax);
1017  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
1018 
1019  __ cmpp(rdx, rax);
1020  __ j(not_equal, &next_test);
1021  __ Drop(1); // Switch value is no longer needed.
1022  __ jmp(clause->body_target());
1023  __ bind(&slow_case);
1024  }
1025 
1026  // Record position before stub call for type feedback.
1027  SetSourcePosition(clause->position());
1028  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1029  CallIC(ic, clause->CompareId());
1030  patch_site.EmitPatchInfo();
1031 
1032  Label skip;
1033  __ jmp(&skip, Label::kNear);
1034  PrepareForBailout(clause, TOS_REG);
1035  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1036  __ j(not_equal, &next_test);
1037  __ Drop(1);
1038  __ jmp(clause->body_target());
1039  __ bind(&skip);
1040 
1041  __ testp(rax, rax);
1042  __ j(not_equal, &next_test);
1043  __ Drop(1); // Switch value is no longer needed.
1044  __ jmp(clause->body_target());
1045  }
1046 
1047  // Discard the test value and jump to the default if present, otherwise to
1048  // the end of the statement.
1049  __ bind(&next_test);
1050  __ Drop(1); // Switch value is no longer needed.
1051  if (default_clause == NULL) {
1052  __ jmp(nested_statement.break_label());
1053  } else {
1054  __ jmp(default_clause->body_target());
1055  }
1056 
1057  // Compile all the case bodies.
1058  for (int i = 0; i < clauses->length(); i++) {
1059  Comment cmnt(masm_, "[ Case body");
1060  CaseClause* clause = clauses->at(i);
1061  __ bind(clause->body_target());
1062  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1063  VisitStatements(clause->statements());
1064  }
1065 
1066  __ bind(nested_statement.break_label());
1067  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1068 }
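Switch statements are compiled in two passes: first the comparisons, where each clause's label is compared against the switch value kept on the stack (with an inline smi fast path ahead of the CompareIC call), then the clause bodies in source order so control can fall through from one body into the next. For example:
//   switch (x) { case a: s1; case b: s2; default: s3 }
//   tests:  x === a -> body_a,  x === b -> body_b,  otherwise -> default
//   bodies: body_a: s1   body_b: s2   default: s3   (fall-through between bodies preserved)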
1069 
1070 
1071 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1072  Comment cmnt(masm_, "[ ForInStatement");
1073  int slot = stmt->ForInFeedbackSlot();
1074  SetStatementPosition(stmt);
1075 
1076  Label loop, exit;
1077  ForIn loop_statement(this, stmt);
1078  increment_loop_depth();
1079 
1080  // Get the object to enumerate over. If the object is null or undefined, skip
1081  // over the loop. See ECMA-262 version 5, section 12.6.4.
1082  VisitForAccumulatorValue(stmt->enumerable());
1083  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1084  __ j(equal, &exit);
1085  Register null_value = rdi;
1086  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1087  __ cmpp(rax, null_value);
1088  __ j(equal, &exit);
1089 
1090  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1091 
1092  // Convert the object to a JS object.
1093  Label convert, done_convert;
1094  __ JumpIfSmi(rax, &convert);
1095  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1096  __ j(above_equal, &done_convert);
1097  __ bind(&convert);
1098  __ Push(rax);
1099  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1100  __ bind(&done_convert);
1101  __ Push(rax);
1102 
1103  // Check for proxies.
1104  Label call_runtime;
1105  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1106  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1107  __ j(below_equal, &call_runtime);
1108 
1109  // Check cache validity in generated code. This is a fast case for
1110  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1111  // guarantee cache validity, call the runtime system to check cache
1112  // validity or get the property names in a fixed array.
1113  __ CheckEnumCache(null_value, &call_runtime);
1114 
1115  // The enum cache is valid. Load the map of the object being
1116  // iterated over and use the cache for the iteration.
1117  Label use_cache;
1118  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1119  __ jmp(&use_cache, Label::kNear);
1120 
1121  // Get the set of properties to enumerate.
1122  __ bind(&call_runtime);
1123  __ Push(rax); // Duplicate the enumerable object on the stack.
1124  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1125 
1126  // If we got a map from the runtime call, we can do a fast
1127  // modification check. Otherwise, we got a fixed array, and we have
1128  // to do a slow check.
1129  Label fixed_array;
1130  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1131  Heap::kMetaMapRootIndex);
1132  __ j(not_equal, &fixed_array);
1133 
1134  // We got a map in register rax. Get the enumeration cache from it.
1135  __ bind(&use_cache);
1136 
1137  Label no_descriptors;
1138 
1139  __ EnumLength(rdx, rax);
1140  __ Cmp(rdx, Smi::FromInt(0));
1141  __ j(equal, &no_descriptors);
1142 
1143  __ LoadInstanceDescriptors(rax, rcx);
1144  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1145  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1146 
1147  // Set up the four remaining stack slots.
1148  __ Push(rax); // Map.
1149  __ Push(rcx); // Enumeration cache.
1150  __ Push(rdx); // Number of valid entries for the map in the enum cache.
1151  __ Push(Smi::FromInt(0)); // Initial index.
1152  __ jmp(&loop);
1153 
1154  __ bind(&no_descriptors);
1155  __ addp(rsp, Immediate(kPointerSize));
1156  __ jmp(&exit);
1157 
1158  // We got a fixed array in register rax. Iterate through that.
1159  Label non_proxy;
1160  __ bind(&fixed_array);
1161 
1162  Handle<Object> feedback = Handle<Object>(
1163  Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1164  isolate());
1165  StoreFeedbackVectorSlot(slot, feedback);
1166 
1167  // No need for a write barrier, we are storing a Smi in the feedback vector.
1168  __ Move(rbx, FeedbackVector());
1169  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1170  Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
1171  __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1172  __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1173  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1174  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1175  __ j(above, &non_proxy);
1176  __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1177  __ bind(&non_proxy);
1178  __ Push(rbx); // Smi
1179  __ Push(rax); // Array
1180  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1181  __ Push(rax); // Fixed array length (as smi).
1182  __ Push(Smi::FromInt(0)); // Initial index.
1183 
1184  // Generate code for doing the condition check.
1185  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1186  __ bind(&loop);
1187  __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1188  __ cmpp(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1189  __ j(above_equal, loop_statement.break_label());
1190 
1191  // Get the current entry of the array into register rbx.
1192  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
1193  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1194  __ movp(rbx, FieldOperand(rbx,
1195  index.reg,
1196  index.scale,
1197  FixedArray::kHeaderSize));
1198 
1199  // Get the expected map from the stack or a smi in the
1200  // permanent slow case into register rdx.
1201  __ movp(rdx, Operand(rsp, 3 * kPointerSize));
1202 
1203  // Check if the expected map still matches that of the enumerable.
1204  // If not, we may have to filter the key.
1205  Label update_each;
1206  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
1207  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1208  __ j(equal, &update_each, Label::kNear);
1209 
1210  // For proxies, no filtering is done.
1211  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1212  __ Cmp(rdx, Smi::FromInt(0));
1213  __ j(equal, &update_each, Label::kNear);
1214 
1215  // Convert the entry to a string or null if it isn't a property
1216  // anymore. If the property has been removed while iterating, we
1217  // just skip it.
1218  __ Push(rcx); // Enumerable.
1219  __ Push(rbx); // Current entry.
1220  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1221  __ Cmp(rax, Smi::FromInt(0));
1222  __ j(equal, loop_statement.continue_label());
1223  __ movp(rbx, rax);
1224 
1225  // Update the 'each' property or variable from the possibly filtered
1226  // entry in register rbx.
1227  __ bind(&update_each);
1228  __ movp(result_register(), rbx);
1229  // Perform the assignment as if via '='.
1230  { EffectContext context(this);
1231  EmitAssignment(stmt->each());
1232  }
1233 
1234  // Generate code for the body of the loop.
1235  Visit(stmt->body());
1236 
1237  // Generate code for going to the next element by incrementing the
1238  // index (smi) stored on top of the stack.
1239  __ bind(loop_statement.continue_label());
1240  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1241 
1242  EmitBackEdgeBookkeeping(stmt, &loop);
1243  __ jmp(&loop);
1244 
1245  // Remove the pointers stored on the stack.
1246  __ bind(loop_statement.break_label());
1247  __ addp(rsp, Immediate(5 * kPointerSize));
1248 
1249  // Exit and decrement the loop depth.
1250  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1251  __ bind(&exit);
1252  decrement_loop_depth();
1253 }
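While the loop body runs, the generated code keeps five words on the stack, matching the addp(rsp, Immediate(5 * kPointerSize)) cleanup at break_label:
//   [rsp + 4 * kPointerSize]  the enumerable object
//   [rsp + 3 * kPointerSize]  its map, or a smi marking the slow/proxy case
//   [rsp + 2 * kPointerSize]  the array of keys (enum cache or fixed array)
//   [rsp + 1 * kPointerSize]  the number of keys, as a smi
//   [rsp + 0 * kPointerSize]  the current index, as a smi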
1254 
1255 
1256 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1257  Comment cmnt(masm_, "[ ForOfStatement");
1258  SetStatementPosition(stmt);
1259 
1260  Iteration loop_statement(this, stmt);
1261  increment_loop_depth();
1262 
1263  // var iterator = iterable[@@iterator]()
1264  VisitForAccumulatorValue(stmt->assign_iterator());
1265 
1266  // As with for-in, skip the loop if the iterator is null or undefined.
1267  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1268  __ j(equal, loop_statement.break_label());
1269  __ CompareRoot(rax, Heap::kNullValueRootIndex);
1270  __ j(equal, loop_statement.break_label());
1271 
1272  // Convert the iterator to a JS object.
1273  Label convert, done_convert;
1274  __ JumpIfSmi(rax, &convert);
1275  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1276  __ j(above_equal, &done_convert);
1277  __ bind(&convert);
1278  __ Push(rax);
1279  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1280  __ bind(&done_convert);
1281 
1282  // Loop entry.
1283  __ bind(loop_statement.continue_label());
1284 
1285  // result = iterator.next()
1286  VisitForEffect(stmt->next_result());
1287 
1288  // if (result.done) break;
1289  Label result_not_done;
1290  VisitForControl(stmt->result_done(),
1291  loop_statement.break_label(),
1292  &result_not_done,
1293  &result_not_done);
1294  __ bind(&result_not_done);
1295 
1296  // each = result.value
1297  VisitForEffect(stmt->assign_each());
1298 
1299  // Generate code for the body of the loop.
1300  Visit(stmt->body());
1301 
1302  // Check stack before looping.
1303  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1304  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1305  __ jmp(loop_statement.continue_label());
1306 
1307  // Exit and decrement the loop depth.
1308  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1309  __ bind(loop_statement.break_label());
1310  decrement_loop_depth();
1311 }
1312 
1313 
1314 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1315  bool pretenure) {
1316  // Use the fast case closure allocation code that allocates in new
1317  // space for nested functions that don't need literals cloning. If
1318  // we're running with the --always-opt or the --prepare-always-opt
1319  // flag, we need to use the runtime function so that the new function
1320  // we are creating here gets a chance to have its code optimized and
1321  // doesn't just get a copy of the existing unoptimized code.
1322  if (!FLAG_always_opt &&
1323  !FLAG_prepare_always_opt &&
1324  !pretenure &&
1325  scope()->is_function_scope() &&
1326  info->num_literals() == 0) {
1327  FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1328  __ Move(rbx, info);
1329  __ CallStub(&stub);
1330  } else {
1331  __ Push(rsi);
1332  __ Push(info);
1333  __ Push(pretenure
1334  ? isolate()->factory()->true_value()
1335  : isolate()->factory()->false_value());
1336  __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1337  }
1338  context()->Plug(rax);
1339 }
1340 
1341 
1342 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1343  Comment cmnt(masm_, "[ VariableProxy");
1344  EmitVariableLoad(expr);
1345 }
1346 
1347 
1348 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1349  TypeofState typeof_state,
1350  Label* slow) {
1351  Register context = rsi;
1352  Register temp = rdx;
1353 
1354  Scope* s = scope();
1355  while (s != NULL) {
1356  if (s->num_heap_slots() > 0) {
1357  if (s->calls_sloppy_eval()) {
1358  // Check that extension is NULL.
1359  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1360  Immediate(0));
1361  __ j(not_equal, slow);
1362  }
1363  // Load next context in chain.
1364  __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1365  // Walk the rest of the chain without clobbering rsi.
1366  context = temp;
1367  }
1368  // If no outer scope calls eval, we do not need to check more
1369  // context extensions. If we have reached an eval scope, we check
1370  // all extensions from this point.
1371  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1372  s = s->outer_scope();
1373  }
1374 
1375  if (s != NULL && s->is_eval_scope()) {
1376  // Loop up the context chain. There is no frame effect so it is
1377  // safe to use raw labels here.
1378  Label next, fast;
1379  if (!context.is(temp)) {
1380  __ movp(temp, context);
1381  }
1382  // Load map for comparison into register, outside loop.
1383  __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
1384  __ bind(&next);
1385  // Terminate at native context.
1386  __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1387  __ j(equal, &fast, Label::kNear);
1388  // Check that extension is NULL.
1389  __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1390  __ j(not_equal, slow);
1391  // Load next context in chain.
1392  __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1393  __ jmp(&next);
1394  __ bind(&fast);
1395  }
1396 
1397  // All extension objects were empty and it is safe to use a global
1398  // load IC call.
1399  __ movp(rax, GlobalObjectOperand());
1400  __ Move(rcx, var->name());
1401  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1402  ? NOT_CONTEXTUAL
1403  : CONTEXTUAL;
1404  CallLoadIC(mode);
1405 }
1406 
1407 
1408 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1409  Label* slow) {
1410  ASSERT(var->IsContextSlot());
1411  Register context = rsi;
1412  Register temp = rbx;
1413 
1414  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1415  if (s->num_heap_slots() > 0) {
1416  if (s->calls_sloppy_eval()) {
1417  // Check that extension is NULL.
1418  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
1419  Immediate(0));
1420  __ j(not_equal, slow);
1421  }
1422  __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1423  // Walk the rest of the chain without clobbering rsi.
1424  context = temp;
1425  }
1426  }
1427  // Check that last extension is NULL.
1428  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1429  __ j(not_equal, slow);
1430 
1431  // This function is used only for loads, not stores, so it's safe to
1432  // return an rsi-based operand (the write barrier cannot be allowed to
1433  // destroy the rsi register).
1434  return ContextOperand(context, var->index());
1435 }
1436 
1437 
1438 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1439  TypeofState typeof_state,
1440  Label* slow,
1441  Label* done) {
1442  // Generate fast-case code for variables that might be shadowed by
1443  // eval-introduced variables. Eval is used a lot without
1444  // introducing variables. In those cases, we do not want to
1445  // perform a runtime call for all variables in the scope
1446  // containing the eval.
1447  if (var->mode() == DYNAMIC_GLOBAL) {
1448  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1449  __ jmp(done);
1450  } else if (var->mode() == DYNAMIC_LOCAL) {
1451  Variable* local = var->local_if_not_shadowed();
1452  __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
1453  if (local->mode() == LET || local->mode() == CONST ||
1454  local->mode() == CONST_LEGACY) {
1455  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1456  __ j(not_equal, done);
1457  if (local->mode() == CONST_LEGACY) {
1458  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1459  } else { // LET || CONST
1460  __ Push(var->name());
1461  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1462  }
1463  }
1464  __ jmp(done);
1465  }
1466 }
1467 
1468 
1469 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1470  // Record position before possible IC call.
1471  SetSourcePosition(proxy->position());
1472  Variable* var = proxy->var();
1473 
1474  // Three cases: global variables, lookup variables, and all other types of
1475  // variables.
1476  switch (var->location()) {
1477  case Variable::UNALLOCATED: {
1478  Comment cmnt(masm_, "[ Global variable");
1479  // Use inline caching. Variable name is passed in rcx and the global
1480  // object on the stack.
1481  __ Move(rcx, var->name());
1482  __ movp(rax, GlobalObjectOperand());
1483  CallLoadIC(CONTEXTUAL);
1484  context()->Plug(rax);
1485  break;
1486  }
1487 
1488  case Variable::PARAMETER:
1489  case Variable::LOCAL:
1490  case Variable::CONTEXT: {
1491  Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
1492  : "[ Stack slot");
1493  if (var->binding_needs_init()) {
1494  // var->scope() may be NULL when the proxy is located in eval code and
1495  // refers to a potential outside binding. Currently those bindings are
1496  // always looked up dynamically, i.e. in that case
1497  // var->location() == LOOKUP.
1498  // always holds.
1499  ASSERT(var->scope() != NULL);
1500 
1501  // Check if the binding really needs an initialization check. The check
1502  // can be skipped in the following situation: we have a LET or CONST
1503  // binding in harmony mode, both the Variable and the VariableProxy have
1504  // the same declaration scope (i.e. they are both in global code, in the
1505  // same function or in the same eval code) and the VariableProxy is in
1506  // the source physically located after the initializer of the variable.
1507  //
1508  // We cannot skip any initialization checks for CONST in non-harmony
1509  // mode because const variables may be declared but never initialized:
1510  // if (false) { const x; }; var y = x;
1511  //
1512  // The condition on the declaration scopes is a conservative check for
1513  // nested functions that access a binding and are called before the
1514  // binding is initialized:
1515  // function() { f(); let x = 1; function f() { x = 2; } }
1516  //
1517  bool skip_init_check;
1518  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1519  skip_init_check = false;
1520  } else {
1521  // Check that we always have valid source position.
1522  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1523  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1524  skip_init_check = var->mode() != CONST_LEGACY &&
1525  var->initializer_position() < proxy->position();
1526  }
1527 
1528  if (!skip_init_check) {
1529  // Let and const need a read barrier.
1530  Label done;
1531  GetVar(rax, var);
1532  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1533  __ j(not_equal, &done, Label::kNear);
1534  if (var->mode() == LET || var->mode() == CONST) {
1535  // Throw a reference error when using an uninitialized let/const
1536  // binding in harmony mode.
1537  __ Push(var->name());
1538  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1539  } else {
1540  // Uninitialized const bindings outside of harmony mode are unholed.
1541  ASSERT(var->mode() == CONST_LEGACY);
1542  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1543  }
1544  __ bind(&done);
1545  context()->Plug(rax);
1546  break;
1547  }
1548  }
1549  context()->Plug(var);
1550  break;
1551  }
1552 
1553  case Variable::LOOKUP: {
1554  Comment cmnt(masm_, "[ Lookup slot");
1555  Label done, slow;
1556  // Generate code for loading from variables potentially shadowed
1557  // by eval-introduced variables.
1558  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1559  __ bind(&slow);
1560  __ Push(rsi); // Context.
1561  __ Push(var->name());
1562  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1563  __ bind(&done);
1564  context()->Plug(rax);
1565  break;
1566  }
1567  }
1568 }
1569 
1570 
1571 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1572  Comment cmnt(masm_, "[ RegExpLiteral");
1573  Label materialized;
1574  // Registers will be used as follows:
1575  // rdi = JS function.
1576  // rcx = literals array.
1577  // rbx = regexp literal.
1578  // rax = regexp literal clone.
1579  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1580  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1581  int literal_offset =
1582  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1583  __ movp(rbx, FieldOperand(rcx, literal_offset));
1584  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1585  __ j(not_equal, &materialized, Label::kNear);
1586 
1587  // Create regexp literal using runtime function
1588  // Result will be in rax.
1589  __ Push(rcx);
1590  __ Push(Smi::FromInt(expr->literal_index()));
1591  __ Push(expr->pattern());
1592  __ Push(expr->flags());
1593  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1594  __ movp(rbx, rax);
1595 
1596  __ bind(&materialized);
1597  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1598  Label allocated, runtime_allocate;
1599  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1600  __ jmp(&allocated);
1601 
1602  __ bind(&runtime_allocate);
1603  __ Push(rbx);
1604  __ Push(Smi::FromInt(size));
1605  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1606  __ Pop(rbx);
1607 
1608  __ bind(&allocated);
1609  // Copy the content into the newly allocated memory.
1610  // (Unroll copy loop once for better throughput).
1611  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1612  __ movp(rdx, FieldOperand(rbx, i));
1613  __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1614  __ movp(FieldOperand(rax, i), rdx);
1615  __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1616  }
1617  if ((size % (2 * kPointerSize)) != 0) {
1618  __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
1619  __ movp(FieldOperand(rax, size - kPointerSize), rdx);
1620  }
1621  context()->Plug(rax);
1622 }
1623 
1624 
1625 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1626  if (expression == NULL) {
1627  __ PushRoot(Heap::kNullValueRootIndex);
1628  } else {
1629  VisitForStackValue(expression);
1630  }
1631 }
1632 
1633 
1634 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1635  Comment cmnt(masm_, "[ ObjectLiteral");
1636 
1637  expr->BuildConstantProperties(isolate());
1638  Handle<FixedArray> constant_properties = expr->constant_properties();
1639  int flags = expr->fast_elements()
1640  ? ObjectLiteral::kFastElements
1641  : ObjectLiteral::kNoFlags;
1642  flags |= expr->has_function()
1643  ? ObjectLiteral::kHasFunction
1644  : ObjectLiteral::kNoFlags;
1645  int properties_count = constant_properties->length() / 2;
1646  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1647  flags != ObjectLiteral::kFastElements ||
1648  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1649  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1650  __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1651  __ Push(Smi::FromInt(expr->literal_index()));
1652  __ Push(constant_properties);
1653  __ Push(Smi::FromInt(flags));
1654  __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1655  } else {
1656  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1657  __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1658  __ Move(rbx, Smi::FromInt(expr->literal_index()));
1659  __ Move(rcx, constant_properties);
1660  __ Move(rdx, Smi::FromInt(flags));
1661  FastCloneShallowObjectStub stub(properties_count);
1662  __ CallStub(&stub);
1663  }
1664 
1665  // If result_saved is true the result is on top of the stack. If
1666  // result_saved is false the result is in rax.
1667  bool result_saved = false;
1668 
1669  // Mark all computed expressions that are bound to a key that
1670  // is shadowed by a later occurrence of the same key. For the
1671  // marked expressions, no store code is emitted.
1672  expr->CalculateEmitStore(zone());
1673 
1674  AccessorTable accessor_table(zone());
1675  for (int i = 0; i < expr->properties()->length(); i++) {
1676  ObjectLiteral::Property* property = expr->properties()->at(i);
1677  if (property->IsCompileTimeValue()) continue;
1678 
1679  Literal* key = property->key();
1680  Expression* value = property->value();
1681  if (!result_saved) {
1682  __ Push(rax); // Save result on the stack
1683  result_saved = true;
1684  }
1685  switch (property->kind()) {
1686  case ObjectLiteral::Property::CONSTANT:
1687  UNREACHABLE();
1688  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1690  // Fall through.
1691  case ObjectLiteral::Property::COMPUTED:
1692  if (key->value()->IsInternalizedString()) {
1693  if (property->emit_store()) {
1694  VisitForAccumulatorValue(value);
1695  __ Move(rcx, key->value());
1696  __ movp(rdx, Operand(rsp, 0));
1697  CallStoreIC(key->LiteralFeedbackId());
1698  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1699  } else {
1700  VisitForEffect(value);
1701  }
1702  break;
1703  }
1704  __ Push(Operand(rsp, 0)); // Duplicate receiver.
1705  VisitForStackValue(key);
1706  VisitForStackValue(value);
1707  if (property->emit_store()) {
1708  __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1709  __ CallRuntime(Runtime::kSetProperty, 4);
1710  } else {
1711  __ Drop(3);
1712  }
1713  break;
1714  case ObjectLiteral::Property::PROTOTYPE:
1715  __ Push(Operand(rsp, 0)); // Duplicate receiver.
1716  VisitForStackValue(value);
1717  if (property->emit_store()) {
1718  __ CallRuntime(Runtime::kSetPrototype, 2);
1719  } else {
1720  __ Drop(2);
1721  }
1722  break;
1723  case ObjectLiteral::Property::GETTER:
1724  accessor_table.lookup(key)->second->getter = value;
1725  break;
1726  case ObjectLiteral::Property::SETTER:
1727  accessor_table.lookup(key)->second->setter = value;
1728  break;
1729  }
1730  }
1731 
1732  // Emit code to define accessors, using only a single call to the runtime for
1733  // each pair of corresponding getters and setters.
1734  for (AccessorTable::Iterator it = accessor_table.begin();
1735  it != accessor_table.end();
1736  ++it) {
1737  __ Push(Operand(rsp, 0)); // Duplicate receiver.
1738  VisitForStackValue(it->first);
1739  EmitAccessor(it->second->getter);
1740  EmitAccessor(it->second->setter);
1741  __ Push(Smi::FromInt(NONE));
1742  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1743  }
1744 
1745  if (expr->has_function()) {
1746  ASSERT(result_saved);
1747  __ Push(Operand(rsp, 0));
1748  __ CallRuntime(Runtime::kToFastProperties, 1);
1749  }
1750 
1751  if (result_saved) {
1752  context()->PlugTOS();
1753  } else {
1754  context()->Plug(rax);
1755  }
1756 }
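The accessor table built above groups a literal's getters and setters by key, so each key costs only one kDefineOrRedefineAccessorProperty call, with null standing in for a missing half of the pair (see EmitAccessor). A rough sketch of that grouping with invented C++ types, not V8's:

#include <iostream>
#include <map>
#include <string>

// Stand-in for the getter/setter expressions collected per key.
struct AccessorPair {
  std::string getter;  // empty means "not present" (pushed as null above)
  std::string setter;
};

int main() {
  std::map<std::string, AccessorPair> accessor_table;
  // get x() {...}, set x(v) {...}, get y() {...}
  accessor_table["x"].getter = "getX";
  accessor_table["x"].setter = "setX";
  accessor_table["y"].getter = "getY";

  // One define-accessor call per key instead of one per accessor.
  for (const auto& [key, pair] : accessor_table) {
    std::cout << "DefineAccessor(" << key << ", "
              << (pair.getter.empty() ? "null" : pair.getter) << ", "
              << (pair.setter.empty() ? "null" : pair.setter) << ")\n";
  }
}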
1757 
1758 
1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1760  Comment cmnt(masm_, "[ ArrayLiteral");
1761 
1762  expr->BuildConstantElements(isolate());
1763  int flags = expr->depth() == 1
1764  ? ArrayLiteral::kShallowElements
1765  : ArrayLiteral::kNoFlags;
1766 
1767  ZoneList<Expression*>* subexprs = expr->values();
1768  int length = subexprs->length();
1769  Handle<FixedArray> constant_elements = expr->constant_elements();
1770  ASSERT_EQ(2, constant_elements->length());
1771  ElementsKind constant_elements_kind =
1772  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1773  bool has_constant_fast_elements =
1774  IsFastObjectElementsKind(constant_elements_kind);
1775  Handle<FixedArrayBase> constant_elements_values(
1776  FixedArrayBase::cast(constant_elements->get(1)));
1777 
1778  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1779  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1780  // If the only consumer of allocation sites is elements-kind transitioning,
1781  // tracking can be turned off when there is nothing left to transition to.
1782  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1783  }
1784 
1785  Heap* heap = isolate()->heap();
1786  if (has_constant_fast_elements &&
1787  constant_elements_values->map() == heap->fixed_cow_array_map()) {
1788  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1789  // change, so it's possible to specialize the stub in advance.
1790  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1793  __ Move(rbx, Smi::FromInt(expr->literal_index()));
1794  __ Move(rcx, constant_elements);
1795  FastCloneShallowArrayStub stub(
1797  allocation_site_mode,
1798  length);
1799  __ CallStub(&stub);
1800  } else if (expr->depth() > 1 || Serializer::enabled() ||
1804  __ Push(Smi::FromInt(expr->literal_index()));
1805  __ Push(constant_elements);
1806  __ Push(Smi::FromInt(flags));
1807  __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1808  } else {
1809  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1810  FLAG_smi_only_arrays);
1813 
1814  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1815  // change, so it's possible to specialize the stub in advance.
1816  if (has_constant_fast_elements) {
1818  }
1819 
1822  __ Move(rbx, Smi::FromInt(expr->literal_index()));
1823  __ Move(rcx, constant_elements);
1824  FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1825  __ CallStub(&stub);
1826  }
1827 
1828  bool result_saved = false; // Is the result saved to the stack?
1829 
1830  // Emit code to evaluate all the non-constant subexpressions and to store
1831  // them into the newly cloned array.
1832  for (int i = 0; i < length; i++) {
1833  Expression* subexpr = subexprs->at(i);
1834  // If the subexpression is a literal or a simple materialized literal it
1835  // is already set in the cloned array.
1836  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1837 
1838  if (!result_saved) {
1839  __ Push(rax); // array literal
1840  __ Push(Smi::FromInt(expr->literal_index()));
1841  result_saved = true;
1842  }
1843  VisitForAccumulatorValue(subexpr);
1844 
1845  if (IsFastObjectElementsKind(constant_elements_kind)) {
1846  // Fast-case array literals with an ElementsKind of FAST_*_ELEMENTS cannot
1847  // transition, so there is no need to call the runtime stub.
1848  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1849  __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1851  // Store the subexpression value in the array's elements.
1852  __ movp(FieldOperand(rbx, offset), result_register());
1853  // Update the write barrier for the array store.
1854  __ RecordWriteField(rbx, offset, result_register(), rcx,
1858  } else {
1859  // Store the subexpression value in the array's elements.
1860  __ Move(rcx, Smi::FromInt(i));
1861  StoreArrayLiteralElementStub stub;
1862  __ CallStub(&stub);
1863  }
1864 
1865  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1866  }
1867 
1868  if (result_saved) {
1869  __ addp(rsp, Immediate(kPointerSize)); // literal index
1870  context()->PlugTOS();
1871  } else {
1872  context()->Plug(rax);
1873  }
1874 }
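In the element loop above, subexpressions that are compile-time constants are skipped because the cloned boilerplate already contains them; only the remaining elements get a store. A small sketch of that skip-and-store shape (types invented for illustration):

#include <optional>
#include <vector>

// nullopt marks an element whose value the clone already holds (a compile-time
// constant); any other entry is the evaluated, non-constant subexpression.
// Precondition (sketch): clone->size() >= elements.size().
static void FillArrayLiteral(std::vector<int>* clone,
                             const std::vector<std::optional<int>>& elements) {
  for (size_t i = 0; i < elements.size(); ++i) {
    if (!elements[i].has_value()) continue;  // constant: already in the clone
    (*clone)[i] = *elements[i];              // store the evaluated value
  }
}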
1875 
1876 
1877 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1878  ASSERT(expr->target()->IsValidLeftHandSide());
1879 
1880  Comment cmnt(masm_, "[ Assignment");
1881 
1882  // Left-hand side can only be a property, a global or a (parameter or local)
1883  // slot.
1884  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1885  LhsKind assign_type = VARIABLE;
1886  Property* property = expr->target()->AsProperty();
1887  if (property != NULL) {
1888  assign_type = (property->key()->IsPropertyName())
1889  ? NAMED_PROPERTY
1890  : KEYED_PROPERTY;
1891  }
1892 
1893  // Evaluate LHS expression.
1894  switch (assign_type) {
1895  case VARIABLE:
1896  // Nothing to do here.
1897  break;
1898  case NAMED_PROPERTY:
1899  if (expr->is_compound()) {
1900  // We need the receiver both on the stack and in the accumulator.
1901  VisitForAccumulatorValue(property->obj());
1902  __ Push(result_register());
1903  } else {
1904  VisitForStackValue(property->obj());
1905  }
1906  break;
1907  case KEYED_PROPERTY: {
1908  if (expr->is_compound()) {
1909  VisitForStackValue(property->obj());
1910  VisitForAccumulatorValue(property->key());
1911  __ movp(rdx, Operand(rsp, 0));
1912  __ Push(rax);
1913  } else {
1914  VisitForStackValue(property->obj());
1915  VisitForStackValue(property->key());
1916  }
1917  break;
1918  }
1919  }
1920 
1921  // For compound assignments we need another deoptimization point after the
1922  // variable/property load.
1923  if (expr->is_compound()) {
1924  { AccumulatorValueContext context(this);
1925  switch (assign_type) {
1926  case VARIABLE:
1927  EmitVariableLoad(expr->target()->AsVariableProxy());
1928  PrepareForBailout(expr->target(), TOS_REG);
1929  break;
1930  case NAMED_PROPERTY:
1931  EmitNamedPropertyLoad(property);
1932  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1933  break;
1934  case KEYED_PROPERTY:
1935  EmitKeyedPropertyLoad(property);
1936  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1937  break;
1938  }
1939  }
1940 
1941  Token::Value op = expr->binary_op();
1942  __ Push(rax); // Left operand goes on the stack.
1943  VisitForAccumulatorValue(expr->value());
1944 
1945  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1946  ? OVERWRITE_RIGHT
1947  : NO_OVERWRITE;
1948  SetSourcePosition(expr->position() + 1);
1949  AccumulatorValueContext context(this);
1950  if (ShouldInlineSmiCase(op)) {
1951  EmitInlineSmiBinaryOp(expr->binary_operation(),
1952  op,
1953  mode,
1954  expr->target(),
1955  expr->value());
1956  } else {
1957  EmitBinaryOp(expr->binary_operation(), op, mode);
1958  }
1959  // Deoptimization point in case the binary operation may have side effects.
1960  PrepareForBailout(expr->binary_operation(), TOS_REG);
1961  } else {
1962  VisitForAccumulatorValue(expr->value());
1963  }
1964 
1965  // Record source position before possible IC call.
1966  SetSourcePosition(expr->position());
1967 
1968  // Store the value.
1969  switch (assign_type) {
1970  case VARIABLE:
1971  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1972  expr->op());
1973  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1974  context()->Plug(rax);
1975  break;
1976  case NAMED_PROPERTY:
1977  EmitNamedPropertyAssignment(expr);
1978  break;
1979  case KEYED_PROPERTY:
1980  EmitKeyedPropertyAssignment(expr);
1981  break;
1982  }
1983 }
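For compound assignments the code above fixes a strict ordering: load the target, push it as the left operand, evaluate the right-hand side, apply the binary operation, and only then store. A toy sketch of that ordering with invented callback types (nothing here is a V8 API):

#include <functional>

// load_target / eval_value / binary_op / store_target stand in for the emit
// helpers named in the comments; only the ordering is the point.
static int CompoundAssign(const std::function<int()>& load_target,
                          const std::function<int()>& eval_value,
                          const std::function<int(int, int)>& binary_op,
                          const std::function<void(int)>& store_target) {
  int left = load_target();             // EmitVariableLoad / property load
  int right = eval_value();             // VisitForAccumulatorValue(expr->value())
  int result = binary_op(left, right);  // EmitInlineSmiBinaryOp / EmitBinaryOp
  store_target(result);                 // EmitVariableAssignment / property store
  return result;                        // the result is then plugged into the context
}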
1984 
1985 
1986 void FullCodeGenerator::VisitYield(Yield* expr) {
1987  Comment cmnt(masm_, "[ Yield");
1988  // Evaluate yielded value first; the initial iterator definition depends on
1989  // this. It stays on the stack while we update the iterator.
1990  VisitForStackValue(expr->expression());
1991 
1992  switch (expr->yield_kind()) {
1993  case Yield::SUSPEND:
1994  // Pop value from top-of-stack slot; box result into result register.
1995  EmitCreateIteratorResult(false);
1996  __ Push(result_register());
1997  // Fall through.
1998  case Yield::INITIAL: {
1999  Label suspend, continuation, post_runtime, resume;
2000 
2001  __ jmp(&suspend);
2002 
2003  __ bind(&continuation);
2004  __ jmp(&resume);
2005 
2006  __ bind(&suspend);
2007  VisitForAccumulatorValue(expr->generator_object());
2008  ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2010  Smi::FromInt(continuation.pos()));
2012  __ movp(rcx, rsi);
2013  __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2014  kDontSaveFPRegs);
2016  __ cmpp(rsp, rbx);
2017  __ j(equal, &post_runtime);
2018  __ Push(rax); // generator object
2019  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2020  __ movp(context_register(),
2022  __ bind(&post_runtime);
2023 
2024  __ Pop(result_register());
2025  EmitReturnSequence();
2026 
2027  __ bind(&resume);
2028  context()->Plug(result_register());
2029  break;
2030  }
2031 
2032  case Yield::FINAL: {
2033  VisitForAccumulatorValue(expr->generator_object());
2034  __ Move(FieldOperand(result_register(),
2037  // Pop value from top-of-stack slot, box result into result register.
2038  EmitCreateIteratorResult(true);
2039  EmitUnwindBeforeReturn();
2040  EmitReturnSequence();
2041  break;
2042  }
2043 
2044  case Yield::DELEGATING: {
2045  VisitForStackValue(expr->generator_object());
2046 
2047  // Initial stack layout is as follows:
2048  // [sp + 1 * kPointerSize] iter
2049  // [sp + 0 * kPointerSize] g
2050 
2051  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2052  Label l_next, l_call, l_loop;
2053  // Initial send value is undefined.
2054  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2055  __ jmp(&l_next);
2056 
2057  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2058  __ bind(&l_catch);
2059  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2060  __ LoadRoot(rcx, Heap::kthrow_stringRootIndex); // "throw"
2061  __ Push(rcx);
2062  __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2063  __ Push(rax); // exception
2064  __ jmp(&l_call);
2065 
2066  // try { received = %yield result }
2067  // Shuffle the received result above a try handler and yield it without
2068  // re-boxing.
2069  __ bind(&l_try);
2070  __ Pop(rax); // result
2071  __ PushTryHandler(StackHandler::CATCH, expr->index());
2072  const int handler_size = StackHandlerConstants::kSize;
2073  __ Push(rax); // result
2074  __ jmp(&l_suspend);
2075  __ bind(&l_continuation);
2076  __ jmp(&l_resume);
2077  __ bind(&l_suspend);
2078  const int generator_object_depth = kPointerSize + handler_size;
2079  __ movp(rax, Operand(rsp, generator_object_depth));
2080  __ Push(rax); // g
2081  ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2083  Smi::FromInt(l_continuation.pos()));
2085  __ movp(rcx, rsi);
2086  __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2087  kDontSaveFPRegs);
2088  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2089  __ movp(context_register(),
2091  __ Pop(rax); // result
2092  EmitReturnSequence();
2093  __ bind(&l_resume); // received in rax
2094  __ PopTryHandler();
2095 
2096  // receiver = iter; f = 'next'; arg = received;
2097  __ bind(&l_next);
2098  __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next"
2099  __ Push(rcx);
2100  __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2101  __ Push(rax); // received
2102 
2103  // result = receiver[f](arg);
2104  __ bind(&l_call);
2105  __ movp(rdx, Operand(rsp, kPointerSize));
2106  __ movp(rax, Operand(rsp, 2 * kPointerSize));
2107  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2108  CallIC(ic, TypeFeedbackId::None());
2109  __ movp(rdi, rax);
2110  __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2111  CallFunctionStub stub(1, CALL_AS_METHOD);
2112  __ CallStub(&stub);
2113 
2115  __ Drop(1); // The function is still on the stack; drop it.
2116 
2117  // if (!result.done) goto l_try;
2118  __ bind(&l_loop);
2119  __ Push(rax); // save result
2120  __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done"
2121  CallLoadIC(NOT_CONTEXTUAL); // result.done in rax
2122  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2123  CallIC(bool_ic);
2124  __ testp(result_register(), result_register());
2125  __ j(zero, &l_try);
2126 
2127  // result.value
2128  __ Pop(rax); // result
2129  __ LoadRoot(rcx, Heap::kvalue_stringRootIndex); // "value"
2130  CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2131  context()->DropAndPlug(2, rax); // drop iter and g
2132  break;
2133  }
2134  }
2135 }
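The pseudo-code comments in the delegating case above describe the yield* protocol: keep calling the delegate's "next" entry (or "throw" after an exception arrives while suspended), forward each non-done value outward, and finish with result.value once result.done is true. A hedged sketch of that loop with a toy iterator type; the suspension point is modelled by a callback, since this is plain C++ rather than generated generator code:

#include <functional>
#include <stdexcept>

struct IterResult { bool done; int value; };

// Toy delegate with "next" and "throw" entry points.
struct ToyIterator {
  int state = 0;
  IterResult next(int) {
    if (state < 3) return {false, state++};
    return {true, 100};
  }
  IterResult throw_(int) { return {true, -1}; }
};

// `forward` hands result.value out to the outer consumer and returns the value
// sent back in on resume; throwing from it models resuming with an exception.
static int DelegateYield(ToyIterator& iter,
                         const std::function<int(int)>& forward) {
  int received = 0;          // initial send value ("undefined" above)
  bool use_throw = false;    // which entry to call: "next" or "throw"
  for (;;) {
    IterResult result =
        use_throw ? iter.throw_(received) : iter.next(received);  // receiver[f](arg)
    if (result.done) return result.value;   // if (result.done) stop
    try {
      received = forward(result.value);     // suspend: yield value, get next one
      use_throw = false;                    // resumed normally: f = "next"
    } catch (const std::exception&) {
      received = 0;                         // stand-in for the thrown value
      use_throw = true;                     // catch clause: f = "throw"
    }
  }
}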
2136 
2137 
2138 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2139  Expression *value,
2140  JSGeneratorObject::ResumeMode resume_mode) {
2141  // The value stays in rax, and is ultimately read by the resumed generator, as
2142  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2143  // is read to throw the value when the resumed generator is already closed.
2144  // rbx will hold the generator object until the activation has been resumed.
2145  VisitForStackValue(generator);
2146  VisitForAccumulatorValue(value);
2147  __ Pop(rbx);
2148 
2149  // Check generator state.
2150  Label wrong_state, closed_state, done;
2154  Smi::FromInt(0));
2155  __ j(equal, &closed_state);
2156  __ j(less, &wrong_state);
2157 
2158  // Load suspended function and context.
2161 
2162  // Push receiver.
2164 
2165  // Push holes for arguments to generator function.
2167  __ movsxlq(rdx,
2168  FieldOperand(rdx,
2170  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2171  Label push_argument_holes, push_frame;
2172  __ bind(&push_argument_holes);
2173  __ subp(rdx, Immediate(1));
2174  __ j(carry, &push_frame);
2175  __ Push(rcx);
2176  __ jmp(&push_argument_holes);
2177 
2178  // Enter a new JavaScript frame, and initialize its slots as they were when
2179  // the generator was suspended.
2180  Label resume_frame;
2181  __ bind(&push_frame);
2182  __ call(&resume_frame);
2183  __ jmp(&done);
2184  __ bind(&resume_frame);
2185  __ pushq(rbp); // Caller's frame pointer.
2186  __ movp(rbp, rsp);
2187  __ Push(rsi); // Callee's context.
2188  __ Push(rdi); // Callee's JS Function.
2189 
2190  // Load the operand stack size.
2193  __ SmiToInteger32(rdx, rdx);
2194 
2195  // If we are sending a value and there is no operand stack, we can jump back
2196  // in directly.
2197  if (resume_mode == JSGeneratorObject::NEXT) {
2198  Label slow_resume;
2199  __ cmpp(rdx, Immediate(0));
2200  __ j(not_zero, &slow_resume);
2202  __ SmiToInteger64(rcx,
2204  __ addp(rdx, rcx);
2207  __ jmp(rdx);
2208  __ bind(&slow_resume);
2209  }
2210 
2211  // Otherwise, we push holes for the operand stack and call the runtime to fix
2212  // up the stack and the handlers.
2213  Label push_operand_holes, call_resume;
2214  __ bind(&push_operand_holes);
2215  __ subp(rdx, Immediate(1));
2216  __ j(carry, &call_resume);
2217  __ Push(rcx);
2218  __ jmp(&push_operand_holes);
2219  __ bind(&call_resume);
2220  __ Push(rbx);
2221  __ Push(result_register());
2222  __ Push(Smi::FromInt(resume_mode));
2223  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2224  // Not reached: the runtime call returns elsewhere.
2225  __ Abort(kGeneratorFailedToResume);
2226 
2227  // Reach here when generator is closed.
2228  __ bind(&closed_state);
2229  if (resume_mode == JSGeneratorObject::NEXT) {
2230  // Return completed iterator result when generator is closed.
2231  __ PushRoot(Heap::kUndefinedValueRootIndex);
2232  // Pop value from top-of-stack slot; box result into result register.
2233  EmitCreateIteratorResult(true);
2234  } else {
2235  // Throw the provided value.
2236  __ Push(rax);
2237  __ CallRuntime(Runtime::kHiddenThrow, 1);
2238  }
2239  __ jmp(&done);
2240 
2241  // Throw error if we attempt to operate on a running generator.
2242  __ bind(&wrong_state);
2243  __ Push(rbx);
2244  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2245 
2246  __ bind(&done);
2247  context()->Plug(result_register());
2248 }
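The "push holes" loops above use a decrement-and-branch-on-carry idiom: the count is decremented first, and the loop exits as soon as it wraps below zero, so exactly `count` holes are pushed. A plain C++ sketch of the same shape (names invented):

#include <vector>

static void PushArgumentHoles(std::vector<int>* stack, int formal_count,
                              int hole) {
  int remaining = formal_count;
  for (;;) {
    --remaining;                // subp rdx, Immediate(1)
    if (remaining < 0) break;   // j(carry, ...): count has wrapped below zero
    stack->push_back(hole);     // Push(rcx) -- one hole per remaining slot
  }
}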
2249 
2250 
2251 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2252  Label gc_required;
2253  Label allocated;
2254 
2255  Handle<Map> map(isolate()->native_context()->generator_result_map());
2256 
2257  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2258  __ jmp(&allocated);
2259 
2260  __ bind(&gc_required);
2261  __ Push(Smi::FromInt(map->instance_size()));
2262  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2263  __ movp(context_register(),
2265 
2266  __ bind(&allocated);
2267  __ Move(rbx, map);
2268  __ Pop(rcx);
2269  __ Move(rdx, isolate()->factory()->ToBoolean(done));
2270  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2273  isolate()->factory()->empty_fixed_array());
2275  isolate()->factory()->empty_fixed_array());
2277  rcx);
2279  rdx);
2280 
2281  // Only the value field needs a write barrier, as the other values are in the
2282  // root set.
2284  rcx, rdx, kDontSaveFPRegs);
2285 }
2286 
2287 
2288 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2289  SetSourcePosition(prop->position());
2290  Literal* key = prop->key()->AsLiteral();
2291  __ Move(rcx, key->value());
2292  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2293 }
2294 
2295 
2296 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2297  SetSourcePosition(prop->position());
2298  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2299  CallIC(ic, prop->PropertyFeedbackId());
2300 }
2301 
2302 
2303 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2304  Token::Value op,
2305  OverwriteMode mode,
2306  Expression* left,
2307  Expression* right) {
2308  // Do combined smi check of the operands. Left operand is on the
2309  // stack (popped into rdx). Right operand is in rax but moved into
2310  // rcx to make the shifts easier.
2311  Label done, stub_call, smi_case;
2312  __ Pop(rdx);
2313  __ movp(rcx, rax);
2314  __ orp(rax, rdx);
2315  JumpPatchSite patch_site(masm_);
2316  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2317 
2318  __ bind(&stub_call);
2319  __ movp(rax, rcx);
2320  BinaryOpICStub stub(op, mode);
2321  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2322  patch_site.EmitPatchInfo();
2323  __ jmp(&done, Label::kNear);
2324 
2325  __ bind(&smi_case);
2326  switch (op) {
2327  case Token::SAR:
2328  __ SmiShiftArithmeticRight(rax, rdx, rcx);
2329  break;
2330  case Token::SHL:
2331  __ SmiShiftLeft(rax, rdx, rcx);
2332  break;
2333  case Token::SHR:
2334  __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2335  break;
2336  case Token::ADD:
2337  __ SmiAdd(rax, rdx, rcx, &stub_call);
2338  break;
2339  case Token::SUB:
2340  __ SmiSub(rax, rdx, rcx, &stub_call);
2341  break;
2342  case Token::MUL:
2343  __ SmiMul(rax, rdx, rcx, &stub_call);
2344  break;
2345  case Token::BIT_OR:
2346  __ SmiOr(rax, rdx, rcx);
2347  break;
2348  case Token::BIT_AND:
2349  __ SmiAnd(rax, rdx, rcx);
2350  break;
2351  case Token::BIT_XOR:
2352  __ SmiXor(rax, rdx, rcx);
2353  break;
2354  default:
2355  UNREACHABLE();
2356  break;
2357  }
2358 
2359  __ bind(&done);
2360  context()->Plug(rax);
2361 }
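The structure above is the usual fast/slow split: if the combined tag check shows both operands are smis, the operation is performed inline (bailing to the stub label on overflow or a result that does not fit); otherwise the generic BinaryOpICStub is called. A simplified sketch of the addition case, assuming a low-bit tagging scheme for illustration rather than V8's actual x64 smi layout:

#include <cstdint>
#include <optional>

constexpr uintptr_t kSmiTagMaskSketch = 1;  // assumption: low bit clear == smi

inline bool BothSmis(uintptr_t a, uintptr_t b) {
  // Mirrors the `orp rax, rdx` followed by the tag test that guards the
  // inline case: one check covers both operands.
  return ((a | b) & kSmiTagMaskSketch) == 0;
}

// Add two tagged values; nullopt means "take the generic stub path instead"
// (non-smi input or overflow), matching the jumps to &stub_call above.
std::optional<int64_t> SmiAddFast(uintptr_t a, uintptr_t b) {
  if (!BothSmis(a, b)) return std::nullopt;
  int64_t lhs = static_cast<int64_t>(a) >> 1;  // untag (payload << 1 scheme)
  int64_t rhs = static_cast<int64_t>(b) >> 1;
  int64_t sum = lhs + rhs;
  if (sum < INT32_MIN || sum > INT32_MAX) return std::nullopt;  // overflow: bail
  return sum;
}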
2362 
2363 
2364 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2365  Token::Value op,
2366  OverwriteMode mode) {
2367  __ Pop(rdx);
2368  BinaryOpICStub stub(op, mode);
2369  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2370  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2371  patch_site.EmitPatchInfo();
2372  context()->Plug(rax);
2373 }
2374 
2375 
2376 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2377  ASSERT(expr->IsValidLeftHandSide());
2378 
2379  // Left-hand side can only be a property, a global or a (parameter or local)
2380  // slot.
2381  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2382  LhsKind assign_type = VARIABLE;
2383  Property* prop = expr->AsProperty();
2384  if (prop != NULL) {
2385  assign_type = (prop->key()->IsPropertyName())
2386  ? NAMED_PROPERTY
2387  : KEYED_PROPERTY;
2388  }
2389 
2390  switch (assign_type) {
2391  case VARIABLE: {
2392  Variable* var = expr->AsVariableProxy()->var();
2393  EffectContext context(this);
2394  EmitVariableAssignment(var, Token::ASSIGN);
2395  break;
2396  }
2397  case NAMED_PROPERTY: {
2398  __ Push(rax); // Preserve value.
2399  VisitForAccumulatorValue(prop->obj());
2400  __ movp(rdx, rax);
2401  __ Pop(rax); // Restore value.
2402  __ Move(rcx, prop->key()->AsLiteral()->value());
2403  CallStoreIC();
2404  break;
2405  }
2406  case KEYED_PROPERTY: {
2407  __ Push(rax); // Preserve value.
2408  VisitForStackValue(prop->obj());
2409  VisitForAccumulatorValue(prop->key());
2410  __ movp(rcx, rax);
2411  __ Pop(rdx);
2412  __ Pop(rax); // Restore value.
2413  Handle<Code> ic = strict_mode() == SLOPPY
2414  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2415  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2416  CallIC(ic);
2417  break;
2418  }
2419  }
2420  context()->Plug(rax);
2421 }
2422 
2423 
2424 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2425  Variable* var, MemOperand location) {
2426  __ movp(location, rax);
2427  if (var->IsContextSlot()) {
2428  __ movp(rdx, rax);
2429  __ RecordWriteContextSlot(
2430  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2431  }
2432 }
2433 
2434 
2435 void FullCodeGenerator::EmitCallStoreContextSlot(
2436  Handle<String> name, StrictMode strict_mode) {
2437  __ Push(rax); // Value.
2438  __ Push(rsi); // Context.
2439  __ Push(name);
2440  __ Push(Smi::FromInt(strict_mode));
2441  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2442 }
2443 
2444 
2445 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2446  Token::Value op) {
2447  if (var->IsUnallocated()) {
2448  // Global var, const, or let.
2449  __ Move(rcx, var->name());
2450  __ movp(rdx, GlobalObjectOperand());
2451  CallStoreIC();
2452 
2453  } else if (op == Token::INIT_CONST_LEGACY) {
2454  // Const initializers need a write barrier.
2455  ASSERT(!var->IsParameter()); // No const parameters.
2456  if (var->IsLookupSlot()) {
2457  __ Push(rax);
2458  __ Push(rsi);
2459  __ Push(var->name());
2460  __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2461  } else {
2462  ASSERT(var->IsStackLocal() || var->IsContextSlot());
2463  Label skip;
2464  MemOperand location = VarOperand(var, rcx);
2465  __ movp(rdx, location);
2466  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2467  __ j(not_equal, &skip);
2468  EmitStoreToStackLocalOrContextSlot(var, location);
2469  __ bind(&skip);
2470  }
2471 
2472  } else if (var->mode() == LET && op != Token::INIT_LET) {
2473  // Non-initializing assignment to let variable needs a write barrier.
2474  if (var->IsLookupSlot()) {
2475  EmitCallStoreContextSlot(var->name(), strict_mode());
2476  } else {
2477  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2478  Label assign;
2479  MemOperand location = VarOperand(var, rcx);
2480  __ movp(rdx, location);
2481  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2482  __ j(not_equal, &assign, Label::kNear);
2483  __ Push(var->name());
2484  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2485  __ bind(&assign);
2486  EmitStoreToStackLocalOrContextSlot(var, location);
2487  }
2488 
2489  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2490  // Assignment to var or initializing assignment to let/const
2491  // in harmony mode.
2492  if (var->IsLookupSlot()) {
2493  EmitCallStoreContextSlot(var->name(), strict_mode());
2494  } else {
2495  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2496  MemOperand location = VarOperand(var, rcx);
2497  if (generate_debug_code_ && op == Token::INIT_LET) {
2498  // Check for an uninitialized let binding.
2499  __ movp(rdx, location);
2500  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2501  __ Check(equal, kLetBindingReInitialization);
2502  }
2503  EmitStoreToStackLocalOrContextSlot(var, location);
2504  }
2505  }
2506  // Non-initializing assignments to consts are ignored.
2507 }
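For `let` bindings, the non-initializing path above first compares the slot with the hole sentinel: a hole means the binding is still uninitialized, so a reference error is thrown; otherwise the store (plus the context-slot write barrier) proceeds. A minimal sketch of that check, using std::optional as a stand-in for the hole and invented names:

#include <optional>
#include <stdexcept>
#include <string>

// nullopt plays the role of the hole value: the slot exists, but the `let`
// binding has not been initialized yet (temporal dead zone).
static void AssignToLetSlot(std::optional<int>* slot, int value,
                            const std::string& name) {
  if (!slot->has_value()) {
    // The hole comparison matched: report the use-before-initialization.
    throw std::runtime_error("ReferenceError: " + name + " is not initialized");
  }
  *slot = value;  // corresponds to EmitStoreToStackLocalOrContextSlot
}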
2508 
2509 
2510 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2511  // Assignment to a property, using a named store IC.
2512  Property* prop = expr->target()->AsProperty();
2513  ASSERT(prop != NULL);
2514  ASSERT(prop->key()->AsLiteral() != NULL);
2515 
2516  // Record source code position before IC call.
2517  SetSourcePosition(expr->position());
2518  __ Move(rcx, prop->key()->AsLiteral()->value());
2519  __ Pop(rdx);
2520  CallStoreIC(expr->AssignmentFeedbackId());
2521 
2522  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2523  context()->Plug(rax);
2524 }
2525 
2526 
2527 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2528  // Assignment to a property, using a keyed store IC.
2529 
2530  __ Pop(rcx);
2531  __ Pop(rdx);
2532  // Record source code position before IC call.
2533  SetSourcePosition(expr->position());
2534  Handle<Code> ic = strict_mode() == SLOPPY
2535  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2536  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2537  CallIC(ic, expr->AssignmentFeedbackId());
2538 
2539  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2540  context()->Plug(rax);
2541 }
2542 
2543 
2544 void FullCodeGenerator::VisitProperty(Property* expr) {
2545  Comment cmnt(masm_, "[ Property");
2546  Expression* key = expr->key();
2547 
2548  if (key->IsPropertyName()) {
2549  VisitForAccumulatorValue(expr->obj());
2550  EmitNamedPropertyLoad(expr);
2551  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2552  context()->Plug(rax);
2553  } else {
2554  VisitForStackValue(expr->obj());
2555  VisitForAccumulatorValue(expr->key());
2556  __ Pop(rdx);
2557  EmitKeyedPropertyLoad(expr);
2558  context()->Plug(rax);
2559  }
2560 }
2561 
2562 
2563 void FullCodeGenerator::CallIC(Handle<Code> code,
2564  TypeFeedbackId ast_id) {
2565  ic_total_count_++;
2566  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2567 }
2568 
2569 
2570 // Code common for calls using the IC.
2571 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2572  Expression* callee = expr->expression();
2573  ZoneList<Expression*>* args = expr->arguments();
2574  int arg_count = args->length();
2575 
2576  CallFunctionFlags flags;
2577  // Get the target function.
2578  if (callee->IsVariableProxy()) {
2579  { StackValueContext context(this);
2580  EmitVariableLoad(callee->AsVariableProxy());
2581  PrepareForBailout(callee, NO_REGISTERS);
2582  }
2583  // Push undefined as receiver. This is patched in the method prologue if it
2584  // is a sloppy mode method.
2585  __ Push(isolate()->factory()->undefined_value());
2586  flags = NO_CALL_FUNCTION_FLAGS;
2587  } else {
2588  // Load the function from the receiver.
2589  ASSERT(callee->IsProperty());
2590  __ movp(rax, Operand(rsp, 0));
2591  EmitNamedPropertyLoad(callee->AsProperty());
2592  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2593  // Push the target function under the receiver.
2594  __ Push(Operand(rsp, 0));
2595  __ movp(Operand(rsp, kPointerSize), rax);
2596  flags = CALL_AS_METHOD;
2597  }
2598 
2599  // Load the arguments.
2600  { PreservePositionScope scope(masm()->positions_recorder());
2601  for (int i = 0; i < arg_count; i++) {
2602  VisitForStackValue(args->at(i));
2603  }
2604  }
2605 
2606  // Record source position for debugger.
2607  SetSourcePosition(expr->position());
2608  CallFunctionStub stub(arg_count, flags);
2609  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2610  __ CallStub(&stub);
2611 
2612  RecordJSReturnSite(expr);
2613 
2614  // Restore context register.
2616 
2617  context()->DropAndPlug(1, rax);
2618 }
2619 
2620 
2621 // Common code for calls using the IC.
2622 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2623  Expression* key) {
2624  // Load the key.
2625  VisitForAccumulatorValue(key);
2626 
2627  Expression* callee = expr->expression();
2628  ZoneList<Expression*>* args = expr->arguments();
2629  int arg_count = args->length();
2630 
2631  // Load the function from the receiver.
2632  ASSERT(callee->IsProperty());
2633  __ movp(rdx, Operand(rsp, 0));
2634  EmitKeyedPropertyLoad(callee->AsProperty());
2635  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2636 
2637  // Push the target function under the receiver.
2638  __ Push(Operand(rsp, 0));
2639  __ movp(Operand(rsp, kPointerSize), rax);
2640 
2641  // Load the arguments.
2642  { PreservePositionScope scope(masm()->positions_recorder());
2643  for (int i = 0; i < arg_count; i++) {
2644  VisitForStackValue(args->at(i));
2645  }
2646  }
2647 
2648  // Record source position for debugger.
2649  SetSourcePosition(expr->position());
2650  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2651  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2652  __ CallStub(&stub);
2653 
2654  RecordJSReturnSite(expr);
2655  // Restore context register.
2657 
2658  context()->DropAndPlug(1, rax);
2659 }
2660 
2661 
2662 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2663  // Code common for calls using the call stub.
2664  ZoneList<Expression*>* args = expr->arguments();
2665  int arg_count = args->length();
2666  { PreservePositionScope scope(masm()->positions_recorder());
2667  for (int i = 0; i < arg_count; i++) {
2668  VisitForStackValue(args->at(i));
2669  }
2670  }
2671  // Record source position for debugger.
2672  SetSourcePosition(expr->position());
2673 
2674  Handle<Object> uninitialized =
2676  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2677  __ Move(rbx, FeedbackVector());
2678  __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2679 
2680  // Record call targets in unoptimized code.
2681  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2682  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2683  __ CallStub(&stub);
2684  RecordJSReturnSite(expr);
2685  // Restore context register.
2687  // Discard the function left on TOS.
2688  context()->DropAndPlug(1, rax);
2689 }
2690 
2691 
2692 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2693  // Push copy of the first argument or undefined if it doesn't exist.
2694  if (arg_count > 0) {
2695  __ Push(Operand(rsp, arg_count * kPointerSize));
2696  } else {
2697  __ PushRoot(Heap::kUndefinedValueRootIndex);
2698  }
2699 
2700  // Push the receiver of the enclosing function and do runtime call.
2701  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2702  __ Push(args.GetReceiverOperand());
2703 
2704  // Push the language mode.
2705  __ Push(Smi::FromInt(strict_mode()));
2706 
2708  // Push the start position of the scope the call resides in.
2708  __ Push(Smi::FromInt(scope()->start_position()));
2709 
2710  // Do the runtime call.
2711  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2712 }
2713 
2714 
2715 void FullCodeGenerator::VisitCall(Call* expr) {
2716 #ifdef DEBUG
2717  // We want to verify that RecordJSReturnSite gets called on all paths
2718  // through this function. Avoid early returns.
2719  expr->return_is_recorded_ = false;
2720 #endif
2721 
2722  Comment cmnt(masm_, "[ Call");
2723  Expression* callee = expr->expression();
2724  Call::CallType call_type = expr->GetCallType(isolate());
2725 
2726  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2727  // In a call to eval, we first call Runtime::kHiddenResolvePossiblyDirectEval
2728  // to resolve the function we need to call and the receiver of the call.
2729  // Then we call the resolved function using the given arguments.
2730  ZoneList<Expression*>* args = expr->arguments();
2731  int arg_count = args->length();
2732  { PreservePositionScope pos_scope(masm()->positions_recorder());
2733  VisitForStackValue(callee);
2734  __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2735 
2736  // Push the arguments.
2737  for (int i = 0; i < arg_count; i++) {
2738  VisitForStackValue(args->at(i));
2739  }
2740 
2741  // Push a copy of the function (found below the arguments) and resolve
2742  // eval.
2743  __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2744  EmitResolvePossiblyDirectEval(arg_count);
2745 
2746  // The runtime call returns a pair of values in rax (function) and
2747  // rdx (receiver). Touch up the stack with the right values.
2748  __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2749  __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2750  }
2751  // Record source position for debugger.
2752  SetSourcePosition(expr->position());
2753  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2754  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2755  __ CallStub(&stub);
2756  RecordJSReturnSite(expr);
2757  // Restore context register.
2759  context()->DropAndPlug(1, rax);
2760  } else if (call_type == Call::GLOBAL_CALL) {
2761  EmitCallWithIC(expr);
2762 
2763  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2764  // Call to a lookup slot (dynamically introduced variable).
2765  VariableProxy* proxy = callee->AsVariableProxy();
2766  Label slow, done;
2767 
2768  { PreservePositionScope scope(masm()->positions_recorder());
2769  // Generate code for loading from variables potentially shadowed by
2770  // eval-introduced variables.
2771  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2772  }
2773  __ bind(&slow);
2774  // Call the runtime to find the function to call (returned in rax) and
2775  // the object holding it (returned in rdx).
2776  __ Push(context_register());
2777  __ Push(proxy->name());
2778  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2779  __ Push(rax); // Function.
2780  __ Push(rdx); // Receiver.
2781 
2782  // If fast case code has been generated, emit code to push the function
2783  // and receiver and have the slow path jump around this code.
2784  if (done.is_linked()) {
2785  Label call;
2786  __ jmp(&call, Label::kNear);
2787  __ bind(&done);
2788  // Push function.
2789  __ Push(rax);
2790  // The receiver is implicitly the global receiver. Indicate this by
2791  // passing undefined to the call function stub.
2792  __ PushRoot(Heap::kUndefinedValueRootIndex);
2793  __ bind(&call);
2794  }
2795 
2796  // The receiver is either the global receiver or an object found by
2797  // LoadContextSlot.
2798  EmitCallWithStub(expr);
2799  } else if (call_type == Call::PROPERTY_CALL) {
2800  Property* property = callee->AsProperty();
2801  { PreservePositionScope scope(masm()->positions_recorder());
2802  VisitForStackValue(property->obj());
2803  }
2804  if (property->key()->IsPropertyName()) {
2805  EmitCallWithIC(expr);
2806  } else {
2807  EmitKeyedCallWithIC(expr, property->key());
2808  }
2809  } else {
2810  ASSERT(call_type == Call::OTHER_CALL);
2811  // Call to an arbitrary expression not handled specially above.
2812  { PreservePositionScope scope(masm()->positions_recorder());
2813  VisitForStackValue(callee);
2814  }
2815  __ PushRoot(Heap::kUndefinedValueRootIndex);
2816  // Emit function call.
2817  EmitCallWithStub(expr);
2818  }
2819 
2820 #ifdef DEBUG
2821  // RecordJSReturnSite should have been called.
2822  ASSERT(expr->return_is_recorded_);
2823 #endif
2824 }
2825 
2826 
2827 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2828  Comment cmnt(masm_, "[ CallNew");
2829  // According to ECMA-262, section 11.2.2, page 44, the function
2830  // expression in new calls must be evaluated before the
2831  // arguments.
2832 
2833  // Push the constructor on the stack. If it's not a function, it's used as
2834  // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
2835  // ignored.
2836  VisitForStackValue(expr->expression());
2837 
2838  // Push the arguments ("left-to-right") on the stack.
2839  ZoneList<Expression*>* args = expr->arguments();
2840  int arg_count = args->length();
2841  for (int i = 0; i < arg_count; i++) {
2842  VisitForStackValue(args->at(i));
2843  }
2844 
2845  // Call the construct call builtin that handles allocation and
2846  // constructor invocation.
2847  SetSourcePosition(expr->position());
2848 
2849  // Load function and argument count into rdi and rax.
2850  __ Set(rax, arg_count);
2851  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2852 
2853  // Record call targets in unoptimized code, but not in the snapshot.
2854  Handle<Object> uninitialized =
2856  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2857  if (FLAG_pretenuring_call_new) {
2858  StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2859  isolate()->factory()->NewAllocationSite());
2860  ASSERT(expr->AllocationSiteFeedbackSlot() ==
2861  expr->CallNewFeedbackSlot() + 1);
2862  }
2863 
2864  __ Move(rbx, FeedbackVector());
2865  __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2866 
2867  CallConstructStub stub(RECORD_CALL_TARGET);
2868  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2869  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2870  context()->Plug(rax);
2871 }
2872 
2873 
2874 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2875  ZoneList<Expression*>* args = expr->arguments();
2876  ASSERT(args->length() == 1);
2877 
2878  VisitForAccumulatorValue(args->at(0));
2879 
2880  Label materialize_true, materialize_false;
2881  Label* if_true = NULL;
2882  Label* if_false = NULL;
2883  Label* fall_through = NULL;
2884  context()->PrepareTest(&materialize_true, &materialize_false,
2885  &if_true, &if_false, &fall_through);
2886 
2887  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2888  __ JumpIfSmi(rax, if_true);
2889  __ jmp(if_false);
2890 
2891  context()->Plug(if_true, if_false);
2892 }
2893 
2894 
2895 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2896  ZoneList<Expression*>* args = expr->arguments();
2897  ASSERT(args->length() == 1);
2898 
2899  VisitForAccumulatorValue(args->at(0));
2900 
2901  Label materialize_true, materialize_false;
2902  Label* if_true = NULL;
2903  Label* if_false = NULL;
2904  Label* fall_through = NULL;
2905  context()->PrepareTest(&materialize_true, &materialize_false,
2906  &if_true, &if_false, &fall_through);
2907 
2908  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2909  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2910  Split(non_negative_smi, if_true, if_false, fall_through);
2911 
2912  context()->Plug(if_true, if_false);
2913 }
2914 
2915 
2916 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2917  ZoneList<Expression*>* args = expr->arguments();
2918  ASSERT(args->length() == 1);
2919 
2920  VisitForAccumulatorValue(args->at(0));
2921 
2922  Label materialize_true, materialize_false;
2923  Label* if_true = NULL;
2924  Label* if_false = NULL;
2925  Label* fall_through = NULL;
2926  context()->PrepareTest(&materialize_true, &materialize_false,
2927  &if_true, &if_false, &fall_through);
2928 
2929  __ JumpIfSmi(rax, if_false);
2930  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2931  __ j(equal, if_true);
2933  // Undetectable objects behave like undefined when tested with typeof.
2935  Immediate(1 << Map::kIsUndetectable));
2936  __ j(not_zero, if_false);
2938  __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2939  __ j(below, if_false);
2940  __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2941  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2942  Split(below_equal, if_true, if_false, fall_through);
2943 
2944  context()->Plug(if_true, if_false);
2945 }
2946 
2947 
2948 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2949  ZoneList<Expression*>* args = expr->arguments();
2950  ASSERT(args->length() == 1);
2951 
2952  VisitForAccumulatorValue(args->at(0));
2953 
2954  Label materialize_true, materialize_false;
2955  Label* if_true = NULL;
2956  Label* if_false = NULL;
2957  Label* fall_through = NULL;
2958  context()->PrepareTest(&materialize_true, &materialize_false,
2959  &if_true, &if_false, &fall_through);
2960 
2961  __ JumpIfSmi(rax, if_false);
2962  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2963  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2964  Split(above_equal, if_true, if_false, fall_through);
2965 
2966  context()->Plug(if_true, if_false);
2967 }
2968 
2969 
2970 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2971  ZoneList<Expression*>* args = expr->arguments();
2972  ASSERT(args->length() == 1);
2973 
2974  VisitForAccumulatorValue(args->at(0));
2975 
2976  Label materialize_true, materialize_false;
2977  Label* if_true = NULL;
2978  Label* if_false = NULL;
2979  Label* fall_through = NULL;
2980  context()->PrepareTest(&materialize_true, &materialize_false,
2981  &if_true, &if_false, &fall_through);
2982 
2983  __ JumpIfSmi(rax, if_false);
2986  Immediate(1 << Map::kIsUndetectable));
2987  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2988  Split(not_zero, if_true, if_false, fall_through);
2989 
2990  context()->Plug(if_true, if_false);
2991 }
2992 
2993 
2994 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2995  CallRuntime* expr) {
2996  ZoneList<Expression*>* args = expr->arguments();
2997  ASSERT(args->length() == 1);
2998 
2999  VisitForAccumulatorValue(args->at(0));
3000 
3001  Label materialize_true, materialize_false, skip_lookup;
3002  Label* if_true = NULL;
3003  Label* if_false = NULL;
3004  Label* fall_through = NULL;
3005  context()->PrepareTest(&materialize_true, &materialize_false,
3006  &if_true, &if_false, &fall_through);
3007 
3008  __ AssertNotSmi(rax);
3009 
3010  // Check whether this map has already been verified as safe for the default
3011  // valueOf behavior.
3015  __ j(not_zero, &skip_lookup);
3016 
3017  // Check for fast case object. Generate false result for slow case object.
3019  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3020  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3021  __ j(equal, if_false);
3022 
3023  // Look for the 'valueOf' string in the descriptor array; indicate false if
3024  // it is found. Since we omit an enumeration index check, a 'valueOf' added
3025  // via a transition that shares this descriptor array is a false positive.
3026  Label entry, loop, done;
3027 
3028  // Skip loop if no descriptors are valid.
3029  __ NumberOfOwnDescriptors(rcx, rbx);
3030  __ cmpp(rcx, Immediate(0));
3031  __ j(equal, &done);
3032 
3033  __ LoadInstanceDescriptors(rbx, r8);
3034  // r8: descriptor array.
3035  // rcx: valid entries in the descriptor array.
3036  // Calculate the end of the descriptor array.
3037  __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3038  SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
3039  __ leap(rcx,
3040  Operand(
3041  r8, index.reg, index.scale, DescriptorArray::kFirstOffset));
3042  // Calculate location of the first key name.
3043  __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3044  // Loop through all the keys in the descriptor array. If one of these is the
3045  // internalized string "valueOf" the result is false.
3046  __ jmp(&entry);
3047  __ bind(&loop);
3048  __ movp(rdx, FieldOperand(r8, 0));
3049  __ Cmp(rdx, isolate()->factory()->value_of_string());
3050  __ j(equal, if_false);
3051  __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3052  __ bind(&entry);
3053  __ cmpp(r8, rcx);
3054  __ j(not_equal, &loop);
3055 
3056  __ bind(&done);
3057 
3058  // Set the bit in the map to indicate that there is no local valueOf field.
3061 
3062  __ bind(&skip_lookup);
3063 
3064  // If a valueOf property is not found on the object, check that its
3065  // prototype is the unmodified String prototype. If not, the result is false.
3067  __ testp(rcx, Immediate(kSmiTagMask));
3068  __ j(zero, if_false);
3069  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3072  __ cmpp(rcx,
3074  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3075  Split(equal, if_true, if_false, fall_through);
3076 
3077  context()->Plug(if_true, if_false);
3078 }
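The loop above is a linear scan of the descriptor array's key slots, stepping by the descriptor size and producing a false result as soon as a key equals "valueOf". A standalone sketch of that scan (parameter names are invented; the real code works on tagged heap words):

#include <string>
#include <vector>

// Precondition (sketch): descriptors.size() >= first_offset +
// valid_entries * descriptor_size.
static bool HasValueOfKey(const std::vector<std::string>& descriptors,
                          int valid_entries, int descriptor_size,
                          int first_offset) {
  // End of the valid portion, mirroring the leap computation above.
  int end = first_offset + valid_entries * descriptor_size;
  for (int i = first_offset; i < end; i += descriptor_size) {
    if (descriptors[i] == "valueOf") return true;  // j(equal, if_false)
  }
  return false;
}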
3079 
3080 
3081 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3082  ZoneList<Expression*>* args = expr->arguments();
3083  ASSERT(args->length() == 1);
3084 
3085  VisitForAccumulatorValue(args->at(0));
3086 
3087  Label materialize_true, materialize_false;
3088  Label* if_true = NULL;
3089  Label* if_false = NULL;
3090  Label* fall_through = NULL;
3091  context()->PrepareTest(&materialize_true, &materialize_false,
3092  &if_true, &if_false, &fall_through);
3093 
3094  __ JumpIfSmi(rax, if_false);
3095  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3096  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3097  Split(equal, if_true, if_false, fall_through);
3098 
3099  context()->Plug(if_true, if_false);
3100 }
3101 
3102 
3103 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3104  ZoneList<Expression*>* args = expr->arguments();
3105  ASSERT(args->length() == 1);
3106 
3107  VisitForAccumulatorValue(args->at(0));
3108 
3109  Label materialize_true, materialize_false;
3110  Label* if_true = NULL;
3111  Label* if_false = NULL;
3112  Label* fall_through = NULL;
3113  context()->PrepareTest(&materialize_true, &materialize_false,
3114  &if_true, &if_false, &fall_through);
3115 
3116  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3117  __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
3119  Immediate(0x1));
3120  __ j(no_overflow, if_false);
3122  Immediate(0x00000000));
3123  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124  Split(equal, if_true, if_false, fall_through);
3125 
3126  context()->Plug(if_true, if_false);
3127 }
3128 
3129 
3130 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3131  ZoneList<Expression*>* args = expr->arguments();
3132  ASSERT(args->length() == 1);
3133 
3134  VisitForAccumulatorValue(args->at(0));
3135 
3136  Label materialize_true, materialize_false;
3137  Label* if_true = NULL;
3138  Label* if_false = NULL;
3139  Label* fall_through = NULL;
3140  context()->PrepareTest(&materialize_true, &materialize_false,
3141  &if_true, &if_false, &fall_through);
3142 
3143  __ JumpIfSmi(rax, if_false);
3144  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3145  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3146  Split(equal, if_true, if_false, fall_through);
3147 
3148  context()->Plug(if_true, if_false);
3149 }
3150 
3151 
3152 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3153  ZoneList<Expression*>* args = expr->arguments();
3154  ASSERT(args->length() == 1);
3155 
3156  VisitForAccumulatorValue(args->at(0));
3157 
3158  Label materialize_true, materialize_false;
3159  Label* if_true = NULL;
3160  Label* if_false = NULL;
3161  Label* fall_through = NULL;
3162  context()->PrepareTest(&materialize_true, &materialize_false,
3163  &if_true, &if_false, &fall_through);
3164 
3165  __ JumpIfSmi(rax, if_false);
3166  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3167  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3168  Split(equal, if_true, if_false, fall_through);
3169 
3170  context()->Plug(if_true, if_false);
3171 }
3172 
3173 
3174 
3175 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3176  ASSERT(expr->arguments()->length() == 0);
3177 
3178  Label materialize_true, materialize_false;
3179  Label* if_true = NULL;
3180  Label* if_false = NULL;
3181  Label* fall_through = NULL;
3182  context()->PrepareTest(&materialize_true, &materialize_false,
3183  &if_true, &if_false, &fall_through);
3184 
3185  // Get the frame pointer for the calling frame.
3187 
3188  // Skip the arguments adaptor frame if it exists.
3189  Label check_frame_marker;
3192  __ j(not_equal, &check_frame_marker);
3194 
3195  // Check the marker in the calling frame.
3196  __ bind(&check_frame_marker);
3198  Smi::FromInt(StackFrame::CONSTRUCT));
3199  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3200  Split(equal, if_true, if_false, fall_through);
3201 
3202  context()->Plug(if_true, if_false);
3203 }
3204 
3205 
3206 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3207  ZoneList<Expression*>* args = expr->arguments();
3208  ASSERT(args->length() == 2);
3209 
3210  // Load the two objects into registers and perform the comparison.
3211  VisitForStackValue(args->at(0));
3212  VisitForAccumulatorValue(args->at(1));
3213 
3214  Label materialize_true, materialize_false;
3215  Label* if_true = NULL;
3216  Label* if_false = NULL;
3217  Label* fall_through = NULL;
3218  context()->PrepareTest(&materialize_true, &materialize_false,
3219  &if_true, &if_false, &fall_through);
3220 
3221  __ Pop(rbx);
3222  __ cmpp(rax, rbx);
3223  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3224  Split(equal, if_true, if_false, fall_through);
3225 
3226  context()->Plug(if_true, if_false);
3227 }
3228 
3229 
3230 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3231  ZoneList<Expression*>* args = expr->arguments();
3232  ASSERT(args->length() == 1);
3233 
3234  // ArgumentsAccessStub expects the key in rdx and the formal
3235  // parameter count in rax.
3236  VisitForAccumulatorValue(args->at(0));
3237  __ movp(rdx, rax);
3238  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3239  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3240  __ CallStub(&stub);
3241  context()->Plug(rax);
3242 }
3243 
3244 
3245 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3246  ASSERT(expr->arguments()->length() == 0);
3247 
3248  Label exit;
3249  // Get the number of formal parameters.
3250  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3251 
3252  // Check if the calling frame is an arguments adaptor frame.
3256  __ j(not_equal, &exit, Label::kNear);
3257 
3258  // Arguments adaptor case: Read the arguments length from the
3259  // adaptor frame.
3261 
3262  __ bind(&exit);
3263  __ AssertSmi(rax);
3264  context()->Plug(rax);
3265 }
3266 
3267 
3268 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3269  ZoneList<Expression*>* args = expr->arguments();
3270  ASSERT(args->length() == 1);
3271  Label done, null, function, non_function_constructor;
3272 
3273  VisitForAccumulatorValue(args->at(0));
3274 
3275  // If the object is a smi, we return null.
3276  __ JumpIfSmi(rax, &null);
3277 
3278  // Check that the object is a JS object but take special care of JS
3279  // functions to make sure they have 'Function' as their class.
3280  // Assume that there are only two callable types, and that they sit at the
3281  // two ends of the type range for JS object types; this saves extra comparisons.
3283  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3284  // Map is now in rax.
3285  __ j(below, &null);
3288  __ j(equal, &function);
3289 
3290  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3292  LAST_SPEC_OBJECT_TYPE - 1);
3293  __ j(equal, &function);
3294  // Assume that there is no larger type.
3296 
3297  // Check if the constructor in the map is a JS function.
3299  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3300  __ j(not_equal, &non_function_constructor);
3301 
3302  // rax now contains the constructor function. Grab the
3303  // instance class name from there.
3306  __ jmp(&done);
3307 
3308  // Functions have class 'Function'.
3309  __ bind(&function);
3310  __ Move(rax, isolate()->factory()->function_class_string());
3311  __ jmp(&done);
3312 
3313  // Objects with a non-function constructor have class 'Object'.
3314  __ bind(&non_function_constructor);
3315  __ Move(rax, isolate()->factory()->Object_string());
3316  __ jmp(&done);
3317 
3318  // Non-JS objects have class null.
3319  __ bind(&null);
3320  __ LoadRoot(rax, Heap::kNullValueRootIndex);
3321 
3322  // All done.
3323  __ bind(&done);
3324 
3325  context()->Plug(rax);
3326 }
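The classification above can be summarized as: smis and non-spec-object values yield null, callable types yield "Function", objects whose map constructor is not a function yield "Object", and everything else reports the constructor's instance class name. A compact sketch of that decision, with an invented enum in place of V8's instance types:

#include <string>

enum class Kind { kSmi, kNonObject, kFunction, kObject };

static std::string ClassOf(Kind kind, bool constructor_is_function,
                           const std::string& instance_class_name) {
  if (kind == Kind::kSmi || kind == Kind::kNonObject) return "null";
  if (kind == Kind::kFunction) return "Function";      // functions have class 'Function'
  if (!constructor_is_function) return "Object";       // non-function constructor
  return instance_class_name;  // taken from the constructor in the general case
}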
3327 
3328 
3329 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3330  // Conditionally generate a log call.
3331  // Args:
3332  // 0 (literal string): The type of logging (corresponds to the flags).
3333  // This is used to determine whether or not to generate the log call.
3334  // 1 (string): Format string. Access the string at argument index 2
3335  // with '%2s' (see Logger::LogRuntime for all the formats).
3336  // 2 (array): Arguments to the format string.
3337  ZoneList<Expression*>* args = expr->arguments();
3338  ASSERT_EQ(args->length(), 3);
3339  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3340  VisitForStackValue(args->at(1));
3341  VisitForStackValue(args->at(2));
3342  __ CallRuntime(Runtime::kHiddenLog, 2);
3343  }
3344  // Finally, we're expected to leave a value on the top of the stack.
3345  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3346  context()->Plug(rax);
3347 }
3348 
3349 
3350 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3351  // Load the arguments on the stack and call the stub.
3352  SubStringStub stub;
3353  ZoneList<Expression*>* args = expr->arguments();
3354  ASSERT(args->length() == 3);
3355  VisitForStackValue(args->at(0));
3356  VisitForStackValue(args->at(1));
3357  VisitForStackValue(args->at(2));
3358  __ CallStub(&stub);
3359  context()->Plug(rax);
3360 }
3361 
3362 
3363 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3364  // Load the arguments on the stack and call the stub.
3365  RegExpExecStub stub;
3366  ZoneList<Expression*>* args = expr->arguments();
3367  ASSERT(args->length() == 4);
3368  VisitForStackValue(args->at(0));
3369  VisitForStackValue(args->at(1));
3370  VisitForStackValue(args->at(2));
3371  VisitForStackValue(args->at(3));
3372  __ CallStub(&stub);
3373  context()->Plug(rax);
3374 }
3375 
3376 
3377 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3378  ZoneList<Expression*>* args = expr->arguments();
3379  ASSERT(args->length() == 1);
3380 
3381  VisitForAccumulatorValue(args->at(0)); // Load the object.
3382 
3383  Label done;
3384  // If the object is a smi return the object.
3385  __ JumpIfSmi(rax, &done);
3386  // If the object is not a value type, return the object.
3387  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3388  __ j(not_equal, &done);
3390 
3391  __ bind(&done);
3392  context()->Plug(rax);
3393 }
3394 
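// The %_ValueOf intrinsic above unwraps JSValue wrapper objects and returns
// every other input unchanged. Illustrative behaviour (a sketch, assuming the
// usual self-hosted callers):
//
//   %_ValueOf(new Number(42))   // 42
//   %_ValueOf(new String("x"))  // "x"
//   %_ValueOf("x")              // "x"  (not a JSValue, returned as-is)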
3395 
3396 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3397  ZoneList<Expression*>* args = expr->arguments();
3398  ASSERT(args->length() == 2);
3399  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3400  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3401 
3402  VisitForAccumulatorValue(args->at(0)); // Load the object.
3403 
3404  Label runtime, done, not_date_object;
3405  Register object = rax;
3406  Register result = rax;
3407  Register scratch = rcx;
3408 
3409  __ JumpIfSmi(object, &not_date_object);
3410  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3411  __ j(not_equal, &not_date_object);
3412 
3413  if (index->value() == 0) {
3414  __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3415  __ jmp(&done);
3416  } else {
3417  if (index->value() < JSDate::kFirstUncachedField) {
3418  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3419  Operand stamp_operand = __ ExternalOperand(stamp);
3420  __ movp(scratch, stamp_operand);
3421  __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3422  __ j(not_equal, &runtime, Label::kNear);
3423  __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3424  kPointerSize * index->value()));
3425  __ jmp(&done);
3426  }
3427  __ bind(&runtime);
3428  __ PrepareCallCFunction(2);
3429  __ movp(arg_reg_1, object);
3430  __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3431  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3433  __ jmp(&done);
3434  }
3435 
3436  __ bind(&not_date_object);
3437  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3438  __ bind(&done);
3439  context()->Plug(rax);
3440 }
3441 
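// The fast path above is, in rough C++-like pseudocode (the accessor names
// here are illustrative, not the actual ones):
//
//   if (index == 0) return date->value();            // time value, never cached
//   if (index < JSDate::kFirstUncachedField &&
//       date->cache_stamp == isolate_date_cache_stamp) {
//     return date->cached_field(index);              // cache still valid
//   }
//   return get_date_field_function(date, index);     // slow C call
//
// A non-JSDate receiver throws via Runtime::kHiddenThrowNotDateError.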
3442 
3443 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3444  ZoneList<Expression*>* args = expr->arguments();
3445  ASSERT_EQ(3, args->length());
3446 
3447  Register string = rax;
3448  Register index = rbx;
3449  Register value = rcx;
3450 
3451  VisitForStackValue(args->at(1)); // index
3452  VisitForStackValue(args->at(2)); // value
3453  VisitForAccumulatorValue(args->at(0)); // string
3454  __ Pop(value);
3455  __ Pop(index);
3456 
3457  if (FLAG_debug_code) {
3458  __ Check(__ CheckSmi(value), kNonSmiValue);
3459  __ Check(__ CheckSmi(index), kNonSmiValue);
3460  }
3461 
3462  __ SmiToInteger32(value, value);
3463  __ SmiToInteger32(index, index);
3464 
3465  if (FLAG_debug_code) {
3466  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3467  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3468  }
3469 
3470  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3471  value);
3472  context()->Plug(string);
3473 }
3474 
3475 
3476 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3477  ZoneList<Expression*>* args = expr->arguments();
3478  ASSERT_EQ(3, args->length());
3479 
3480  Register string = rax;
3481  Register index = rbx;
3482  Register value = rcx;
3483 
3484  VisitForStackValue(args->at(1)); // index
3485  VisitForStackValue(args->at(2)); // value
3486  VisitForAccumulatorValue(args->at(0)); // string
3487  __ Pop(value);
3488  __ Pop(index);
3489 
3490  if (FLAG_debug_code) {
3491  __ Check(__ CheckSmi(value), kNonSmiValue);
3492  __ Check(__ CheckSmi(index), kNonSmiValue);
3493  }
3494 
3495  __ SmiToInteger32(value, value);
3496  __ SmiToInteger32(index, index);
3497 
3498  if (FLAG_debug_code) {
3499  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3500  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3501  }
3502 
3503  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3504  value);
3505  context()->Plug(rax);
3506 }
3507 
3508 
3509 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3510  // Load the arguments on the stack and call the runtime function.
3511  ZoneList<Expression*>* args = expr->arguments();
3512  ASSERT(args->length() == 2);
3513  VisitForStackValue(args->at(0));
3514  VisitForStackValue(args->at(1));
3515  MathPowStub stub(MathPowStub::ON_STACK);
3516  __ CallStub(&stub);
3517  context()->Plug(rax);
3518 }
3519 
3520 
3521 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3522  ZoneList<Expression*>* args = expr->arguments();
3523  ASSERT(args->length() == 2);
3524 
3525  VisitForStackValue(args->at(0)); // Load the object.
3526  VisitForAccumulatorValue(args->at(1)); // Load the value.
3527  __ Pop(rbx); // rax = value. rbx = object.
3528 
3529  Label done;
3530  // If the object is a smi, return the value.
3531  __ JumpIfSmi(rbx, &done);
3532 
3533  // If the object is not a value type, return the value.
3534  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3535  __ j(not_equal, &done);
3536 
3537  // Store the value.
3539  // Update the write barrier. Save the value as it will be
3540  // overwritten by the write barrier code and is needed afterward.
3541  __ movp(rdx, rax);
3542  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3543 
3544  __ bind(&done);
3545  context()->Plug(rax);
3546 }
3547 
3548 
3549 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3550  ZoneList<Expression*>* args = expr->arguments();
3551  ASSERT_EQ(args->length(), 1);
3552 
3553  // Load the argument into rax and call the stub.
3554  VisitForAccumulatorValue(args->at(0));
3555 
3556  NumberToStringStub stub;
3557  __ CallStub(&stub);
3558  context()->Plug(rax);
3559 }
3560 
3561 
3562 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3563  ZoneList<Expression*>* args = expr->arguments();
3564  ASSERT(args->length() == 1);
3565 
3566  VisitForAccumulatorValue(args->at(0));
3567 
3568  Label done;
3569  StringCharFromCodeGenerator generator(rax, rbx);
3570  generator.GenerateFast(masm_);
3571  __ jmp(&done);
3572 
3573  NopRuntimeCallHelper call_helper;
3574  generator.GenerateSlow(masm_, call_helper);
3575 
3576  __ bind(&done);
3577  context()->Plug(rbx);
3578 }
3579 
3580 
3581 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3582  ZoneList<Expression*>* args = expr->arguments();
3583  ASSERT(args->length() == 2);
3584 
3585  VisitForStackValue(args->at(0));
3586  VisitForAccumulatorValue(args->at(1));
3587 
3588  Register object = rbx;
3589  Register index = rax;
3590  Register result = rdx;
3591 
3592  __ Pop(object);
3593 
3594  Label need_conversion;
3595  Label index_out_of_range;
3596  Label done;
3597  StringCharCodeAtGenerator generator(object,
3598  index,
3599  result,
3600  &need_conversion,
3601  &need_conversion,
3602  &index_out_of_range,
3604  generator.GenerateFast(masm_);
3605  __ jmp(&done);
3606 
3607  __ bind(&index_out_of_range);
3608  // When the index is out of range, the spec requires us to return
3609  // NaN.
3610  __ LoadRoot(result, Heap::kNanValueRootIndex);
3611  __ jmp(&done);
3612 
3613  __ bind(&need_conversion);
3614  // Move the undefined value into the result register, which will
3615  // trigger conversion.
3616  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3617  __ jmp(&done);
3618 
3619  NopRuntimeCallHelper call_helper;
3620  generator.GenerateSlow(masm_, call_helper);
3621 
3622  __ bind(&done);
3623  context()->Plug(result);
3624 }
3625 
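// This backs the %_StringCharCodeAt intrinsic behind the self-hosted
// String.prototype.charCodeAt. As the labels above show, an out-of-range
// index produces NaN and a non-smi index triggers conversion via the slow
// path. For example:
//
//   "abc".charCodeAt(1)    // 98
//   "abc".charCodeAt(99)   // NaN (index_out_of_range loads the NaN root)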
3626 
3627 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3628  ZoneList<Expression*>* args = expr->arguments();
3629  ASSERT(args->length() == 2);
3630 
3631  VisitForStackValue(args->at(0));
3632  VisitForAccumulatorValue(args->at(1));
3633 
3634  Register object = rbx;
3635  Register index = rax;
3636  Register scratch = rdx;
3637  Register result = rax;
3638 
3639  __ Pop(object);
3640 
3641  Label need_conversion;
3642  Label index_out_of_range;
3643  Label done;
3644  StringCharAtGenerator generator(object,
3645  index,
3646  scratch,
3647  result,
3648  &need_conversion,
3649  &need_conversion,
3650  &index_out_of_range,
3652  generator.GenerateFast(masm_);
3653  __ jmp(&done);
3654 
3655  __ bind(&index_out_of_range);
3656  // When the index is out of range, the spec requires us to return
3657  // the empty string.
3658  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3659  __ jmp(&done);
3660 
3661  __ bind(&need_conversion);
3662  // Move smi zero into the result register, which will trigger
3663  // conversion.
3664  __ Move(result, Smi::FromInt(0));
3665  __ jmp(&done);
3666 
3667  NopRuntimeCallHelper call_helper;
3668  generator.GenerateSlow(masm_, call_helper);
3669 
3670  __ bind(&done);
3671  context()->Plug(result);
3672 }
3673 
3674 
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676  ZoneList<Expression*>* args = expr->arguments();
3677  ASSERT_EQ(2, args->length());
3678  VisitForStackValue(args->at(0));
3679  VisitForAccumulatorValue(args->at(1));
3680 
3681  __ Pop(rdx);
3682  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3683  __ CallStub(&stub);
3684  context()->Plug(rax);
3685 }
3686 
3687 
3688 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3689  ZoneList<Expression*>* args = expr->arguments();
3690  ASSERT_EQ(2, args->length());
3691 
3692  VisitForStackValue(args->at(0));
3693  VisitForStackValue(args->at(1));
3694 
3695  StringCompareStub stub;
3696  __ CallStub(&stub);
3697  context()->Plug(rax);
3698 }
3699 
3700 
3701 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3702  // Load the argument on the stack and call the runtime function.
3703  ZoneList<Expression*>* args = expr->arguments();
3704  ASSERT(args->length() == 1);
3705  VisitForStackValue(args->at(0));
3706  __ CallRuntime(Runtime::kMath_log, 1);
3707  context()->Plug(rax);
3708 }
3709 
3710 
3711 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3712  // Load the argument on the stack and call the runtime function.
3713  ZoneList<Expression*>* args = expr->arguments();
3714  ASSERT(args->length() == 1);
3715  VisitForStackValue(args->at(0));
3716  __ CallRuntime(Runtime::kMath_sqrt, 1);
3717  context()->Plug(rax);
3718 }
3719 
3720 
3721 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3722  ZoneList<Expression*>* args = expr->arguments();
3723  ASSERT(args->length() >= 2);
3724 
3725  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3726  for (int i = 0; i < arg_count + 1; i++) {
3727  VisitForStackValue(args->at(i));
3728  }
3729  VisitForAccumulatorValue(args->last()); // Function.
3730 
3731  Label runtime, done;
3732  // Check for non-function argument (including proxy).
3733  __ JumpIfSmi(rax, &runtime);
3734  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3735  __ j(not_equal, &runtime);
3736 
3737  // InvokeFunction requires the function in rdi. Move it in there.
3738  __ movp(rdi, result_register());
3739  ParameterCount count(arg_count);
3740  __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3742  __ jmp(&done);
3743 
3744  __ bind(&runtime);
3745  __ Push(rax);
3746  __ CallRuntime(Runtime::kCall, args->length());
3747  __ bind(&done);
3748 
3749  context()->Plug(rax);
3750 }
3751 
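// %_CallFunction expects the receiver and arguments first and the callee
// last; non-function callees (including proxies) fall back to Runtime::kCall.
// Typical self-hosted usage looks roughly like:
//
//   %_CallFunction(receiver, arg0, arg1, fn)   // ~ fn.call(receiver, arg0, arg1)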
3752 
3753 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3754  RegExpConstructResultStub stub;
3755  ZoneList<Expression*>* args = expr->arguments();
3756  ASSERT(args->length() == 3);
3757  VisitForStackValue(args->at(0));
3758  VisitForStackValue(args->at(1));
3759  VisitForAccumulatorValue(args->at(2));
3760  __ Pop(rbx);
3761  __ Pop(rcx);
3762  __ CallStub(&stub);
3763  context()->Plug(rax);
3764 }
3765 
3766 
3767 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3768  ZoneList<Expression*>* args = expr->arguments();
3769  ASSERT_EQ(2, args->length());
3770 
3771  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3772  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3773 
3774  Handle<FixedArray> jsfunction_result_caches(
3775  isolate()->native_context()->jsfunction_result_caches());
3776  if (jsfunction_result_caches->length() <= cache_id) {
3777  __ Abort(kAttemptToUseUndefinedCache);
3778  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3779  context()->Plug(rax);
3780  return;
3781  }
3782 
3783  VisitForAccumulatorValue(args->at(1));
3784 
3785  Register key = rax;
3786  Register cache = rbx;
3787  Register tmp = rcx;
3789  __ movp(cache,
3791  __ movp(cache,
3793  __ movp(cache,
3794  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3795 
3796  Label done, not_found;
3797  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3799  // tmp now holds finger offset as a smi.
3800  SmiIndex index =
3801  __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3802  __ cmpp(key, FieldOperand(cache,
3803  index.reg,
3804  index.scale,
3806  __ j(not_equal, &not_found, Label::kNear);
3807  __ movp(rax, FieldOperand(cache,
3808  index.reg,
3809  index.scale,
3810  FixedArray::kHeaderSize + kPointerSize));
3811  __ jmp(&done, Label::kNear);
3812 
3813  __ bind(&not_found);
3814  // Call runtime to perform the lookup.
3815  __ Push(cache);
3816  __ Push(key);
3817  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3818 
3819  __ bind(&done);
3820  context()->Plug(rax);
3821 }
3822 
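// %_GetFromCache(id, key) probes one of the per-native-context JSFunction
// result caches. The inline code only checks the entry at the cache finger
// (key at [finger], value at [finger + 1]); anything else goes through the
// hidden runtime call, which also updates the cache. Sketch:
//
//   if (cache[finger] == key) return cache[finger + 1];   // inline hit
//   return HiddenGetFromCache(cache, key);                // runtime lookup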
3823 
3824 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3825  ZoneList<Expression*>* args = expr->arguments();
3826  ASSERT(args->length() == 1);
3827 
3828  VisitForAccumulatorValue(args->at(0));
3829 
3830  Label materialize_true, materialize_false;
3831  Label* if_true = NULL;
3832  Label* if_false = NULL;
3833  Label* fall_through = NULL;
3834  context()->PrepareTest(&materialize_true, &materialize_false,
3835  &if_true, &if_false, &fall_through);
3836 
3839  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3840  __ j(zero, if_true);
3841  __ jmp(if_false);
3842 
3843  context()->Plug(if_true, if_false);
3844 }
3845 
3846 
3847 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3848  ZoneList<Expression*>* args = expr->arguments();
3849  ASSERT(args->length() == 1);
3850  VisitForAccumulatorValue(args->at(0));
3851 
3852  __ AssertString(rax);
3853 
3856  __ IndexFromHash(rax, rax);
3857 
3858  context()->Plug(rax);
3859 }
3860 
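// Both helpers above read the string's hash field: when a string such as "42"
// has been hashed and looks like an array index, the parsed index is cached in
// the hash field, so it can be recovered without reparsing. Illustrative
// behaviour (assuming the index has already been cached):
//
//   %_HasCachedArrayIndex("42")   // true
//   %_GetCachedArrayIndex("42")   // 42 (as a smi)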
3861 
3862 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3863  Label bailout, return_result, done, one_char_separator, long_separator,
3864  non_trivial_array, not_size_one_array, loop,
3865  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3866  ZoneList<Expression*>* args = expr->arguments();
3867  ASSERT(args->length() == 2);
3868  // We will leave the separator on the stack until the end of the function.
3869  VisitForStackValue(args->at(1));
3870  // Load this to rax (= array)
3871  VisitForAccumulatorValue(args->at(0));
3872  // All aliases of the same register have disjoint lifetimes.
3873  Register array = rax;
3874  Register elements = no_reg; // Will be rax.
3875 
3876  Register index = rdx;
3877 
3878  Register string_length = rcx;
3879 
3880  Register string = rsi;
3881 
3882  Register scratch = rbx;
3883 
3884  Register array_length = rdi;
3885  Register result_pos = no_reg; // Will be rdi.
3886 
3887  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3888  Operand result_operand = Operand(rsp, 1 * kPointerSize);
3889  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3890  // Separator operand is already pushed. Make room for the two
3891  // other stack fields, and clear the direction flag in anticipation
3892  // of calling CopyBytes.
3893  __ subp(rsp, Immediate(2 * kPointerSize));
3894  __ cld();
3895  // Check that the array is a JSArray
3896  __ JumpIfSmi(array, &bailout);
3897  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3898  __ j(not_equal, &bailout);
3899 
3900  // Check that the array has fast elements.
3901  __ CheckFastElements(scratch, &bailout);
3902 
3903  // Array has fast elements, so its length must be a smi.
3904  // If the array has length zero, return the empty string.
3905  __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3906  __ SmiCompare(array_length, Smi::FromInt(0));
3907  __ j(not_zero, &non_trivial_array);
3908  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3909  __ jmp(&return_result);
3910 
3911  // Save the array length on the stack.
3912  __ bind(&non_trivial_array);
3913  __ SmiToInteger32(array_length, array_length);
3914  __ movl(array_length_operand, array_length);
3915 
3916  // Save the FixedArray containing array's elements.
3917  // End of array's live range.
3918  elements = array;
3919  __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3920  array = no_reg;
3921 
3922 
3923  // Check that all array elements are sequential ASCII strings, and
3924  // accumulate the sum of their lengths, as a smi-encoded value.
3925  __ Set(index, 0);
3926  __ Set(string_length, 0);
3927  // Loop condition: while (index < array_length).
3928  // Live loop registers: index(int32), array_length(int32), string(String*),
3929  // scratch, string_length(int32), elements(FixedArray*).
3930  if (generate_debug_code_) {
3931  __ cmpp(index, array_length);
3932  __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3933  }
3934  __ bind(&loop);
3935  __ movp(string, FieldOperand(elements,
3936  index,
3939  __ JumpIfSmi(string, &bailout);
3940  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3941  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3942  __ andb(scratch, Immediate(
3944  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3945  __ j(not_equal, &bailout);
3946  __ AddSmiField(string_length,
3948  __ j(overflow, &bailout);
3949  __ incl(index);
3950  __ cmpl(index, array_length);
3951  __ j(less, &loop);
3952 
3953  // Live registers:
3954  // string_length: Sum of string lengths.
3955  // elements: FixedArray of strings.
3956  // index: Array length.
3957  // array_length: Array length.
3958 
3959  // If array_length is 1, return elements[0], a string.
3960  __ cmpl(array_length, Immediate(1));
3961  __ j(not_equal, &not_size_one_array);
3962  __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3963  __ jmp(&return_result);
3964 
3965  __ bind(&not_size_one_array);
3966 
3967  // End of array_length live range.
3968  result_pos = array_length;
3969  array_length = no_reg;
3970 
3971  // Live registers:
3972  // string_length: Sum of string lengths.
3973  // elements: FixedArray of strings.
3974  // index: Array length.
3975 
3976  // Check that the separator is a sequential ASCII string.
3977  __ movp(string, separator_operand);
3978  __ JumpIfSmi(string, &bailout);
3979  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3980  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3981  __ andb(scratch, Immediate(
3983  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3984  __ j(not_equal, &bailout);
3985 
3986  // Live registers:
3987  // string_length: Sum of string lengths.
3988  // elements: FixedArray of strings.
3989  // index: Array length.
3990  // string: Separator string.
3991 
3992  // Add (separator length times (array_length - 1)) to string_length.
3993  __ SmiToInteger32(scratch,
3995  __ decl(index);
3996  __ imull(scratch, index);
3997  __ j(overflow, &bailout);
3998  __ addl(string_length, scratch);
3999  __ j(overflow, &bailout);
4000 
4001  // Live registers and stack values:
4002  // string_length: Total length of result string.
4003  // elements: FixedArray of strings.
4004  __ AllocateAsciiString(result_pos, string_length, scratch,
4005  index, string, &bailout);
4006  __ movp(result_operand, result_pos);
4007  __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4008 
4009  __ movp(string, separator_operand);
4010  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4011  Smi::FromInt(1));
4012  __ j(equal, &one_char_separator);
4013  __ j(greater, &long_separator);
4014 
4015 
4016  // Empty separator case:
4017  __ Set(index, 0);
4018  __ movl(scratch, array_length_operand);
4019  __ jmp(&loop_1_condition);
4020  // Loop condition: while (index < array_length).
4021  __ bind(&loop_1);
4022  // Each iteration of the loop concatenates one string to the result.
4023  // Live values in registers:
4024  // index: which element of the elements array we are adding to the result.
4025  // result_pos: the position to which we are currently copying characters.
4026  // elements: the FixedArray of strings we are joining.
4027  // scratch: array length.
4028 
4029  // Get string = array[index].
4030  __ movp(string, FieldOperand(elements, index,
4033  __ SmiToInteger32(string_length,
4035  __ leap(string,
4037  __ CopyBytes(result_pos, string, string_length);
4038  __ incl(index);
4039  __ bind(&loop_1_condition);
4040  __ cmpl(index, scratch);
4041  __ j(less, &loop_1); // Loop while (index < array_length).
4042  __ jmp(&done);
4043 
4044  // Generic bailout code used from several places.
4045  __ bind(&bailout);
4046  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4047  __ jmp(&return_result);
4048 
4049 
4050  // One-character separator case
4051  __ bind(&one_char_separator);
4052  // Get the separator ASCII character value.
4053  // Register "string" holds the separator.
4054  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4055  __ Set(index, 0);
4056  // Jump into the loop after the code that copies the separator, so the first
4057  // element is not preceded by a separator
4058  __ jmp(&loop_2_entry);
4059  // Loop condition: while (index < length).
4060  __ bind(&loop_2);
4061  // Each iteration of the loop concatenates one string to the result.
4062  // Live values in registers:
4063  // elements: The FixedArray of strings we are joining.
4064  // index: which element of the elements array we are adding to the result.
4065  // result_pos: the position to which we are currently copying characters.
4066  // scratch: Separator character.
4067 
4068  // Copy the separator character to the result.
4069  __ movb(Operand(result_pos, 0), scratch);
4070  __ incp(result_pos);
4071 
4072  __ bind(&loop_2_entry);
4073  // Get string = array[index].
4074  __ movp(string, FieldOperand(elements, index,
4077  __ SmiToInteger32(string_length,
4079  __ leap(string,
4081  __ CopyBytes(result_pos, string, string_length);
4082  __ incl(index);
4083  __ cmpl(index, array_length_operand);
4084  __ j(less, &loop_2); // End while (index < length).
4085  __ jmp(&done);
4086 
4087 
4088  // Long separator case (separator is more than one character).
4089  __ bind(&long_separator);
4090 
4091  // Make elements point to end of elements array, and index
4092  // count from -array_length to zero, so we don't need to maintain
4093  // a loop limit.
4094  __ movl(index, array_length_operand);
4095  __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4097  __ negq(index);
4098 
4099  // Replace separator string with pointer to its first character, and
4100  // make scratch be its length.
4101  __ movp(string, separator_operand);
4102  __ SmiToInteger32(scratch,
4104  __ leap(string,
4106  __ movp(separator_operand, string);
4107 
4108  // Jump into the loop after the code that copies the separator, so the first
4109  // element is not preceded by a separator
4110  __ jmp(&loop_3_entry);
4111  // Loop condition: while (index < length).
4112  __ bind(&loop_3);
4113  // Each iteration of the loop concatenates one string to the result.
4114  // Live values in registers:
4115  // index: which element of the elements array we are adding to the result.
4116  // result_pos: the position to which we are currently copying characters.
4117  // scratch: Separator length.
4118  // separator_operand (rsp[0x10]): Address of first char of separator.
4119 
4120  // Copy the separator to the result.
4121  __ movp(string, separator_operand);
4122  __ movl(string_length, scratch);
4123  __ CopyBytes(result_pos, string, string_length, 2);
4124 
4125  __ bind(&loop_3_entry);
4126  // Get string = array[index].
4127  __ movp(string, Operand(elements, index, times_pointer_size, 0));
4128  __ SmiToInteger32(string_length,
4130  __ leap(string,
4132  __ CopyBytes(result_pos, string, string_length);
4133  __ incq(index);
4134  __ j(not_equal, &loop_3); // Loop while (index < 0).
4135 
4136  __ bind(&done);
4137  __ movp(rax, result_operand);
4138 
4139  __ bind(&return_result);
4140  // Drop temp values from the stack, and restore context register.
4141  __ addp(rsp, Immediate(3 * kPointerSize));
4143  context()->Plug(rax);
4144 }
4145 
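// This is the fast path behind Array.prototype.join for arrays of sequential
// one-byte strings. The three copy loops match the three separator shapes,
// and any smi element, cons/two-byte string, or length overflow bails out to
// undefined so the generic JavaScript join can take over:
//
//   ["a", "b", "c"].join("")     // loop_1: empty separator
//   ["a", "b", "c"].join(",")    // loop_2: one-character separator
//   ["a", "b", "c"].join(", ")   // loop_3: separator longer than one char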
4146 
4147 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4148  if (expr->function() != NULL &&
4149  expr->function()->intrinsic_type == Runtime::INLINE) {
4150  Comment cmnt(masm_, "[ InlineRuntimeCall");
4151  EmitInlineRuntimeCall(expr);
4152  return;
4153  }
4154 
4155  Comment cmnt(masm_, "[ CallRuntime");
4156  ZoneList<Expression*>* args = expr->arguments();
4157  int arg_count = args->length();
4158 
4159  if (expr->is_jsruntime()) {
4160  // Push the builtins object as receiver.
4161  __ movp(rax, GlobalObjectOperand());
4163 
4164  // Load the function from the receiver.
4165  __ movp(rax, Operand(rsp, 0));
4166  __ Move(rcx, expr->name());
4167  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4168 
4169  // Push the target function under the receiver.
4170  __ Push(Operand(rsp, 0));
4171  __ movp(Operand(rsp, kPointerSize), rax);
4172 
4173  // Push the arguments ("left-to-right").
4174  for (int i = 0; i < arg_count; i++) {
4175  VisitForStackValue(args->at(i));
4176  }
4177 
4178  // Record source position of the IC call.
4179  SetSourcePosition(expr->position());
4180  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
4181  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4182  __ CallStub(&stub);
4183 
4184  // Restore context register.
4186  context()->DropAndPlug(1, rax);
4187 
4188  } else {
4189  // Push the arguments ("left-to-right").
4190  for (int i = 0; i < arg_count; i++) {
4191  VisitForStackValue(args->at(i));
4192  }
4193 
4194  // Call the C runtime.
4195  __ CallRuntime(expr->function(), arg_count);
4196  context()->Plug(rax);
4197  }
4198 }
4199 
4200 
4201 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4202  switch (expr->op()) {
4203  case Token::DELETE: {
4204  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4205  Property* property = expr->expression()->AsProperty();
4206  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4207 
4208  if (property != NULL) {
4209  VisitForStackValue(property->obj());
4210  VisitForStackValue(property->key());
4211  __ Push(Smi::FromInt(strict_mode()));
4212  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4213  context()->Plug(rax);
4214  } else if (proxy != NULL) {
4215  Variable* var = proxy->var();
4216  // Delete of an unqualified identifier is disallowed in strict mode
4217  // but "delete this" is allowed.
4218  ASSERT(strict_mode() == SLOPPY || var->is_this());
4219  if (var->IsUnallocated()) {
4220  __ Push(GlobalObjectOperand());
4221  __ Push(var->name());
4222  __ Push(Smi::FromInt(SLOPPY));
4223  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4224  context()->Plug(rax);
4225  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4226  // Result of deleting non-global variables is false. 'this' is
4227  // not really a variable, though we implement it as one. The
4228  // subexpression does not have side effects.
4229  context()->Plug(var->is_this());
4230  } else {
4231  // Non-global variable. Call the runtime to try to delete from the
4232  // context where the variable was introduced.
4233  __ Push(context_register());
4234  __ Push(var->name());
4235  __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4236  context()->Plug(rax);
4237  }
4238  } else {
4239  // Result of deleting non-property, non-variable reference is true.
4240  // The subexpression may have side effects.
4241  VisitForEffect(expr->expression());
4242  context()->Plug(true);
4243  }
4244  break;
4245  }
4246 
4247  case Token::VOID: {
4248  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4249  VisitForEffect(expr->expression());
4250  context()->Plug(Heap::kUndefinedValueRootIndex);
4251  break;
4252  }
4253 
4254  case Token::NOT: {
4255  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4256  if (context()->IsEffect()) {
4257  // Unary NOT has no side effects so it's only necessary to visit the
4258  // subexpression. Match the optimizing compiler by not branching.
4259  VisitForEffect(expr->expression());
4260  } else if (context()->IsTest()) {
4261  const TestContext* test = TestContext::cast(context());
4262  // The labels are swapped for the recursive call.
4263  VisitForControl(expr->expression(),
4264  test->false_label(),
4265  test->true_label(),
4266  test->fall_through());
4267  context()->Plug(test->true_label(), test->false_label());
4268  } else {
4269  // We handle value contexts explicitly rather than simply visiting
4270  // for control and plugging the control flow into the context,
4271  // because we need to prepare a pair of extra administrative AST ids
4272  // for the optimizing compiler.
4273  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4274  Label materialize_true, materialize_false, done;
4275  VisitForControl(expr->expression(),
4276  &materialize_false,
4277  &materialize_true,
4278  &materialize_true);
4279  __ bind(&materialize_true);
4280  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4281  if (context()->IsAccumulatorValue()) {
4282  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4283  } else {
4284  __ PushRoot(Heap::kTrueValueRootIndex);
4285  }
4286  __ jmp(&done, Label::kNear);
4287  __ bind(&materialize_false);
4288  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4289  if (context()->IsAccumulatorValue()) {
4290  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4291  } else {
4292  __ PushRoot(Heap::kFalseValueRootIndex);
4293  }
4294  __ bind(&done);
4295  }
4296  break;
4297  }
4298 
4299  case Token::TYPEOF: {
4300  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4301  { StackValueContext context(this);
4302  VisitForTypeofValue(expr->expression());
4303  }
4304  __ CallRuntime(Runtime::kTypeof, 1);
4305  context()->Plug(rax);
4306  break;
4307  }
4308 
4309  default:
4310  UNREACHABLE();
4311  }
4312 }
4313 
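// Summary of the unary cases above, with JavaScript examples:
//
//   delete obj.p    // property: push obj, key, strict_mode, invoke DELETE
//   delete x        // unqualified: only reaches here in sloppy mode (or 'this')
//   void expr       // evaluate for effect, plug undefined
//   !expr           // branch; true/false materialized only in value contexts
//   typeof expr     // evaluate typeof-safely, then Runtime::kTypeof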
4314 
4315 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4316  ASSERT(expr->expression()->IsValidLeftHandSide());
4317 
4318  Comment cmnt(masm_, "[ CountOperation");
4319  SetSourcePosition(expr->position());
4320 
4321  // Expression can only be a property, a global or a (parameter or local)
4322  // slot.
4323  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4324  LhsKind assign_type = VARIABLE;
4325  Property* prop = expr->expression()->AsProperty();
4326  // In case of a property we use the uninitialized expression context
4327  // of the key to detect a named property.
4328  if (prop != NULL) {
4329  assign_type =
4330  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4331  }
4332 
4333  // Evaluate expression and get value.
4334  if (assign_type == VARIABLE) {
4335  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4336  AccumulatorValueContext context(this);
4337  EmitVariableLoad(expr->expression()->AsVariableProxy());
4338  } else {
4339  // Reserve space for result of postfix operation.
4340  if (expr->is_postfix() && !context()->IsEffect()) {
4341  __ Push(Smi::FromInt(0));
4342  }
4343  if (assign_type == NAMED_PROPERTY) {
4344  VisitForAccumulatorValue(prop->obj());
4345  __ Push(rax); // Copy of receiver, needed for later store.
4346  EmitNamedPropertyLoad(prop);
4347  } else {
4348  VisitForStackValue(prop->obj());
4349  VisitForAccumulatorValue(prop->key());
4350  __ movp(rdx, Operand(rsp, 0)); // Leave receiver on stack
4351  __ Push(rax); // Copy of key, needed for later store.
4352  EmitKeyedPropertyLoad(prop);
4353  }
4354  }
4355 
4356  // We need a second deoptimization point after loading the value
4357  // in case evaluating the property load may have a side effect.
4358  if (assign_type == VARIABLE) {
4359  PrepareForBailout(expr->expression(), TOS_REG);
4360  } else {
4361  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4362  }
4363 
4364  // Inline smi case if we are in a loop.
4365  Label done, stub_call;
4366  JumpPatchSite patch_site(masm_);
4367  if (ShouldInlineSmiCase(expr->op())) {
4368  Label slow;
4369  patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4370 
4371  // Save result for postfix expressions.
4372  if (expr->is_postfix()) {
4373  if (!context()->IsEffect()) {
4374  // Save the result on the stack. If we have a named or keyed property
4375  // we store the result under the receiver that is currently on top
4376  // of the stack.
4377  switch (assign_type) {
4378  case VARIABLE:
4379  __ Push(rax);
4380  break;
4381  case NAMED_PROPERTY:
4382  __ movp(Operand(rsp, kPointerSize), rax);
4383  break;
4384  case KEYED_PROPERTY:
4385  __ movp(Operand(rsp, 2 * kPointerSize), rax);
4386  break;
4387  }
4388  }
4389  }
4390 
4391  SmiOperationExecutionMode mode;
4392  mode.Add(PRESERVE_SOURCE_REGISTER);
4393  mode.Add(BAILOUT_ON_NO_OVERFLOW);
4394  if (expr->op() == Token::INC) {
4395  __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4396  } else {
4397  __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4398  }
4399  __ jmp(&stub_call, Label::kNear);
4400  __ bind(&slow);
4401  }
4402 
4403  ToNumberStub convert_stub;
4404  __ CallStub(&convert_stub);
4405 
4406  // Save result for postfix expressions.
4407  if (expr->is_postfix()) {
4408  if (!context()->IsEffect()) {
4409  // Save the result on the stack. If we have a named or keyed property
4410  // we store the result under the receiver that is currently on top
4411  // of the stack.
4412  switch (assign_type) {
4413  case VARIABLE:
4414  __ Push(rax);
4415  break;
4416  case NAMED_PROPERTY:
4417  __ movp(Operand(rsp, kPointerSize), rax);
4418  break;
4419  case KEYED_PROPERTY:
4420  __ movp(Operand(rsp, 2 * kPointerSize), rax);
4421  break;
4422  }
4423  }
4424  }
4425 
4426  // Record position before stub call.
4427  SetSourcePosition(expr->position());
4428 
4429  // Call stub for +1/-1.
4430  __ bind(&stub_call);
4431  __ movp(rdx, rax);
4432  __ Move(rax, Smi::FromInt(1));
4433  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
4434  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4435  patch_site.EmitPatchInfo();
4436  __ bind(&done);
4437 
4438  // Store the value returned in rax.
4439  switch (assign_type) {
4440  case VARIABLE:
4441  if (expr->is_postfix()) {
4442  // Perform the assignment as if via '='.
4443  { EffectContext context(this);
4444  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4445  Token::ASSIGN);
4446  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4447  context.Plug(rax);
4448  }
4449  // For all contexts except kEffect: We have the result on
4450  // top of the stack.
4451  if (!context()->IsEffect()) {
4452  context()->PlugTOS();
4453  }
4454  } else {
4455  // Perform the assignment as if via '='.
4456  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4457  Token::ASSIGN);
4458  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4459  context()->Plug(rax);
4460  }
4461  break;
4462  case NAMED_PROPERTY: {
4463  __ Move(rcx, prop->key()->AsLiteral()->value());
4464  __ Pop(rdx);
4465  CallStoreIC(expr->CountStoreFeedbackId());
4466  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4467  if (expr->is_postfix()) {
4468  if (!context()->IsEffect()) {
4469  context()->PlugTOS();
4470  }
4471  } else {
4472  context()->Plug(rax);
4473  }
4474  break;
4475  }
4476  case KEYED_PROPERTY: {
4477  __ Pop(rcx);
4478  __ Pop(rdx);
4479  Handle<Code> ic = strict_mode() == SLOPPY
4480  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4481  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4482  CallIC(ic, expr->CountStoreFeedbackId());
4483  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4484  if (expr->is_postfix()) {
4485  if (!context()->IsEffect()) {
4486  context()->PlugTOS();
4487  }
4488  } else {
4489  context()->Plug(rax);
4490  }
4491  break;
4492  }
4493  }
4494 }
4495 
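// Count operations (x++, ++x, x--, --x) first try an inlined smi add/sub of 1
// and fall back to ToNumberStub plus the BinaryOpIC when the operand is not a
// smi or the smi operation overflows. For postfix forms the original value is
// kept on the stack so the expression yields the value before the update:
//
//   var x = 41; x++;    // expression value 41, x becomes 42 (smi fast path)
//   var y = "1"; y++;   // ToNumber + BinaryOpIC, y becomes 2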
4496 
4497 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4498  VariableProxy* proxy = expr->AsVariableProxy();
4499  ASSERT(!context()->IsEffect());
4500  ASSERT(!context()->IsTest());
4501 
4502  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4503  Comment cmnt(masm_, "[ Global variable");
4504  __ Move(rcx, proxy->name());
4505  __ movp(rax, GlobalObjectOperand());
4506  // Use a regular load, not a contextual load, to avoid a reference
4507  // error.
4508  CallLoadIC(NOT_CONTEXTUAL);
4509  PrepareForBailout(expr, TOS_REG);
4510  context()->Plug(rax);
4511  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4512  Comment cmnt(masm_, "[ Lookup slot");
4513  Label done, slow;
4514 
4515  // Generate code for loading from variables potentially shadowed
4516  // by eval-introduced variables.
4517  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4518 
4519  __ bind(&slow);
4520  __ Push(rsi);
4521  __ Push(proxy->name());
4522  __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4523  PrepareForBailout(expr, TOS_REG);
4524  __ bind(&done);
4525 
4526  context()->Plug(rax);
4527  } else {
4528  // This expression cannot throw a reference error at the top level.
4529  VisitInDuplicateContext(expr);
4530  }
4531 }
4532 
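// typeof must not throw for unresolved globals, which is why the global case
// uses a plain (non-contextual) load and the lookup-slot case calls the
// NoReferenceError runtime variant:
//
//   typeof notDeclaredAnywhere   // "undefined", no ReferenceError
//   notDeclaredAnywhere          // a normal load would throw ReferenceError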
4533 
4534 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4535  Expression* sub_expr,
4536  Handle<String> check) {
4537  Label materialize_true, materialize_false;
4538  Label* if_true = NULL;
4539  Label* if_false = NULL;
4540  Label* fall_through = NULL;
4541  context()->PrepareTest(&materialize_true, &materialize_false,
4542  &if_true, &if_false, &fall_through);
4543 
4544  { AccumulatorValueContext context(this);
4545  VisitForTypeofValue(sub_expr);
4546  }
4547  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4548 
4549  if (check->Equals(isolate()->heap()->number_string())) {
4550  __ JumpIfSmi(rax, if_true);
4552  __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4553  Split(equal, if_true, if_false, fall_through);
4554  } else if (check->Equals(isolate()->heap()->string_string())) {
4555  __ JumpIfSmi(rax, if_false);
4556  // Check for undetectable objects => false.
4557  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4558  __ j(above_equal, if_false);
4559  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4560  Immediate(1 << Map::kIsUndetectable));
4561  Split(zero, if_true, if_false, fall_through);
4562  } else if (check->Equals(isolate()->heap()->symbol_string())) {
4563  __ JumpIfSmi(rax, if_false);
4564  __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4565  Split(equal, if_true, if_false, fall_through);
4566  } else if (check->Equals(isolate()->heap()->boolean_string())) {
4567  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4568  __ j(equal, if_true);
4569  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4570  Split(equal, if_true, if_false, fall_through);
4571  } else if (FLAG_harmony_typeof &&
4572  check->Equals(isolate()->heap()->null_string())) {
4573  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4574  Split(equal, if_true, if_false, fall_through);
4575  } else if (check->Equals(isolate()->heap()->undefined_string())) {
4576  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4577  __ j(equal, if_true);
4578  __ JumpIfSmi(rax, if_false);
4579  // Check for undetectable objects => true.
4581  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4582  Immediate(1 << Map::kIsUndetectable));
4583  Split(not_zero, if_true, if_false, fall_through);
4584  } else if (check->Equals(isolate()->heap()->function_string())) {
4585  __ JumpIfSmi(rax, if_false);
4587  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4588  __ j(equal, if_true);
4589  __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4590  Split(equal, if_true, if_false, fall_through);
4591  } else if (check->Equals(isolate()->heap()->object_string())) {
4592  __ JumpIfSmi(rax, if_false);
4593  if (!FLAG_harmony_typeof) {
4594  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4595  __ j(equal, if_true);
4596  }
4597  __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4598  __ j(below, if_false);
4599  __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4600  __ j(above, if_false);
4601  // Check for undetectable objects => false.
4602  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4603  Immediate(1 << Map::kIsUndetectable));
4604  Split(zero, if_true, if_false, fall_through);
4605  } else {
4606  if (if_false != fall_through) __ jmp(if_false);
4607  }
4608  context()->Plug(if_true, if_false);
4609 }
4610 
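// Handles comparisons of typeof against a string literal without building the
// typeof result string, e.g.:
//
//   typeof x === "number"      // smi check, then heap-number map check
//   typeof x === "undefined"   // undefined root or an undetectable object
//   typeof x === "function"    // JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
//
// Any other literal (e.g. typeof x === "nonsense") jumps straight to if_false.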
4611 
4612 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4613  Comment cmnt(masm_, "[ CompareOperation");
4614  SetSourcePosition(expr->position());
4615 
4616  // First we try a fast inlined version of the compare when one of
4617  // the operands is a literal.
4618  if (TryLiteralCompare(expr)) return;
4619 
4620  // Always perform the comparison for its control flow. Pack the result
4621  // into the expression's context after the comparison is performed.
4622  Label materialize_true, materialize_false;
4623  Label* if_true = NULL;
4624  Label* if_false = NULL;
4625  Label* fall_through = NULL;
4626  context()->PrepareTest(&materialize_true, &materialize_false,
4627  &if_true, &if_false, &fall_through);
4628 
4629  Token::Value op = expr->op();
4630  VisitForStackValue(expr->left());
4631  switch (op) {
4632  case Token::IN:
4633  VisitForStackValue(expr->right());
4634  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4635  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4636  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4637  Split(equal, if_true, if_false, fall_through);
4638  break;
4639 
4640  case Token::INSTANCEOF: {
4641  VisitForStackValue(expr->right());
4642  InstanceofStub stub(InstanceofStub::kNoFlags);
4643  __ CallStub(&stub);
4644  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4645  __ testp(rax, rax);
4646  // The stub returns 0 for true.
4647  Split(zero, if_true, if_false, fall_through);
4648  break;
4649  }
4650 
4651  default: {
4652  VisitForAccumulatorValue(expr->right());
4654  __ Pop(rdx);
4655 
4656  bool inline_smi_code = ShouldInlineSmiCase(op);
4657  JumpPatchSite patch_site(masm_);
4658  if (inline_smi_code) {
4659  Label slow_case;
4660  __ movp(rcx, rdx);
4661  __ orp(rcx, rax);
4662  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4663  __ cmpp(rdx, rax);
4664  Split(cc, if_true, if_false, NULL);
4665  __ bind(&slow_case);
4666  }
4667 
4668  // Record position and call the compare IC.
4669  SetSourcePosition(expr->position());
4670  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4671  CallIC(ic, expr->CompareOperationFeedbackId());
4672  patch_site.EmitPatchInfo();
4673 
4674  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4675  __ testp(rax, rax);
4676  Split(cc, if_true, if_false, fall_through);
4677  }
4678  }
4679 
4680  // Convert the result of the comparison into one expected for this
4681  // expression's context.
4682  context()->Plug(if_true, if_false);
4683 }
4684 
4685 
4686 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4687  Expression* sub_expr,
4688  NilValue nil) {
4689  Label materialize_true, materialize_false;
4690  Label* if_true = NULL;
4691  Label* if_false = NULL;
4692  Label* fall_through = NULL;
4693  context()->PrepareTest(&materialize_true, &materialize_false,
4694  &if_true, &if_false, &fall_through);
4695 
4696  VisitForAccumulatorValue(sub_expr);
4697  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4698  if (expr->op() == Token::EQ_STRICT) {
4699  Heap::RootListIndex nil_value = nil == kNullValue ?
4700  Heap::kNullValueRootIndex :
4701  Heap::kUndefinedValueRootIndex;
4702  __ CompareRoot(rax, nil_value);
4703  Split(equal, if_true, if_false, fall_through);
4704  } else {
4705  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4706  CallIC(ic, expr->CompareOperationFeedbackId());
4707  __ testp(rax, rax);
4708  Split(not_zero, if_true, if_false, fall_through);
4709  }
4710  context()->Plug(if_true, if_false);
4711 }
4712 
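// Literal comparisons against null/undefined come in two shapes:
//
//   x === null   // strict: one CompareRoot against the null root
//   x == null    // loose: CompareNilIC, which also accepts undefined and
//                //        undetectable objects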
4713 
4714 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4716  context()->Plug(rax);
4717 }
4718 
4719 
4720 Register FullCodeGenerator::result_register() {
4721  return rax;
4722 }
4723 
4724 
4725 Register FullCodeGenerator::context_register() {
4726  return rsi;
4727 }
4728 
4729 
4730 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4731  ASSERT(IsAligned(frame_offset, kPointerSize));
4732  __ movp(Operand(rbp, frame_offset), value);
4733 }
4734 
4735 
4736 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4737  __ movp(dst, ContextOperand(rsi, context_index));
4738 }
4739 
4740 
4741 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4742  Scope* declaration_scope = scope()->DeclarationScope();
4743  if (declaration_scope->is_global_scope() ||
4744  declaration_scope->is_module_scope()) {
4745  // Contexts nested in the native context have a canonical empty function
4746  // as their closure, not the anonymous closure containing the global
4747  // code. Pass a smi sentinel and let the runtime look up the empty
4748  // function.
4749  __ Push(Smi::FromInt(0));
4750  } else if (declaration_scope->is_eval_scope()) {
4751  // Contexts created by a call to eval have the same closure as the
4752  // context calling eval, not the anonymous closure containing the eval
4753  // code. Fetch it from the context.
4755  } else {
4756  ASSERT(declaration_scope->is_function_scope());
4758  }
4759 }
4760 
4761 
4762 // ----------------------------------------------------------------------------
4763 // Non-local control flow support.
4764 
4765 
4766 void FullCodeGenerator::EnterFinallyBlock() {
4767  ASSERT(!result_register().is(rdx));
4768  ASSERT(!result_register().is(rcx));
4769  // Cook return address on top of stack (smi encoded Code* delta)
4770  __ PopReturnAddressTo(rdx);
4771  __ Move(rcx, masm_->CodeObject());
4772  __ subp(rdx, rcx);
4773  __ Integer32ToSmi(rdx, rdx);
4774  __ Push(rdx);
4775 
4776  // Store result register while executing finally block.
4777  __ Push(result_register());
4778 
4779  // Store pending message while executing finally block.
4780  ExternalReference pending_message_obj =
4781  ExternalReference::address_of_pending_message_obj(isolate());
4782  __ Load(rdx, pending_message_obj);
4783  __ Push(rdx);
4784 
4785  ExternalReference has_pending_message =
4786  ExternalReference::address_of_has_pending_message(isolate());
4787  __ Load(rdx, has_pending_message);
4788  __ Integer32ToSmi(rdx, rdx);
4789  __ Push(rdx);
4790 
4791  ExternalReference pending_message_script =
4792  ExternalReference::address_of_pending_message_script(isolate());
4793  __ Load(rdx, pending_message_script);
4794  __ Push(rdx);
4795 }
4796 
4797 
4798 void FullCodeGenerator::ExitFinallyBlock() {
4799  ASSERT(!result_register().is(rdx));
4800  ASSERT(!result_register().is(rcx));
4801  // Restore pending message from stack.
4802  __ Pop(rdx);
4803  ExternalReference pending_message_script =
4804  ExternalReference::address_of_pending_message_script(isolate());
4805  __ Store(pending_message_script, rdx);
4806 
4807  __ Pop(rdx);
4808  __ SmiToInteger32(rdx, rdx);
4809  ExternalReference has_pending_message =
4810  ExternalReference::address_of_has_pending_message(isolate());
4811  __ Store(has_pending_message, rdx);
4812 
4813  __ Pop(rdx);
4814  ExternalReference pending_message_obj =
4815  ExternalReference::address_of_pending_message_obj(isolate());
4816  __ Store(pending_message_obj, rdx);
4817 
4818  // Restore result register from stack.
4819  __ Pop(result_register());
4820 
4821  // Uncook return address.
4822  __ Pop(rdx);
4823  __ SmiToInteger32(rdx, rdx);
4824  __ Move(rcx, masm_->CodeObject());
4825  __ addp(rdx, rcx);
4826  __ jmp(rdx);
4827 }
4828 
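// Enter/ExitFinallyBlock "cook" the return address as a smi-encoded delta from
// the code object, so a moving GC during the finally body cannot invalidate
// it, and save/restore the pending message state around the block. Roughly:
//
//   cooked = return_address - code_object        // smi-tagged, GC-safe
//   ... finally body runs (may allocate / GC) ...
//   return_address = cooked + code_object        // uncooked before jumping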
4829 
4830 #undef __
4831 
4832 #define __ ACCESS_MASM(masm())
4833 
4834 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4835  int* stack_depth,
4836  int* context_length) {
4837  // The macros used here must preserve the result register.
4838 
4839  // Because the handler block contains the context of the finally
4840  // code, we can restore it directly from there for the finally code
4841  // rather than iteratively unwinding contexts via their previous
4842  // links.
4843  __ Drop(*stack_depth); // Down to the handler block.
4844  if (*context_length > 0) {
4845  // Restore the context to its dedicated register and the stack.
4848  }
4849  __ PopTryHandler();
4850  __ call(finally_entry_);
4851 
4852  *stack_depth = 0;
4853  *context_length = 0;
4854  return previous_;
4855 }
4856 
4857 
4858 #undef __
4859 
4860 
4861 static const byte kJnsInstruction = 0x79;
4862 static const byte kJnsOffset = 0x1d;
4863 static const byte kNopByteOne = 0x66;
4864 static const byte kNopByteTwo = 0x90;
4865 #ifdef DEBUG
4866 static const byte kCallInstruction = 0xe8;
4867 #endif
4868 
4869 
4870 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4871  Address pc,
4872  BackEdgeState target_state,
4873  Code* replacement_code) {
4874  Address call_target_address = pc - kIntSize;
4875  Address jns_instr_address = call_target_address - 3;
4876  Address jns_offset_address = call_target_address - 2;
4877 
4878  switch (target_state) {
4879  case INTERRUPT:
4880  // sub <profiling_counter>, <delta> ;; Not changed
4881  // jns ok
4882  // call <interrupt stub>
4883  // ok:
4884  *jns_instr_address = kJnsInstruction;
4885  *jns_offset_address = kJnsOffset;
4886  break;
4887  case ON_STACK_REPLACEMENT:
4888  case OSR_AFTER_STACK_CHECK:
4889  // sub <profiling_counter>, <delta> ;; Not changed
4890  // nop
4891  // nop
4892  // call <on-stack replacement>
4893  // ok:
4894  *jns_instr_address = kNopByteOne;
4895  *jns_offset_address = kNopByteTwo;
4896  break;
4897  }
4898 
4899  Assembler::set_target_address_at(call_target_address,
4900  unoptimized_code,
4901  replacement_code->entry());
4902  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4903  unoptimized_code, call_target_address, replacement_code);
4904 }
4905 
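// The patch flips two bytes in the back-edge check sequence; with the
// constants above the two states are (byte-level sketch):
//
//   INTERRUPT:             79 1d  e8 <interrupt stub>   ; jns ok, then call
//   ON_STACK_REPLACEMENT:  66 90  e8 <OSR builtin>      ; 2-byte nop, then call
//
// so the sequence keeps the same length and only the call target and the two
// leading bytes change.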
4906 
4907 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4908  Isolate* isolate,
4909  Code* unoptimized_code,
4910  Address pc) {
4911  Address call_target_address = pc - kIntSize;
4912  Address jns_instr_address = call_target_address - 3;
4913  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4914 
4915  if (*jns_instr_address == kJnsInstruction) {
4916  ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4917  ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4918  Assembler::target_address_at(call_target_address,
4919  unoptimized_code));
4920  return INTERRUPT;
4921  }
4922 
4923  ASSERT_EQ(kNopByteOne, *jns_instr_address);
4924  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4925 
4926  if (Assembler::target_address_at(call_target_address,
4927  unoptimized_code) ==
4928  isolate->builtins()->OnStackReplacement()->entry()) {
4929  return ON_STACK_REPLACEMENT;
4930  }
4931 
4932  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4933  Assembler::target_address_at(call_target_address,
4934  unoptimized_code));
4935  return OSR_AFTER_STACK_CHECK;
4936 }
4937 
4938 
4939 } } // namespace v8::internal
4940 
4941 #endif // V8_TARGET_ARCH_X64
static const int kFunctionOffset
Definition: objects.h:7324
byte * Address
Definition: globals.h:186
const Register rdx
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
static RelocInfo::Mode RelocInfoNone()
static const int kHashFieldOffset
Definition: objects.h:8629
static const int kBitFieldOffset
Definition: objects.h:6461
Scope * DeclarationScope()
Definition: scopes.cc:743
Isolate * isolate() const
Definition: assembler.h:62
const intptr_t kSmiTagMask
Definition: v8.h:5480
static const int kForInFastCaseMarker
Definition: objects.h:8230
VariableDeclaration * function() const
Definition: scopes.h:326
static const int kCodeEntryOffset
Definition: objects.h:7518
static const int kValueOffset
Definition: objects.h:9547
static int SlotOffset(int index)
Definition: contexts.h:498
static const int kBuiltinsOffset
Definition: objects.h:7610
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2385
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
static const int kEnumCacheOffset
Definition: objects.h:3499
static String * cast(Object *obj)
const uint32_t kTwoByteStringTag
Definition: objects.h:610
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
static const int kResultValuePropertyOffset
Definition: objects.h:7342
bool IsFastObjectElementsKind(ElementsKind kind)
const Register rbp
static TypeFeedbackId None()
Definition: utils.h:1149
static Handle< Code > GetUninitialized(Isolate *isolate, Token::Value op)
Definition: ic.cc:2489
static const int kGlobalReceiverOffset
Definition: objects.h:7613
T Max(T a, T b)
Definition: utils.h:227
Scope * outer_scope() const
Definition: scopes.h:350
const Register rsi
static const int kGeneratorClosed
Definition: objects.h:7321
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8673
static const int kForInSlowCaseMarker
Definition: objects.h:8231
static bool enabled()
Definition: serialize.h:485
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static const int kSize
Definition: objects.h:7922
static const int kResultDonePropertyOffset
Definition: objects.h:7343
SmiIndex SmiToIndex(Register dst, Register src, int shift)
#define ASSERT(condition)
Definition: checks.h:329
static const int kContextOffset
Definition: frames.h:185
const int kPointerSizeLog2
Definition: globals.h:281
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:121
static const int kInObjectFieldCount
Definition: objects.h:7976
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3090
const uint32_t kStringRepresentationMask
Definition: objects.h:615
static const int kReceiverOffset
Definition: objects.h:7326
MemOperand GlobalObjectOperand()
static const int kCallerFPOffset
Definition: frames.h:188
static const int kInstanceClassNameOffset
Definition: objects.h:7107
Factory * factory()
Definition: isolate.h:995
bool IsOptimizable() const
Definition: compiler.h:232
Variable * parameter(int index) const
Definition: scopes.h:333
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
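As a hedged aside, ContextOperand pairs with Context::SlotOffset (listed above) to address a slot of a context object through its tagged pointer. The sketch below restates that arithmetic with assumed constants (pointer width, header size, pointer tag); none of the values are copied from the V8 headers.
#include <cstdint>

// Hedged sketch of the Context::SlotOffset / ContextOperand arithmetic:
// a context is laid out like a FixedArray (header followed by pointer-sized
// slots) behind a tagged pointer. All sizes below are assumptions for an
// x64-style layout, not values taken from objects.h.
constexpr int kPointerSizeSketch   = 8;   // assumed pointer width on x64
constexpr int kHeaderSizeSketch    = 16;  // assumed array header size
constexpr int kHeapObjectTagSketch = 1;   // assumed low tag on heap pointers

// Byte displacement of slot 'index' relative to a tagged context pointer.
int PseudoSlotOffset(int index) {
  return kHeaderSizeSketch + index * kPointerSizeSketch - kHeapObjectTagSketch;
}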
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
const int kIntSize
Definition: globals.h:263
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
Definition: scopes.cc:721
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
uint8_t byte
Definition: globals.h:185
#define IN
static const int kLiteralsOffset
Definition: objects.h:7524
#define UNREACHABLE()
Definition: checks.h:52
V8 runtime flag declaration: DEFINE_bool(enable_unaligned_accesses, ...) (preceding flag help strings concatenated by the documentation generator)
V8 runtime flag declaration: DEFINE_string(expose_natives_as, ...) (preceding flag help strings concatenated by the documentation generator)
static Condition ComputeCondition(Token::Value op)
V8 runtime flag declaration (flag help strings concatenated by the documentation generator; symbol name not recoverable)
Definition: flags.cc:211
static const int kLengthOffset
Definition: objects.h:8905
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
V8 runtime flag declaration (flag help strings concatenated by the documentation generator; symbol name not recoverable)
Definition: flags.cc:665
Variable * arguments() const
Definition: scopes.h:341
static const int kFirstOffset
Definition: objects.h:3500
NilValue
Definition: v8.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1278
static BailoutId Declarations()
Definition: utils.h:1166
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
const int kPointerSize
Definition: globals.h:268
void check(i::Vector< const uint8_t > string)
static const int kJSReturnSequenceLength
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6478
static void MaybeCallEntryHook(MacroAssembler *masm)
Operand FieldOperand(Register object, int offset)
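As a hedged aside, FieldOperand builds a memory operand for a field of a tagged heap object. The standalone sketch below shows only the idea (folding removal of the heap-object tag into the displacement); kHeapObjectTagSketch and PseudoOperand are illustrative assumptions, not the V8 definitions.
#include <cstdint>

// Hedged sketch of the FieldOperand idea: heap object pointers carry a low
// tag, so a field at byte offset 'offset' is addressed as
// [object + offset - tag], avoiding a separate untag instruction.
constexpr int kHeapObjectTagSketch = 1;  // assumed low tag on heap pointers

struct PseudoOperand {       // stand-in for the assembler's Operand type
  intptr_t base_register;    // placeholder for the base register
  int displacement;          // byte displacement applied to the base
};

PseudoOperand PseudoFieldOperand(intptr_t tagged_object, int offset) {
  return PseudoOperand{tagged_object, offset - kHeapObjectTagSketch};
}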
bool IsAligned(T value, U alignment)
Definition: utils.h:211
const Register rbx
const Register rsp
V8 runtime flag declaration: DEFINE_bool(code_comments, ...) (preceding flag help strings concatenated by the documentation generator)
#define __
const int kFPOnStackSize
Definition: globals.h:271
static const int kCallerSPOffset
Definition: frames.h:190
static const int kCacheStampOffset
Definition: objects.h:7787
const Register pc
static const int kDescriptorSize
Definition: objects.h:3509
static const int kPropertiesOffset
Definition: objects.h:2755
int num_parameters() const
Definition: scopes.h:338
const Register rax
static const int kMarkerOffset
Definition: frames.h:184
static const int kExpressionsOffset
Definition: frames.h:183
const Register rdi
static const int kHeaderSize
Definition: objects.h:9042
static const int kElementsOffset
Definition: objects.h:2756
static BailoutId FunctionEntry()
Definition: utils.h:1165
const uint32_t kStringTag
Definition: objects.h:598
#define BASE_EMBEDDED
Definition: allocation.h:68
OverwriteMode
Definition: ic.h:690
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:503
Vector< const char > CStrVector(const char *data)
Definition: utils.h:574
static int OffsetOfElementAt(int index)
Definition: objects.h:3070
static const int kLengthOffset
Definition: objects.h:10076
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:3016
Scope * GlobalScope()
Definition: scopes.cc:734
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1406
static const int kContextOffset
Definition: objects.h:7325
void Load(const v8::FunctionCallbackInfo< v8::Value > &args)
Definition: shell.cc:171
const Register arg_reg_1
static const int kMapOffset
Definition: objects.h:1890
static const int kValueOffset
Definition: objects.h:7779
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3503
const uint32_t kIsNotStringMask
Definition: objects.h:597
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:545
static const int kLengthOffset
Definition: objects.h:3015
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:6675
V8 runtime flag declaration (flag help strings concatenated by the documentation generator; symbol name not recoverable)
Definition: flags.cc:317
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:206
static const int kContextOffset
Definition: frames.h:97
const Register kScratchRegister
static const int kFormalParameterCountOffset
Definition: objects.h:7156
const int kSmiTagSize
Definition: v8.h:5479
void CopyBytes(uint8_t *target, uint8_t *source)
Definition: runtime.cc:1309
const Register r8
const Register rcx
static const int kGeneratorExecuting
Definition: objects.h:7320
Condition NegateCondition(Condition cond)
static bool ShouldGenerateLog(Isolate *isolate, Expression *type)
Definition: codegen.cc:191
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
static const int kContinuationOffset
Definition: objects.h:7327
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:996
static const int kConstructorOffset
Definition: objects.h:6428
const Register arg_reg_2
const uint32_t kOneByteStringTag
Definition: objects.h:611
Condition CheckNonNegativeSmi(Register src)
const int kSmiTag
Definition: v8.h:5478
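As a hedged aside, kSmiTag, kSmiTagSize, and kSmiTagMask describe the small-integer (Smi) tagging scheme this file relies on. The sketch below illustrates the tag check with assumed values (zero tag, one tag bit) that match common V8 configurations; the constants are restated as assumptions, not quoted from the headers, and the encoding shift shown is the 32-bit-style one used purely for illustration.
#include <cassert>
#include <cstdint>

// Hedged sketch of the Smi tag check. Values are assumptions, not quotes.
constexpr intptr_t kSmiTagSketch = 0;    // assumed: Smis carry a zero tag bit
constexpr int kSmiTagSizeSketch = 1;     // assumed: one low tag bit
constexpr intptr_t kSmiTagMaskSketch = (intptr_t{1} << kSmiTagSizeSketch) - 1;

// A tagged word is a Smi when its low tag bits equal the Smi tag.
bool IsSmiWord(intptr_t word) {
  return (word & kSmiTagMaskSketch) == kSmiTagSketch;
}

int main() {
  intptr_t tagged = intptr_t{42} << kSmiTagSizeSketch;  // illustrative encoding
  assert(IsSmiWord(tagged));
  assert(!IsSmiWord(tagged | 1));  // a set tag bit marks a heap pointer instead
  return 0;
}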
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
static const int kIsUndetectable
Definition: objects.h:6472
static const int kPrototypeOffset
Definition: objects.h:6427
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target)
static const int kHashShift
Definition: objects.h:8642
const Register no_reg
static const int kValueOffset
Definition: objects.h:7701
bool IsImmutableVariableMode(VariableMode mode)
Definition: v8globals.h:513
static const int kNativeContextOffset
Definition: objects.h:7611
void AddNoFrameRange(int from, int to)
Definition: compiler.h:296
const int kPCOnStackSize
Definition: globals.h:270
T Min(T a, T b)
Definition: utils.h:234
static const int kSharedFunctionInfoOffset
Definition: objects.h:7521
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:2121
static const int kBitField2Offset
Definition: objects.h:6462
V8 runtime flag declaration (flag help strings concatenated by the documentation generator; symbol name not recoverable)
Definition: flags.cc:505
#define VOID
static const int kExponentOffset
Definition: objects.h:1977
const uint32_t kStringEncodingMask
Definition: objects.h:609
static const int kInstanceTypeOffset
Definition: objects.h:6459
static const int kOperandStackOffset
Definition: objects.h:7328
static const int kMantissaOffset
Definition: objects.h:1976
TypeofState
Definition: codegen.h:69
Scope * scope() const
Definition: compiler.h:78