v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
full-codegen-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance near_jump = Label::kFar) {
63  __ testb(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, near_jump); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance near_jump = Label::kFar) {
70  __ testb(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, near_jump); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ testl(rax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, near_jump);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
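// A brief sketch of the patching protocol above, as described by the class's
// own comments: "testb reg, kSmiTagMask" always clears the carry flag, so in
// unpatched code the jc emitted by EmitJumpIfSmi is never taken and the jnc
// emitted by EmitJumpIfNotSmi is always taken. Once type feedback is
// available, the IC patcher rewrites jc to jz and jnc to jnz, turning the
// branch into a real smi check keyed off the zero flag that the testb sets.
// EmitPatchInfo encodes the distance back to the patch site as the 8-bit
// immediate of "testl rax, imm8" so the patcher can locate the jump; a plain
// nop signals that no inlined smi code was emitted.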
102 
103 
104 // Generate code for a JS function. On entry to the function the receiver
105 // and arguments have been pushed on the stack left to right, with the
106 // return address on top of them. The actual argument count matches the
107 // formal parameter count expected by the function.
108 //
109 // The live registers are:
110 // o rdi: the JS function object being called (i.e. ourselves)
111 // o rsi: our context
112 // o rbp: our caller's frame pointer
113 // o rsp: stack pointer (pointing to return address)
114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-x64.h for its layout.
117 void FullCodeGenerator::Generate() {
118  CompilationInfo* info = info_;
119  handler_table_ =
120  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
122  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
123  SetFunctionPosition(function());
124  Comment cmnt(masm_, "[ function compiled by full code generator");
125 
126 #ifdef DEBUG
127  if (strlen(FLAG_stop_at) > 0 &&
128  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
129  __ int3();
130  }
131 #endif
132 
133  // Strict mode functions and builtins need to replace the receiver
134  // with undefined when called as functions (without an explicit
135  // receiver object). rcx is zero for method calls and non-zero for
136  // function calls.
137  if (!info->is_classic_mode() || info->is_native()) {
138  Label ok;
139  __ testq(rcx, rcx);
140  __ j(zero, &ok, Label::kNear);
141  // +1 for return address.
142  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
143  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
144  __ movq(Operand(rsp, receiver_offset), kScratchRegister);
145  __ bind(&ok);
146  }
147 
148  // Open a frame scope to indicate that there is a frame on the stack. The
149  // MANUAL indicates that the scope shouldn't actually generate code to set up
150  // the frame (that is done below).
151  FrameScope frame_scope(masm_, StackFrame::MANUAL);
152 
153  __ push(rbp); // Caller's frame pointer.
154  __ movq(rbp, rsp);
155  __ push(rsi); // Callee's context.
156  __ push(rdi); // Callee's JS Function.
157 
158  { Comment cmnt(masm_, "[ Allocate locals");
159  int locals_count = info->scope()->num_stack_slots();
160  if (locals_count == 1) {
161  __ PushRoot(Heap::kUndefinedValueRootIndex);
162  } else if (locals_count > 1) {
163  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
164  for (int i = 0; i < locals_count; i++) {
165  __ push(rdx);
166  }
167  }
168  }
169 
170  bool function_in_register = true;
171 
172  // Possibly allocate a local context.
173  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
174  if (heap_slots > 0) {
175  Comment cmnt(masm_, "[ Allocate local context");
176  // Argument to NewContext is the function, which is still in rdi.
177  __ push(rdi);
178  if (heap_slots <= FastNewContextStub::kMaximumSlots) {
179  FastNewContextStub stub(heap_slots);
180  __ CallStub(&stub);
181  } else {
182  __ CallRuntime(Runtime::kNewFunctionContext, 1);
183  }
184  function_in_register = false;
185  // Context is returned in both rax and rsi. It replaces the context
186  // passed to us. It's saved in the stack and kept live in rsi.
187  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
188
189  // Copy any necessary parameters into the context.
190  int num_parameters = info->scope()->num_parameters();
191  for (int i = 0; i < num_parameters; i++) {
192  Variable* var = scope()->parameter(i);
193  if (var->IsContextSlot()) {
194  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
195  (num_parameters - 1 - i) * kPointerSize;
196  // Load parameter from stack.
197  __ movq(rax, Operand(rbp, parameter_offset));
198  // Store it in the context.
199  int context_offset = Context::SlotOffset(var->index());
200  __ movq(Operand(rsi, context_offset), rax);
201  // Update the write barrier. This clobbers rax and rbx.
202  __ RecordWriteContextSlot(
203  rsi, context_offset, rax, rbx, kDontSaveFPRegs);
204  }
205  }
206  }
207 
208  // Possibly allocate an arguments object.
209  Variable* arguments = scope()->arguments();
210  if (arguments != NULL) {
211  // Arguments object must be allocated after the context object, in
212  // case the "arguments" or ".arguments" variables are in the context.
213  Comment cmnt(masm_, "[ Allocate arguments object");
214  if (function_in_register) {
215  __ push(rdi);
216  } else {
217  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
218  }
219  // The receiver is just before the parameters on the caller's stack.
220  int num_parameters = info->scope()->num_parameters();
221  int offset = num_parameters * kPointerSize;
222  __ lea(rdx,
223  Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
224  __ push(rdx);
225  __ Push(Smi::FromInt(num_parameters));
226  // Arguments to ArgumentsAccessStub:
227  // function, receiver address, parameter count.
228  // The stub will rewrite receiver and parameter count if the previous
229  // stack frame was an arguments adapter frame.
230  ArgumentsAccessStub::Type type;
231  if (!is_classic_mode()) {
232  type = ArgumentsAccessStub::NEW_STRICT;
233  } else if (function()->has_duplicate_parameters()) {
234  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
235  } else {
236  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
237  }
238  ArgumentsAccessStub stub(type);
239  __ CallStub(&stub);
240 
241  SetVar(arguments, rax, rbx, rdx);
242  }
243 
244  if (FLAG_trace) {
245  __ CallRuntime(Runtime::kTraceEnter, 0);
246  }
247 
248  // Visit the declarations and body unless there is an illegal
249  // redeclaration.
250  if (scope()->HasIllegalRedeclaration()) {
251  Comment cmnt(masm_, "[ Declarations");
252  scope()->VisitIllegalRedeclaration(this);
253 
254  } else {
255  PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
256  { Comment cmnt(masm_, "[ Declarations");
257  // For named function expressions, declare the function name as a
258  // constant.
259  if (scope()->is_function_scope() && scope()->function() != NULL) {
260  VariableDeclaration* function = scope()->function();
261  ASSERT(function->proxy()->var()->mode() == CONST ||
262  function->proxy()->var()->mode() == CONST_HARMONY);
263  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
264  VisitVariableDeclaration(function);
265  }
266  VisitDeclarations(scope()->declarations());
267  }
268 
269  { Comment cmnt(masm_, "[ Stack check");
270  PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
271  Label ok;
272  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
273  __ j(above_equal, &ok, Label::kNear);
274  StackCheckStub stub;
275  __ CallStub(&stub);
276  __ bind(&ok);
277  }
278 
279  { Comment cmnt(masm_, "[ Body");
280  ASSERT(loop_depth() == 0);
281  VisitStatements(function()->body());
282  ASSERT(loop_depth() == 0);
283  }
284  }
285 
286  // Always emit a 'return undefined' in case control fell off the end of
287  // the body.
288  { Comment cmnt(masm_, "[ return <undefined>;");
289  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
290  EmitReturnSequence();
291  }
292 }
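// For reference, the frame built by the prologue above (see also
// JavaScriptFrameConstants in frames-x64.h): rbp+16 and up hold the
// arguments and receiver, rbp+8 the return address, rbp+0 the saved frame
// pointer, rbp-8 the context (also kept live in rsi), rbp-16 the JSFunction
// (also live in rdi until clobbered), and rbp-24 downwards the stack
// locals, each initialized to undefined.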
293 
294 
295 void FullCodeGenerator::ClearAccumulator() {
296  __ Set(rax, 0);
297 }
298 
299 
300 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
301  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
302  __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
303  Smi::FromInt(-delta));
304 }
305 
306 
307 void FullCodeGenerator::EmitProfilingCounterReset() {
308  int reset_value = FLAG_interrupt_budget;
309  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
310  // Self-optimization is a one-off thing; if it fails, don't try again.
311  reset_value = Smi::kMaxValue;
312  }
313  if (isolate()->IsDebuggerActive()) {
314  // Detect debug break requests as soon as possible.
315  reset_value = 10;
316  }
317  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
318  __ movq(kScratchRegister,
319  reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
320  RelocInfo::NONE);
321  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
322  kScratchRegister);
323 }
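// Note on the store above: the counter is kept as a smi in a
// JSGlobalPropertyCell, and on x64 a smi is the 32-bit integer shifted into
// the upper half of the word, so the reset value can be materialized as a
// single 64-bit immediate and written straight into the cell.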
324 
325 
326 static const int kMaxBackEdgeWeight = 127;
327 static const int kBackEdgeDistanceDivisor = 162;
328 
329 
330 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
331  Label* back_edge_target) {
332  Comment cmnt(masm_, "[ Stack check");
333  Label ok;
334 
335  if (FLAG_count_based_interrupts) {
336  int weight = 1;
337  if (FLAG_weighted_back_edges) {
338  ASSERT(back_edge_target->is_bound());
339  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
340  weight = Min(kMaxBackEdgeWeight,
341  Max(1, distance / kBackEdgeDistanceDivisor));
342  }
343  EmitProfilingCounterDecrement(weight);
344  __ j(positive, &ok, Label::kNear);
345  InterruptStub stub;
346  __ CallStub(&stub);
347  } else {
348  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
349  __ j(above_equal, &ok, Label::kNear);
350  StackCheckStub stub;
351  __ CallStub(&stub);
352  }
353 
354  // Record a mapping of this PC offset to the OSR id. This is used to find
355  // the AST id from the unoptimized code in order to use it as a key into
356  // the deoptimization input data found in the optimized code.
357  RecordStackCheck(stmt->OsrEntryId());
358 
359  // Loop stack checks can be patched to perform on-stack replacement. In
360  // order to decide whether or not to perform OSR we embed the loop depth
361  // in a test instruction after the call so we can extract it from the OSR
362  // builtin.
363  ASSERT(loop_depth() > 0);
364  __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
365 
366  if (FLAG_count_based_interrupts) {
367  EmitProfilingCounterReset();
368  }
369 
370  __ bind(&ok);
371  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
372  // Record a mapping of the OSR id to this PC. This is used if the OSR
373  // entry becomes the target of a bailout. We don't expect it to be, but
374  // we want it to work if it is.
375  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
376 }
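// Worked example of the back-edge weighting above: a loop whose body spans
// 500 bytes of generated code gets weight Min(127, Max(1, 500 / 162)) = 3,
// so each back edge decrements the profiling counter by 3 and a large, hot
// loop reaches the interrupt (and a possible OSR attempt) sooner than a
// tight one.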
377 
378 
379 void FullCodeGenerator::EmitReturnSequence() {
380  Comment cmnt(masm_, "[ Return sequence");
381  if (return_label_.is_bound()) {
382  __ jmp(&return_label_);
383  } else {
384  __ bind(&return_label_);
385  if (FLAG_trace) {
386  __ push(rax);
387  __ CallRuntime(Runtime::kTraceExit, 1);
388  }
389  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
390  // Pretend that the exit is a backwards jump to the entry.
391  int weight = 1;
392  if (info_->ShouldSelfOptimize()) {
393  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
394  } else if (FLAG_weighted_back_edges) {
395  int distance = masm_->pc_offset();
396  weight = Min(kMaxBackEdgeWeight,
397  Max(1, distance / kBackEdgeDistanceDivisor));
398  }
399  EmitProfilingCounterDecrement(weight);
400  Label ok;
401  __ j(positive, &ok, Label::kNear);
402  __ push(rax);
403  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
404  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
405  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
406  } else {
407  InterruptStub stub;
408  __ CallStub(&stub);
409  }
410  __ pop(rax);
411  EmitProfilingCounterReset();
412  __ bind(&ok);
413  }
414 #ifdef DEBUG
415  // Add a label for checking the size of the code used for returning.
416  Label check_exit_codesize;
417  masm_->bind(&check_exit_codesize);
418 #endif
419  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
420  __ RecordJSReturn();
421  // Do not use the leave instruction here because it is too short to
422  // patch with the code required by the debugger.
423  __ movq(rsp, rbp);
424  __ pop(rbp);
425 
426  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
427  __ Ret(arguments_bytes, rcx);
428 
429 #ifdef ENABLE_DEBUGGER_SUPPORT
430  // Add padding that will be overwritten by a debugger breakpoint. We
431  // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
432  // (3 + 1 + 3).
433  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
434  for (int i = 0; i < kPadding; ++i) {
435  masm_->int3();
436  }
437  // Check that the size of the code used for returning is large enough
438  // for the debugger's requirements.
439  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
440  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
441 #endif
442  }
443 }
444 
445 
446 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
447  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
448 }
449 
450 
451 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
452  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
453  codegen()->GetVar(result_register(), var);
454 }
455 
456 
457 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
458  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
459  MemOperand operand = codegen()->VarOperand(var, result_register());
460  __ push(operand);
461 }
462 
463 
464 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
465  codegen()->GetVar(result_register(), var);
466  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
467  codegen()->DoTest(this);
468 }
469 
470 
471 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
472 }
473 
474 
475 void FullCodeGenerator::AccumulatorValueContext::Plug(
476  Heap::RootListIndex index) const {
477  __ LoadRoot(result_register(), index);
478 }
479 
480 
481 void FullCodeGenerator::StackValueContext::Plug(
482  Heap::RootListIndex index) const {
483  __ PushRoot(index);
484 }
485 
486 
487 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
488  codegen()->PrepareForBailoutBeforeSplit(condition(),
489  true,
490  true_label_,
491  false_label_);
492  if (index == Heap::kUndefinedValueRootIndex ||
493  index == Heap::kNullValueRootIndex ||
494  index == Heap::kFalseValueRootIndex) {
495  if (false_label_ != fall_through_) __ jmp(false_label_);
496  } else if (index == Heap::kTrueValueRootIndex) {
497  if (true_label_ != fall_through_) __ jmp(true_label_);
498  } else {
499  __ LoadRoot(result_register(), index);
500  codegen()->DoTest(this);
501  }
502 }
503 
504 
505 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
506 }
507 
508 
509 void FullCodeGenerator::AccumulatorValueContext::Plug(
510  Handle<Object> lit) const {
511  __ Move(result_register(), lit);
512 }
513 
514 
515 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
516  __ Push(lit);
517 }
518 
519 
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
521  codegen()->PrepareForBailoutBeforeSplit(condition(),
522  true,
523  true_label_,
524  false_label_);
525  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
526  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527  if (false_label_ != fall_through_) __ jmp(false_label_);
528  } else if (lit->IsTrue() || lit->IsJSObject()) {
529  if (true_label_ != fall_through_) __ jmp(true_label_);
530  } else if (lit->IsString()) {
531  if (String::cast(*lit)->length() == 0) {
532  if (false_label_ != fall_through_) __ jmp(false_label_);
533  } else {
534  if (true_label_ != fall_through_) __ jmp(true_label_);
535  }
536  } else if (lit->IsSmi()) {
537  if (Smi::cast(*lit)->value() == 0) {
538  if (false_label_ != fall_through_) __ jmp(false_label_);
539  } else {
540  if (true_label_ != fall_through_) __ jmp(true_label_);
541  }
542  } else {
543  // For simplicity we always test the accumulator register.
544  __ Move(result_register(), lit);
545  codegen()->DoTest(this);
546  }
547 }
548 
549 
550 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
551  Register reg) const {
552  ASSERT(count > 0);
553  __ Drop(count);
554 }
555 
556 
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
558  int count,
559  Register reg) const {
560  ASSERT(count > 0);
561  __ Drop(count);
562  __ Move(result_register(), reg);
563 }
564 
565 
566 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
567  Register reg) const {
568  ASSERT(count > 0);
569  if (count > 1) __ Drop(count - 1);
570  __ movq(Operand(rsp, 0), reg);
571 }
572 
573 
574 void FullCodeGenerator::TestContext::DropAndPlug(int count,
575  Register reg) const {
576  ASSERT(count > 0);
577  // For simplicity we always test the accumulator register.
578  __ Drop(count);
579  __ Move(result_register(), reg);
580  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
581  codegen()->DoTest(this);
582 }
583 
584 
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586  Label* materialize_false) const {
587  ASSERT(materialize_true == materialize_false);
588  __ bind(materialize_true);
589 }
590 
591 
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593  Label* materialize_true,
594  Label* materialize_false) const {
595  Label done;
596  __ bind(materialize_true);
597  __ Move(result_register(), isolate()->factory()->true_value());
598  __ jmp(&done, Label::kNear);
599  __ bind(materialize_false);
600  __ Move(result_register(), isolate()->factory()->false_value());
601  __ bind(&done);
602 }
603 
604 
605 void FullCodeGenerator::StackValueContext::Plug(
606  Label* materialize_true,
607  Label* materialize_false) const {
608  Label done;
609  __ bind(materialize_true);
610  __ Push(isolate()->factory()->true_value());
611  __ jmp(&done, Label::kNear);
612  __ bind(materialize_false);
613  __ Push(isolate()->factory()->false_value());
614  __ bind(&done);
615 }
616 
617 
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619  Label* materialize_false) const {
620  ASSERT(materialize_true == true_label_);
621  ASSERT(materialize_false == false_label_);
622 }
623 
624 
625 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
626 }
627 
628 
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630  Heap::RootListIndex value_root_index =
631  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
632  __ LoadRoot(result_register(), value_root_index);
633 }
634 
635 
636 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
637  Heap::RootListIndex value_root_index =
638  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
639  __ PushRoot(value_root_index);
640 }
641 
642 
643 void FullCodeGenerator::TestContext::Plug(bool flag) const {
644  codegen()->PrepareForBailoutBeforeSplit(condition(),
645  true,
646  true_label_,
647  false_label_);
648  if (flag) {
649  if (true_label_ != fall_through_) __ jmp(true_label_);
650  } else {
651  if (false_label_ != fall_through_) __ jmp(false_label_);
652  }
653 }
654 
655 
656 void FullCodeGenerator::DoTest(Expression* condition,
657  Label* if_true,
658  Label* if_false,
659  Label* fall_through) {
660  ToBooleanStub stub(result_register());
661  __ push(result_register());
662  __ CallStub(&stub, condition->test_id());
663  __ testq(result_register(), result_register());
664  // The stub returns nonzero for true.
665  Split(not_zero, if_true, if_false, fall_through);
666 }
667 
668 
669 void FullCodeGenerator::Split(Condition cc,
670  Label* if_true,
671  Label* if_false,
672  Label* fall_through) {
673  if (if_false == fall_through) {
674  __ j(cc, if_true);
675  } else if (if_true == fall_through) {
676  __ j(NegateCondition(cc), if_false);
677  } else {
678  __ j(cc, if_true);
679  __ jmp(if_false);
680  }
681 }
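// Example: compiling "if (a) { B } else { C }" with C emitted immediately
// after the branch calls Split(cc, B_label, C_label, C_label); only
// "j(cc, B_label)" is emitted and control falls through to C, saving the
// unconditional jump.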
682 
683 
684 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
685  ASSERT(var->IsStackAllocated());
686  // Offset is negative because higher indexes are at lower addresses.
687  int offset = -var->index() * kPointerSize;
688  // Adjust by a (parameter or local) base offset.
689  if (var->IsParameter()) {
690  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
691  } else {
692  offset += JavaScriptFrameConstants::kLocal0Offset;
693  }
694  return Operand(rbp, offset);
695 }
696 
697 
698 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
699  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
700  if (var->IsContextSlot()) {
701  int context_chain_length = scope()->ContextChainLength(var->scope());
702  __ LoadContext(scratch, context_chain_length);
703  return ContextOperand(scratch, var->index());
704  } else {
705  return StackOperand(var);
706  }
707 }
708 
709 
710 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
711  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
712  MemOperand location = VarOperand(var, dest);
713  __ movq(dest, location);
714 }
715 
716 
717 void FullCodeGenerator::SetVar(Variable* var,
718  Register src,
719  Register scratch0,
720  Register scratch1) {
721  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
722  ASSERT(!scratch0.is(src));
723  ASSERT(!scratch0.is(scratch1));
724  ASSERT(!scratch1.is(src));
725  MemOperand location = VarOperand(var, scratch0);
726  __ movq(location, src);
727 
728  // Emit the write barrier code if the location is in the heap.
729  if (var->IsContextSlot()) {
730  int offset = Context::SlotOffset(var->index());
731  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
732  }
733 }
734 
735 
736 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
737  bool should_normalize,
738  Label* if_true,
739  Label* if_false) {
740  // Only prepare for bailouts before splits if we're in a test
741  // context. Otherwise, we let the Visit function deal with the
742  // preparation to avoid preparing with the same AST id twice.
743  if (!context()->IsTest() || !info_->IsOptimizable()) return;
744 
745  Label skip;
746  if (should_normalize) __ jmp(&skip, Label::kNear);
747  PrepareForBailout(expr, TOS_REG);
748  if (should_normalize) {
749  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
750  Split(equal, if_true, if_false, NULL);
751  __ bind(&skip);
752  }
753 }
754 
755 
756 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
757  // The variable in the declaration always resides in the current function
758  // context.
759  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
760  if (FLAG_debug_code) {
761  // Check that we're not inside a with or catch context.
762  __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
763  __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
764  __ Check(not_equal, "Declaration in with context.");
765  __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
766  __ Check(not_equal, "Declaration in catch context.");
767  }
768 }
769 
770 
771 void FullCodeGenerator::VisitVariableDeclaration(
772  VariableDeclaration* declaration) {
773  // If it was not possible to allocate the variable at compile time, we
774  // need to "declare" it at runtime to make sure it actually exists in the
775  // local context.
776  VariableProxy* proxy = declaration->proxy();
777  VariableMode mode = declaration->mode();
778  Variable* variable = proxy->var();
779  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
780  switch (variable->location()) {
781  case Variable::UNALLOCATED:
782  globals_->Add(variable->name(), zone());
783  globals_->Add(variable->binding_needs_init()
784  ? isolate()->factory()->the_hole_value()
785  : isolate()->factory()->undefined_value(),
786  zone());
787  break;
788 
789  case Variable::PARAMETER:
790  case Variable::LOCAL:
791  if (hole_init) {
792  Comment cmnt(masm_, "[ VariableDeclaration");
793  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
794  __ movq(StackOperand(variable), kScratchRegister);
795  }
796  break;
797 
798  case Variable::CONTEXT:
799  if (hole_init) {
800  Comment cmnt(masm_, "[ VariableDeclaration");
801  EmitDebugCheckDeclarationContext(variable);
802  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
803  __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
804  // No write barrier since the hole value is in old space.
805  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
806  }
807  break;
808 
809  case Variable::LOOKUP: {
810  Comment cmnt(masm_, "[ VariableDeclaration");
811  __ push(rsi);
812  __ Push(variable->name());
813  // Declaration nodes are always introduced in one of four modes.
814  ASSERT(mode == VAR || mode == LET ||
815  mode == CONST || mode == CONST_HARMONY);
816  PropertyAttributes attr =
817  (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
818  __ Push(Smi::FromInt(attr));
819  // Push initial value, if any.
820  // Note: For variables we must not push an initial value (such as
821  // 'undefined') because we may have a (legal) redeclaration and we
822  // must not destroy the current value.
823  if (hole_init) {
824  __ PushRoot(Heap::kTheHoleValueRootIndex);
825  } else {
826  __ Push(Smi::FromInt(0)); // Indicates no initial value.
827  }
828  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
829  break;
830  }
831  }
832 }
833 
834 
835 void FullCodeGenerator::VisitFunctionDeclaration(
836  FunctionDeclaration* declaration) {
837  VariableProxy* proxy = declaration->proxy();
838  Variable* variable = proxy->var();
839  switch (variable->location()) {
840  case Variable::UNALLOCATED: {
841  globals_->Add(variable->name(), zone());
842  Handle<SharedFunctionInfo> function =
843  Compiler::BuildFunctionInfo(declaration->fun(), script());
844  // Check for stack-overflow exception.
845  if (function.is_null()) return SetStackOverflow();
846  globals_->Add(function, zone());
847  break;
848  }
849 
850  case Variable::PARAMETER:
851  case Variable::LOCAL: {
852  Comment cmnt(masm_, "[ FunctionDeclaration");
853  VisitForAccumulatorValue(declaration->fun());
854  __ movq(StackOperand(variable), result_register());
855  break;
856  }
857 
858  case Variable::CONTEXT: {
859  Comment cmnt(masm_, "[ FunctionDeclaration");
860  EmitDebugCheckDeclarationContext(variable);
861  VisitForAccumulatorValue(declaration->fun());
862  __ movq(ContextOperand(rsi, variable->index()), result_register());
863  int offset = Context::SlotOffset(variable->index());
864  // We know that we have written a function, which is not a smi.
865  __ RecordWriteContextSlot(rsi,
866  offset,
867  result_register(),
868  rcx,
869  kDontSaveFPRegs,
870  EMIT_REMEMBERED_SET,
871  OMIT_SMI_CHECK);
872  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
873  break;
874  }
875 
876  case Variable::LOOKUP: {
877  Comment cmnt(masm_, "[ FunctionDeclaration");
878  __ push(rsi);
879  __ Push(variable->name());
880  __ Push(Smi::FromInt(NONE));
881  VisitForStackValue(declaration->fun());
882  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
883  break;
884  }
885  }
886 }
887 
888 
889 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
890  VariableProxy* proxy = declaration->proxy();
891  Variable* variable = proxy->var();
892  Handle<JSModule> instance = declaration->module()->interface()->Instance();
893  ASSERT(!instance.is_null());
894 
895  switch (variable->location()) {
896  case Variable::UNALLOCATED: {
897  Comment cmnt(masm_, "[ ModuleDeclaration");
898  globals_->Add(variable->name(), zone());
899  globals_->Add(instance, zone());
900  Visit(declaration->module());
901  break;
902  }
903 
904  case Variable::CONTEXT: {
905  Comment cmnt(masm_, "[ ModuleDeclaration");
906  EmitDebugCheckDeclarationContext(variable);
907  __ Move(ContextOperand(rsi, variable->index()), instance);
908  Visit(declaration->module());
909  break;
910  }
911 
912  case Variable::PARAMETER:
913  case Variable::LOCAL:
914  case Variable::LOOKUP:
915  UNREACHABLE();
916  }
917 }
918 
919 
920 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
921  VariableProxy* proxy = declaration->proxy();
922  Variable* variable = proxy->var();
923  switch (variable->location()) {
924  case Variable::UNALLOCATED:
925  // TODO(rossberg)
926  break;
927 
928  case Variable::CONTEXT: {
929  Comment cmnt(masm_, "[ ImportDeclaration");
930  EmitDebugCheckDeclarationContext(variable);
931  // TODO(rossberg)
932  break;
933  }
934 
935  case Variable::PARAMETER:
936  case Variable::LOCAL:
937  case Variable::LOOKUP:
938  UNREACHABLE();
939  }
940 }
941 
942 
943 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
944  // TODO(rossberg)
945 }
946 
947 
948 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
949  // Call the runtime to declare the globals.
950  __ push(rsi); // The context is the first argument.
951  __ Push(pairs);
952  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
953  __ CallRuntime(Runtime::kDeclareGlobals, 3);
954  // Return value is ignored.
955 }
956 
957 
958 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
959  Comment cmnt(masm_, "[ SwitchStatement");
960  Breakable nested_statement(this, stmt);
961  SetStatementPosition(stmt);
962 
963  // Keep the switch value on the stack until a case matches.
964  VisitForStackValue(stmt->tag());
965  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
966 
967  ZoneList<CaseClause*>* clauses = stmt->cases();
968  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
969 
970  Label next_test; // Recycled for each test.
971  // Compile all the tests with branches to their bodies.
972  for (int i = 0; i < clauses->length(); i++) {
973  CaseClause* clause = clauses->at(i);
974  clause->body_target()->Unuse();
975 
976  // The default is not a test, but remember it as final fall through.
977  if (clause->is_default()) {
978  default_clause = clause;
979  continue;
980  }
981 
982  Comment cmnt(masm_, "[ Case comparison");
983  __ bind(&next_test);
984  next_test.Unuse();
985 
986  // Compile the label expression.
987  VisitForAccumulatorValue(clause->label());
988 
989  // Perform the comparison as if via '==='.
990  __ movq(rdx, Operand(rsp, 0)); // Switch value.
991  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
992  JumpPatchSite patch_site(masm_);
993  if (inline_smi_code) {
994  Label slow_case;
995  __ movq(rcx, rdx);
996  __ or_(rcx, rax);
997  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
998 
999  __ cmpq(rdx, rax);
1000  __ j(not_equal, &next_test);
1001  __ Drop(1); // Switch value is no longer needed.
1002  __ jmp(clause->body_target());
1003  __ bind(&slow_case);
1004  }
1005 
1006  // Record position before stub call for type feedback.
1007  SetSourcePosition(clause->position());
1008  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1009  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1010  patch_site.EmitPatchInfo();
1011 
1012  __ testq(rax, rax);
1013  __ j(not_equal, &next_test);
1014  __ Drop(1); // Switch value is no longer needed.
1015  __ jmp(clause->body_target());
1016  }
1017 
1018  // Discard the test value and jump to the default if present, otherwise to
1019  // the end of the statement.
1020  __ bind(&next_test);
1021  __ Drop(1); // Switch value is no longer needed.
1022  if (default_clause == NULL) {
1023  __ jmp(nested_statement.break_label());
1024  } else {
1025  __ jmp(default_clause->body_target());
1026  }
1027 
1028  // Compile all the case bodies.
1029  for (int i = 0; i < clauses->length(); i++) {
1030  Comment cmnt(masm_, "[ Case body");
1031  CaseClause* clause = clauses->at(i);
1032  __ bind(clause->body_target());
1033  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1034  VisitStatements(clause->statements());
1035  }
1036 
1037  __ bind(nested_statement.break_label());
1038  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1039 }
1040 
1041 
1042 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1043  Comment cmnt(masm_, "[ ForInStatement");
1044  SetStatementPosition(stmt);
1045 
1046  Label loop, exit;
1047  ForIn loop_statement(this, stmt);
1048  increment_loop_depth();
1049 
1050  // Get the object to enumerate over. Both SpiderMonkey and JSC
1051  // ignore null and undefined in contrast to the specification; see
1052  // ECMA-262 section 12.6.4.
1053  VisitForAccumulatorValue(stmt->enumerable());
1054  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1055  __ j(equal, &exit);
1056  Register null_value = rdi;
1057  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1058  __ cmpq(rax, null_value);
1059  __ j(equal, &exit);
1060 
1061  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1062 
1063  // Convert the object to a JS object.
1064  Label convert, done_convert;
1065  __ JumpIfSmi(rax, &convert);
1066  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1067  __ j(above_equal, &done_convert);
1068  __ bind(&convert);
1069  __ push(rax);
1070  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1071  __ bind(&done_convert);
1072  __ push(rax);
1073 
1074  // Check for proxies.
1075  Label call_runtime;
1076  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1077  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1078  __ j(below_equal, &call_runtime);
1079 
1080  // Check cache validity in generated code. This is a fast case for
1081  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1082  // guarantee cache validity, call the runtime system to check cache
1083  // validity or get the property names in a fixed array.
1084  __ CheckEnumCache(null_value, &call_runtime);
1085 
1086  // The enum cache is valid. Load the map of the object being
1087  // iterated over and use the cache for the iteration.
1088  Label use_cache;
1089  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
1090  __ jmp(&use_cache, Label::kNear);
1091 
1092  // Get the set of properties to enumerate.
1093  __ bind(&call_runtime);
1094  __ push(rax); // Duplicate the enumerable object on the stack.
1095  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1096 
1097  // If we got a map from the runtime call, we can do a fast
1098  // modification check. Otherwise, we got a fixed array, and we have
1099  // to do a slow check.
1100  Label fixed_array;
1101  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1102  Heap::kMetaMapRootIndex);
1103  __ j(not_equal, &fixed_array, Label::kNear);
1104 
1105  // We got a map in register rax. Get the enumeration cache from it.
1106  __ bind(&use_cache);
1107  __ LoadInstanceDescriptors(rax, rcx);
1108  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
1109  __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1110
1111  // Set up the four remaining stack slots.
1112  __ push(rax); // Map.
1113  __ push(rdx); // Enumeration cache.
1114  __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
1115  __ push(rax); // Enumeration cache length (as smi).
1116  __ Push(Smi::FromInt(0)); // Initial index.
1117  __ jmp(&loop);
1118 
1119  // We got a fixed array in register rax. Iterate through that.
1120  Label non_proxy;
1121  __ bind(&fixed_array);
1122 
1123  Handle<JSGlobalPropertyCell> cell =
1124  isolate()->factory()->NewJSGlobalPropertyCell(
1125  Handle<Object>(
1126  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1127  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1128  __ LoadHeapObject(rbx, cell);
1129  __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
1130  Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
1131
1132  __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1133  __ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1134  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1135  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1136  __ j(above, &non_proxy);
1137  __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1138  __ bind(&non_proxy);
1139  __ push(rbx); // Smi
1140  __ push(rax); // Array
1141  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1142  __ push(rax); // Fixed array length (as smi).
1143  __ Push(Smi::FromInt(0)); // Initial index.
1144 
1145  // Generate code for doing the condition check.
1146  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1147  __ bind(&loop);
1148  __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1149  __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1150  __ j(above_equal, loop_statement.break_label());
1151 
1152  // Get the current entry of the array into register rbx.
1153  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
1154  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1155  __ movq(rbx, FieldOperand(rbx,
1156  index.reg,
1157  index.scale,
1158  FixedArray::kHeaderSize));
1159
1160  // Get the expected map from the stack or a smi in the
1161  // permanent slow case into register rdx.
1162  __ movq(rdx, Operand(rsp, 3 * kPointerSize));
1163 
1164  // Check if the expected map still matches that of the enumerable.
1165  // If not, we may have to filter the key.
1166  Label update_each;
1167  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
1168  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1169  __ j(equal, &update_each, Label::kNear);
1170 
1171  // For proxies, no filtering is done.
1172  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1173  __ Cmp(rdx, Smi::FromInt(0));
1174  __ j(equal, &update_each, Label::kNear);
1175 
1176  // Convert the entry to a string or null if it isn't a property
1177  // anymore. If the property has been removed while iterating, we
1178  // just skip it.
1179  __ push(rcx); // Enumerable.
1180  __ push(rbx); // Current entry.
1181  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1182  __ Cmp(rax, Smi::FromInt(0));
1183  __ j(equal, loop_statement.continue_label());
1184  __ movq(rbx, rax);
1185 
1186  // Update the 'each' property or variable from the possibly filtered
1187  // entry in register rbx.
1188  __ bind(&update_each);
1189  __ movq(result_register(), rbx);
1190  // Perform the assignment as if via '='.
1191  { EffectContext context(this);
1192  EmitAssignment(stmt->each());
1193  }
1194 
1195  // Generate code for the body of the loop.
1196  Visit(stmt->body());
1197 
1198  // Generate code for going to the next element by incrementing the
1199  // index (smi) stored on top of the stack.
1200  __ bind(loop_statement.continue_label());
1201  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1202 
1203  EmitStackCheck(stmt, &loop);
1204  __ jmp(&loop);
1205 
1206  // Remove the pointers stored on the stack.
1207  __ bind(loop_statement.break_label());
1208  __ addq(rsp, Immediate(5 * kPointerSize));
1209 
1210  // Exit and decrement the loop depth.
1211  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1212  __ bind(&exit);
1213  decrement_loop_depth();
1214 }
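// For reference, the five words the loop above keeps on the stack, from rsp
// upward: the current index (smi), the array/cache length (smi), the
// FixedArray of keys (or enum cache bridge), the receiver's map (or a smi:
// 1 for the slow check, 0 for a proxy), and the enumerable object itself;
// hence the "addq rsp, 5 * kPointerSize" on exit.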
1215 
1216 
1217 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1218  bool pretenure) {
1219  // Use the fast case closure allocation code that allocates in new
1220  // space for nested functions that don't need literals cloning. If
1221  // we're running with the --always-opt or the --prepare-always-opt
1222  // flag, we need to use the runtime function so that the new function
1223  // we are creating here gets a chance to have its code optimized and
1224  // doesn't just get a copy of the existing unoptimized code.
1225  if (!FLAG_always_opt &&
1226  !FLAG_prepare_always_opt &&
1227  !pretenure &&
1228  scope()->is_function_scope() &&
1229  info->num_literals() == 0) {
1230  FastNewClosureStub stub(info->language_mode());
1231  __ Push(info);
1232  __ CallStub(&stub);
1233  } else {
1234  __ push(rsi);
1235  __ Push(info);
1236  __ Push(pretenure
1237  ? isolate()->factory()->true_value()
1238  : isolate()->factory()->false_value());
1239  __ CallRuntime(Runtime::kNewClosure, 3);
1240  }
1241  context()->Plug(rax);
1242 }
1243 
1244 
1245 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1246  Comment cmnt(masm_, "[ VariableProxy");
1247  EmitVariableLoad(expr);
1248 }
1249 
1250 
1251 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1252  TypeofState typeof_state,
1253  Label* slow) {
1254  Register context = rsi;
1255  Register temp = rdx;
1256 
1257  Scope* s = scope();
1258  while (s != NULL) {
1259  if (s->num_heap_slots() > 0) {
1260  if (s->calls_non_strict_eval()) {
1261  // Check that extension is NULL.
1262  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1263  Immediate(0));
1264  __ j(not_equal, slow);
1265  }
1266  // Load next context in chain.
1267  __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1268  // Walk the rest of the chain without clobbering rsi.
1269  context = temp;
1270  }
1271  // If no outer scope calls eval, we do not need to check more
1272  // context extensions. If we have reached an eval scope, we check
1273  // all extensions from this point.
1274  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1275  s = s->outer_scope();
1276  }
1277 
1278  if (s != NULL && s->is_eval_scope()) {
1279  // Loop up the context chain. There is no frame effect so it is
1280  // safe to use raw labels here.
1281  Label next, fast;
1282  if (!context.is(temp)) {
1283  __ movq(temp, context);
1284  }
1285  // Load map for comparison into register, outside loop.
1286  __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
1287  __ bind(&next);
1288  // Terminate at global context.
1289  __ cmpq(FieldOperand(temp, HeapObject::kMapOffset), kScratchRegister);
1290  __ j(equal, &fast, Label::kNear);
1291  // Check that extension is NULL.
1292  __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1293  __ j(not_equal, slow);
1294  // Load next context in chain.
1295  __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1296  __ jmp(&next);
1297  __ bind(&fast);
1298  }
1299 
1300  // All extension objects were empty and it is safe to use a global
1301  // load IC call.
1302  __ movq(rax, GlobalObjectOperand());
1303  __ Move(rcx, var->name());
1304  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1305  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1306  ? RelocInfo::CODE_TARGET
1307  : RelocInfo::CODE_TARGET_CONTEXT;
1308  CallIC(ic, mode);
1309 }
1310 
1311 
1312 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1313  Label* slow) {
1314  ASSERT(var->IsContextSlot());
1315  Register context = rsi;
1316  Register temp = rbx;
1317 
1318  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1319  if (s->num_heap_slots() > 0) {
1320  if (s->calls_non_strict_eval()) {
1321  // Check that extension is NULL.
1322  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1323  Immediate(0));
1324  __ j(not_equal, slow);
1325  }
1326  __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1327  // Walk the rest of the chain without clobbering rsi.
1328  context = temp;
1329  }
1330  }
1331  // Check that last extension is NULL.
1332  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1333  __ j(not_equal, slow);
1334 
1335  // This function is used only for loads, not stores, so it's safe to
1336  // return an rsi-based operand (the write barrier cannot be allowed to
1337  // destroy the rsi register).
1338  return ContextOperand(context, var->index());
1339 }
1340 
1341 
1342 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1343  TypeofState typeof_state,
1344  Label* slow,
1345  Label* done) {
1346  // Generate fast-case code for variables that might be shadowed by
1347  // eval-introduced variables. Eval is used a lot without
1348  // introducing variables. In those cases, we do not want to
1349  // perform a runtime call for all variables in the scope
1350  // containing the eval.
1351  if (var->mode() == DYNAMIC_GLOBAL) {
1352  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1353  __ jmp(done);
1354  } else if (var->mode() == DYNAMIC_LOCAL) {
1355  Variable* local = var->local_if_not_shadowed();
1356  __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
1357  if (local->mode() == CONST ||
1358  local->mode() == CONST_HARMONY ||
1359  local->mode() == LET) {
1360  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1361  __ j(not_equal, done);
1362  if (local->mode() == CONST) {
1363  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1364  } else { // LET || CONST_HARMONY
1365  __ Push(var->name());
1366  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1367  }
1368  }
1369  __ jmp(done);
1370  }
1371 }
1372 
1373 
1374 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1375  // Record position before possible IC call.
1376  SetSourcePosition(proxy->position());
1377  Variable* var = proxy->var();
1378 
1379  // Three cases: global variables, lookup variables, and all other types of
1380  // variables.
1381  switch (var->location()) {
1382  case Variable::UNALLOCATED: {
1383  Comment cmnt(masm_, "Global variable");
1384  // Use inline caching. Variable name is passed in rcx and the global
1385  // object on the stack.
1386  __ Move(rcx, var->name());
1387  __ movq(rax, GlobalObjectOperand());
1388  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1389  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1390  context()->Plug(rax);
1391  break;
1392  }
1393 
1394  case Variable::PARAMETER:
1395  case Variable::LOCAL:
1396  case Variable::CONTEXT: {
1397  Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1398  if (var->binding_needs_init()) {
1399  // var->scope() may be NULL when the proxy is located in eval code and
1400  // refers to a potential outside binding. Currently those bindings are
1401  // always looked up dynamically, i.e. in that case
1402  // var->location() == LOOKUP.
1403  // always holds.
1404  ASSERT(var->scope() != NULL);
1405 
1406  // Check if the binding really needs an initialization check. The check
1407  // can be skipped in the following situation: we have a LET or CONST
1408  // binding in harmony mode, both the Variable and the VariableProxy have
1409  // the same declaration scope (i.e. they are both in global code, in the
1410  // same function or in the same eval code) and the VariableProxy is in
1411  // the source physically located after the initializer of the variable.
1412  //
1413  // We cannot skip any initialization checks for CONST in non-harmony
1414  // mode because const variables may be declared but never initialized:
1415  // if (false) { const x; }; var y = x;
1416  //
1417  // The condition on the declaration scopes is a conservative check for
1418  // nested functions that access a binding and are called before the
1419  // binding is initialized:
1420  // function() { f(); let x = 1; function f() { x = 2; } }
1421  //
1422  bool skip_init_check;
1423  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1424  skip_init_check = false;
1425  } else {
1426  // Check that we always have valid source position.
1427  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1428  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1429  skip_init_check = var->mode() != CONST &&
1430  var->initializer_position() < proxy->position();
1431  }
1432 
1433  if (!skip_init_check) {
1434  // Let and const need a read barrier.
1435  Label done;
1436  GetVar(rax, var);
1437  __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1438  __ j(not_equal, &done, Label::kNear);
1439  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1440  // Throw a reference error when using an uninitialized let/const
1441  // binding in harmony mode.
1442  __ Push(var->name());
1443  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1444  } else {
1445  // Uninitialized const bindings outside of harmony mode are unholed.
1446  ASSERT(var->mode() == CONST);
1447  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1448  }
1449  __ bind(&done);
1450  context()->Plug(rax);
1451  break;
1452  }
1453  }
1454  context()->Plug(var);
1455  break;
1456  }
1457 
1458  case Variable::LOOKUP: {
1459  Label done, slow;
1460  // Generate code for loading from variables potentially shadowed
1461  // by eval-introduced variables.
1462  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1463  __ bind(&slow);
1464  Comment cmnt(masm_, "Lookup slot");
1465  __ push(rsi); // Context.
1466  __ Push(var->name());
1467  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1468  __ bind(&done);
1469  context()->Plug(rax);
1470  break;
1471  }
1472  }
1473 }
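// Example of the read barrier above: in harmony mode, for
//   f(); let x = 1; function f() { return x; }
// the call to f() loads x while it still holds the hole value, so the
// generated code calls Runtime::kThrowReferenceError, as required for
// accessing an uninitialized let binding.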
1474 
1475 
1476 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1477  Comment cmnt(masm_, "[ RegExpLiteral");
1478  Label materialized;
1479  // Registers will be used as follows:
1480  // rdi = JS function.
1481  // rcx = literals array.
1482  // rbx = regexp literal.
1483  // rax = regexp literal clone.
1484  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1485  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1486  int literal_offset =
1487  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1488  __ movq(rbx, FieldOperand(rcx, literal_offset));
1489  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1490  __ j(not_equal, &materialized, Label::kNear);
1491 
1492  // Create regexp literal using runtime function
1493  // Result will be in rax.
1494  __ push(rcx);
1495  __ Push(Smi::FromInt(expr->literal_index()));
1496  __ Push(expr->pattern());
1497  __ Push(expr->flags());
1498  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1499  __ movq(rbx, rax);
1500 
1501  __ bind(&materialized);
1502  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1503  Label allocated, runtime_allocate;
1504  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1505  __ jmp(&allocated);
1506 
1507  __ bind(&runtime_allocate);
1508  __ push(rbx);
1509  __ Push(Smi::FromInt(size));
1510  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1511  __ pop(rbx);
1512 
1513  __ bind(&allocated);
1514  // Copy the content into the newly allocated memory.
1515  // (Unroll copy loop once for better throughput).
1516  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1517  __ movq(rdx, FieldOperand(rbx, i));
1518  __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1519  __ movq(FieldOperand(rax, i), rdx);
1520  __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1521  }
1522  if ((size % (2 * kPointerSize)) != 0) {
1523  __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1524  __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1525  }
1526  context()->Plug(rax);
1527 }
1528 
1529 
1530 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1531  if (expression == NULL) {
1532  __ PushRoot(Heap::kNullValueRootIndex);
1533  } else {
1534  VisitForStackValue(expression);
1535  }
1536 }
1537 
1538 
1539 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1540  Comment cmnt(masm_, "[ ObjectLiteral");
1541  Handle<FixedArray> constant_properties = expr->constant_properties();
1542  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1543  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1544  __ Push(Smi::FromInt(expr->literal_index()));
1545  __ Push(constant_properties);
1546  int flags = expr->fast_elements()
1547  ? ObjectLiteral::kFastElements
1548  : ObjectLiteral::kNoFlags;
1549  flags |= expr->has_function()
1550  ? ObjectLiteral::kHasFunction
1551  : ObjectLiteral::kNoFlags;
1552  __ Push(Smi::FromInt(flags));
1553  int properties_count = constant_properties->length() / 2;
1554  if (expr->depth() > 1) {
1555  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1556  } else if (flags != ObjectLiteral::kFastElements ||
1557  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1558  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1559  } else {
1560  FastCloneShallowObjectStub stub(properties_count);
1561  __ CallStub(&stub);
1562  }
1563 
1564  // If result_saved is true the result is on top of the stack. If
1565  // result_saved is false the result is in rax.
1566  bool result_saved = false;
1567 
1568  // Mark all computed expressions that are bound to a key that
1569  // is shadowed by a later occurrence of the same key. For the
1570  // marked expressions, no store code is emitted.
1571  expr->CalculateEmitStore(zone());
1572 
1573  AccessorTable accessor_table(isolate()->zone());
1574  for (int i = 0; i < expr->properties()->length(); i++) {
1575  ObjectLiteral::Property* property = expr->properties()->at(i);
1576  if (property->IsCompileTimeValue()) continue;
1577 
1578  Literal* key = property->key();
1579  Expression* value = property->value();
1580  if (!result_saved) {
1581  __ push(rax); // Save result on the stack
1582  result_saved = true;
1583  }
1584  switch (property->kind()) {
1585  case ObjectLiteral::Property::CONSTANT:
1586  UNREACHABLE();
1587  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1588  ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1589  // Fall through.
1590  case ObjectLiteral::Property::COMPUTED:
1591  if (key->handle()->IsSymbol()) {
1592  if (property->emit_store()) {
1593  VisitForAccumulatorValue(value);
1594  __ Move(rcx, key->handle());
1595  __ movq(rdx, Operand(rsp, 0));
1596  Handle<Code> ic = is_classic_mode()
1597  ? isolate()->builtins()->StoreIC_Initialize()
1598  : isolate()->builtins()->StoreIC_Initialize_Strict();
1599  CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1600  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1601  } else {
1602  VisitForEffect(value);
1603  }
1604  break;
1605  }
1606  // Fall through.
1607  case ObjectLiteral::Property::PROTOTYPE:
1608  __ push(Operand(rsp, 0)); // Duplicate receiver.
1609  VisitForStackValue(key);
1610  VisitForStackValue(value);
1611  if (property->emit_store()) {
1612  __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1613  __ CallRuntime(Runtime::kSetProperty, 4);
1614  } else {
1615  __ Drop(3);
1616  }
1617  break;
1618  case ObjectLiteral::Property::GETTER:
1619  accessor_table.lookup(key)->second->getter = value;
1620  break;
1621  case ObjectLiteral::Property::SETTER:
1622  accessor_table.lookup(key)->second->setter = value;
1623  break;
1624  }
1625  }
1626 
1627  // Emit code to define accessors, using only a single call to the runtime for
1628  // each pair of corresponding getters and setters.
1629  for (AccessorTable::Iterator it = accessor_table.begin();
1630  it != accessor_table.end();
1631  ++it) {
1632  __ push(Operand(rsp, 0)); // Duplicate receiver.
1633  VisitForStackValue(it->first);
1634  EmitAccessor(it->second->getter);
1635  EmitAccessor(it->second->setter);
1636  __ Push(Smi::FromInt(NONE));
1637  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1638  }
1639 
1640  if (expr->has_function()) {
1641  ASSERT(result_saved);
1642  __ push(Operand(rsp, 0));
1643  __ CallRuntime(Runtime::kToFastProperties, 1);
1644  }
1645 
1646  if (result_saved) {
1647  context()->PlugTOS();
1648  } else {
1649  context()->Plug(rax);
1650  }
1651 }
1652 
1653 
1654 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1655  Comment cmnt(masm_, "[ ArrayLiteral");
1656 
1657  ZoneList<Expression*>* subexprs = expr->values();
1658  int length = subexprs->length();
1659  Handle<FixedArray> constant_elements = expr->constant_elements();
1660  ASSERT_EQ(2, constant_elements->length());
1661  ElementsKind constant_elements_kind =
1662  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1663  bool has_constant_fast_elements =
1664  IsFastObjectElementsKind(constant_elements_kind);
1665  Handle<FixedArrayBase> constant_elements_values(
1666  FixedArrayBase::cast(constant_elements->get(1)));
1667 
1668  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1669  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1670  __ Push(Smi::FromInt(expr->literal_index()));
1671  __ Push(constant_elements);
1672  Heap* heap = isolate()->heap();
1673  if (has_constant_fast_elements &&
1674  constant_elements_values->map() == heap->fixed_cow_array_map()) {
1675  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1676  // change, so it's possible to specialize the stub in advance.
1677  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1678  FastCloneShallowArrayStub stub(
1679  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1680  length);
1681  __ CallStub(&stub);
1682  } else if (expr->depth() > 1) {
1683  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1684  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1685  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1686  } else {
1687  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1688  FLAG_smi_only_arrays);
1689  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1690  // change, so it's possible to specialize the stub in advance.
1691  FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
1692  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1693  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1694  FastCloneShallowArrayStub stub(mode, length);
1695  __ CallStub(&stub);
1696  }
1697 
1698  bool result_saved = false; // Is the result saved to the stack?
1699 
1700  // Emit code to evaluate all the non-constant subexpressions and to store
1701  // them into the newly cloned array.
1702  for (int i = 0; i < length; i++) {
1703  Expression* subexpr = subexprs->at(i);
1704  // If the subexpression is a literal or a simple materialized literal it
1705  // is already set in the cloned array.
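 // (For example, in [0, 1, x] only x reaches the store code below; 0 and 1
 // arrive with the boilerplate copied above.)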
1706  if (subexpr->AsLiteral() != NULL ||
1707  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1708  continue;
1709  }
1710 
1711  if (!result_saved) {
1712  __ push(rax);
1713  result_saved = true;
1714  }
1715  VisitForAccumulatorValue(subexpr);
1716 
1717  if (IsFastObjectElementsKind(constant_elements_kind)) {
1718  // Fast-case array literals with ElementsKind of FAST_*_ELEMENTS cannot
1719  // transition, so there is no need to call the runtime stub.
1720  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1721  __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1722  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1723  // Store the subexpression value in the array's elements.
1724  __ movq(FieldOperand(rbx, offset), result_register());
1725  // Update the write barrier for the array store.
1726  __ RecordWriteField(rbx, offset, result_register(), rcx,
1727  kDontSaveFPRegs,
1728  EMIT_REMEMBERED_SET,
1729  INLINE_SMI_CHECK);
1730  } else {
1731  // Store the subexpression value in the array's elements.
1732  __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1733  __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
1734  __ Move(rcx, Smi::FromInt(i));
1735  __ Move(rdx, Smi::FromInt(expr->literal_index()));
1736  StoreArrayLiteralElementStub stub;
1737  __ CallStub(&stub);
1738  }
1739 
1740  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1741  }
1742 
1743  if (result_saved) {
1744  context()->PlugTOS();
1745  } else {
1746  context()->Plug(rax);
1747  }
1748 }
1749 
1750 
1751 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1752  Comment cmnt(masm_, "[ Assignment");
1753  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1754  // on the left-hand side.
1755  if (!expr->target()->IsValidLeftHandSide()) {
1756  VisitForEffect(expr->target());
1757  return;
1758  }
1759 
1760  // Left-hand side can only be a property, a global or a (parameter or local)
1761  // slot.
1762  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1763  LhsKind assign_type = VARIABLE;
1764  Property* property = expr->target()->AsProperty();
1765  if (property != NULL) {
1766  assign_type = (property->key()->IsPropertyName())
1767  ? NAMED_PROPERTY
1768  : KEYED_PROPERTY;
1769  }
1770 
1771  // Evaluate LHS expression.
1772  switch (assign_type) {
1773  case VARIABLE:
1774  // Nothing to do here.
1775  break;
1776  case NAMED_PROPERTY:
1777  if (expr->is_compound()) {
1778  // We need the receiver both on the stack and in the accumulator.
1779  VisitForAccumulatorValue(property->obj());
1780  __ push(result_register());
1781  } else {
1782  VisitForStackValue(property->obj());
1783  }
1784  break;
1785  case KEYED_PROPERTY: {
1786  if (expr->is_compound()) {
1787  VisitForStackValue(property->obj());
1788  VisitForAccumulatorValue(property->key());
1789  __ movq(rdx, Operand(rsp, 0));
1790  __ push(rax);
1791  } else {
1792  VisitForStackValue(property->obj());
1793  VisitForStackValue(property->key());
1794  }
1795  break;
1796  }
1797  }
1798 
1799  // For compound assignments we need another deoptimization point after the
1800  // variable/property load.
1801  if (expr->is_compound()) {
1802  { AccumulatorValueContext context(this);
1803  switch (assign_type) {
1804  case VARIABLE:
1805  EmitVariableLoad(expr->target()->AsVariableProxy());
1806  PrepareForBailout(expr->target(), TOS_REG);
1807  break;
1808  case NAMED_PROPERTY:
1809  EmitNamedPropertyLoad(property);
1810  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1811  break;
1812  case KEYED_PROPERTY:
1813  EmitKeyedPropertyLoad(property);
1814  PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1815  break;
1816  }
1817  }
1818 
1819  Token::Value op = expr->binary_op();
1820  __ push(rax); // Left operand goes on the stack.
1821  VisitForAccumulatorValue(expr->value());
1822 
1823  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1824  ? OVERWRITE_RIGHT
1825  : NO_OVERWRITE;
1826  SetSourcePosition(expr->position() + 1);
1827  AccumulatorValueContext context(this);
1828  if (ShouldInlineSmiCase(op)) {
1829  EmitInlineSmiBinaryOp(expr->binary_operation(),
1830  op,
1831  mode,
1832  expr->target(),
1833  expr->value());
1834  } else {
1835  EmitBinaryOp(expr->binary_operation(), op, mode);
1836  }
1837  // Deoptimization point in case the binary operation may have side effects.
1838  PrepareForBailout(expr->binary_operation(), TOS_REG);
1839  } else {
1840  VisitForAccumulatorValue(expr->value());
1841  }
1842 
1843  // Record source position before possible IC call.
1844  SetSourcePosition(expr->position());
1845 
1846  // Store the value.
1847  switch (assign_type) {
1848  case VARIABLE:
1849  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1850  expr->op());
1851  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1852  context()->Plug(rax);
1853  break;
1854  case NAMED_PROPERTY:
1855  EmitNamedPropertyAssignment(expr);
1856  break;
1857  case KEYED_PROPERTY:
1858  EmitKeyedPropertyAssignment(expr);
1859  break;
1860  }
1861 }
1862 
1863 
1864 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1865  SetSourcePosition(prop->position());
1866  Literal* key = prop->key()->AsLiteral();
1867  __ Move(rcx, key->handle());
1868  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1869  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1870 }
1871 
1872 
1873 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1874  SetSourcePosition(prop->position());
1875  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1876  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1877 }
1878 
1879 
1880 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1881  Token::Value op,
1882  OverwriteMode mode,
1883  Expression* left,
1884  Expression* right) {
1885  // Do combined smi check of the operands. Left operand is on the
1886  // stack (popped into rdx). Right operand is in rax but moved into
1887  // rcx to make the shifts easier.
1888  Label done, stub_call, smi_case;
1889  __ pop(rdx);
1890  __ movq(rcx, rax);
1891  __ or_(rax, rdx);
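 // kSmiTag == 0, so the tag bit of rax | rdx is clear only if both operands
 // are smis; the smi check on rax below therefore covers both values.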
1892  JumpPatchSite patch_site(masm_);
1893  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1894 
1895  __ bind(&stub_call);
1896  __ movq(rax, rcx);
1897  BinaryOpStub stub(op, mode);
1898  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1899  patch_site.EmitPatchInfo();
1900  __ jmp(&done, Label::kNear);
1901 
1902  __ bind(&smi_case);
1903  switch (op) {
1904  case Token::SAR:
1905  __ SmiShiftArithmeticRight(rax, rdx, rcx);
1906  break;
1907  case Token::SHL:
1908  __ SmiShiftLeft(rax, rdx, rcx);
1909  break;
1910  case Token::SHR:
1911  __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1912  break;
1913  case Token::ADD:
1914  __ SmiAdd(rax, rdx, rcx, &stub_call);
1915  break;
1916  case Token::SUB:
1917  __ SmiSub(rax, rdx, rcx, &stub_call);
1918  break;
1919  case Token::MUL:
1920  __ SmiMul(rax, rdx, rcx, &stub_call);
1921  break;
1922  case Token::BIT_OR:
1923  __ SmiOr(rax, rdx, rcx);
1924  break;
1925  case Token::BIT_AND:
1926  __ SmiAnd(rax, rdx, rcx);
1927  break;
1928  case Token::BIT_XOR:
1929  __ SmiXor(rax, rdx, rcx);
1930  break;
1931  default:
1932  UNREACHABLE();
1933  break;
1934  }
1935 
1936  __ bind(&done);
1937  context()->Plug(rax);
1938 }
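 // For illustration (hypothetical source): for 'a & b' the smi_case block
 // computes the result directly via SmiAnd with no allocation, while any
 // non-smi operand is handled by the generic BinaryOpStub at stub_call.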
1939 
1940 
1941 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1942  Token::Value op,
1943  OverwriteMode mode) {
1944  __ pop(rdx);
1945  BinaryOpStub stub(op, mode);
1946  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1947  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1948  patch_site.EmitPatchInfo();
1949  context()->Plug(rax);
1950 }
1951 
1952 
1953 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1954  // Invalid left-hand sides are rewritten to have a 'throw
1955  // ReferenceError' on the left-hand side.
1956  if (!expr->IsValidLeftHandSide()) {
1957  VisitForEffect(expr);
1958  return;
1959  }
1960 
1961  // Left-hand side can only be a property, a global or a (parameter or local)
1962  // slot.
1963  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1964  LhsKind assign_type = VARIABLE;
1965  Property* prop = expr->AsProperty();
1966  if (prop != NULL) {
1967  assign_type = (prop->key()->IsPropertyName())
1968  ? NAMED_PROPERTY
1969  : KEYED_PROPERTY;
1970  }
1971 
1972  switch (assign_type) {
1973  case VARIABLE: {
1974  Variable* var = expr->AsVariableProxy()->var();
1975  EffectContext context(this);
1976  EmitVariableAssignment(var, Token::ASSIGN);
1977  break;
1978  }
1979  case NAMED_PROPERTY: {
1980  __ push(rax); // Preserve value.
1981  VisitForAccumulatorValue(prop->obj());
1982  __ movq(rdx, rax);
1983  __ pop(rax); // Restore value.
1984  __ Move(rcx, prop->key()->AsLiteral()->handle());
1985  Handle<Code> ic = is_classic_mode()
1986  ? isolate()->builtins()->StoreIC_Initialize()
1987  : isolate()->builtins()->StoreIC_Initialize_Strict();
1988  CallIC(ic);
1989  break;
1990  }
1991  case KEYED_PROPERTY: {
1992  __ push(rax); // Preserve value.
1993  VisitForStackValue(prop->obj());
1994  VisitForAccumulatorValue(prop->key());
1995  __ movq(rcx, rax);
1996  __ pop(rdx);
1997  __ pop(rax); // Restore value.
1998  Handle<Code> ic = is_classic_mode()
1999  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2000  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2001  CallIC(ic);
2002  break;
2003  }
2004  }
2005  context()->Plug(rax);
2006 }
2007 
2008 
2009 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2010  Token::Value op) {
2011  if (var->IsUnallocated()) {
2012  // Global var, const, or let.
2013  __ Move(rcx, var->name());
2014  __ movq(rdx, GlobalObjectOperand());
2015  Handle<Code> ic = is_classic_mode()
2016  ? isolate()->builtins()->StoreIC_Initialize()
2017  : isolate()->builtins()->StoreIC_Initialize_Strict();
2018  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2019  } else if (op == Token::INIT_CONST) {
2020  // Const initializers need a write barrier.
2021  ASSERT(!var->IsParameter()); // No const parameters.
2022  if (var->IsStackLocal()) {
2023  Label skip;
2024  __ movq(rdx, StackOperand(var));
2025  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2026  __ j(not_equal, &skip);
2027  __ movq(StackOperand(var), rax);
2028  __ bind(&skip);
2029  } else {
2030  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2031  // Like var declarations, const declarations are hoisted to function
2032  // scope. However, unlike var initializers, const initializers are
2033  // able to drill a hole to that function context, even from inside a
2034  // 'with' context. We thus bypass the normal static scope lookup for
2035  // var->IsContextSlot().
2036  __ push(rax);
2037  __ push(rsi);
2038  __ Push(var->name());
2039  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2040  }
2041 
2042  } else if (var->mode() == LET && op != Token::INIT_LET) {
2043  // Non-initializing assignment to let variable needs a write barrier.
2044  if (var->IsLookupSlot()) {
2045  __ push(rax); // Value.
2046  __ push(rsi); // Context.
2047  __ Push(var->name());
2048  __ Push(Smi::FromInt(language_mode()));
2049  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2050  } else {
2051  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2052  Label assign;
2053  MemOperand location = VarOperand(var, rcx);
2054  __ movq(rdx, location);
2055  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2056  __ j(not_equal, &assign, Label::kNear);
2057  __ Push(var->name());
2058  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2059  __ bind(&assign);
2060  __ movq(location, rax);
2061  if (var->IsContextSlot()) {
2062  __ movq(rdx, rax);
2063  __ RecordWriteContextSlot(
2064  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2065  }
2066  }
2067 
2068  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2069  // Assignment to var or initializing assignment to let/const
2070  // in harmony mode.
2071  if (var->IsStackAllocated() || var->IsContextSlot()) {
2072  MemOperand location = VarOperand(var, rcx);
2073  if (FLAG_debug_code && op == Token::INIT_LET) {
2074  // Check for an uninitialized let binding.
2075  __ movq(rdx, location);
2076  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2077  __ Check(equal, "Let binding re-initialization.");
2078  }
2079  // Perform the assignment.
2080  __ movq(location, rax);
2081  if (var->IsContextSlot()) {
2082  __ movq(rdx, rax);
2083  __ RecordWriteContextSlot(
2084  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2085  }
2086  } else {
2087  ASSERT(var->IsLookupSlot());
2088  __ push(rax); // Value.
2089  __ push(rsi); // Context.
2090  __ Push(var->name());
2091  __ Push(Smi::FromInt(language_mode()));
2092  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2093  }
2094  }
2095  // Non-initializing assignments to consts are ignored.
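 // (For example, inside a function, classic-mode 'const c = 1; c = 2;'
 // reaches this point for the second assignment: it matches none of the
 // branches above and is dropped.)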
2096 }
2097 
2098 
2099 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2100  // Assignment to a property, using a named store IC.
2101  Property* prop = expr->target()->AsProperty();
2102  ASSERT(prop != NULL);
2103  ASSERT(prop->key()->AsLiteral() != NULL);
2104 
2105  // If the assignment starts a block of assignments to the same object,
2106  // change to slow case to avoid the quadratic behavior of repeatedly
2107  // adding fast properties.
2108  if (expr->starts_initialization_block()) {
2109  __ push(result_register());
2110  __ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
2111  __ CallRuntime(Runtime::kToSlowProperties, 1);
2112  __ pop(result_register());
2113  }
2114 
2115  // Record source code position before IC call.
2116  SetSourcePosition(expr->position());
2117  __ Move(rcx, prop->key()->AsLiteral()->handle());
2118  if (expr->ends_initialization_block()) {
2119  __ movq(rdx, Operand(rsp, 0));
2120  } else {
2121  __ pop(rdx);
2122  }
2123  Handle<Code> ic = is_classic_mode()
2124  ? isolate()->builtins()->StoreIC_Initialize()
2125  : isolate()->builtins()->StoreIC_Initialize_Strict();
2126  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2127 
2128  // If the assignment ends an initialization block, revert to fast case.
2129  if (expr->ends_initialization_block()) {
2130  __ push(rax); // Result of assignment, saved even if not needed.
2131  __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
2132  __ CallRuntime(Runtime::kToFastProperties, 1);
2133  __ pop(rax);
2134  __ Drop(1);
2135  }
2136  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2137  context()->Plug(rax);
2138 }
2139 
2140 
2141 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2142  // Assignment to a property, using a keyed store IC.
2143 
2144  // If the assignment starts a block of assignments to the same object,
2145  // change to slow case to avoid the quadratic behavior of repeatedly
2146  // adding fast properties.
2147  if (expr->starts_initialization_block()) {
2148  __ push(result_register());
2149  // Receiver is now under the key and value.
2150  __ push(Operand(rsp, 2 * kPointerSize));
2151  __ CallRuntime(Runtime::kToSlowProperties, 1);
2152  __ pop(result_register());
2153  }
2154 
2155  __ pop(rcx);
2156  if (expr->ends_initialization_block()) {
2157  __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
2158  } else {
2159  __ pop(rdx);
2160  }
2161  // Record source code position before IC call.
2162  SetSourcePosition(expr->position());
2163  Handle<Code> ic = is_classic_mode()
2164  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2165  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2166  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2167 
2168  // If the assignment ends an initialization block, revert to fast case.
2169  if (expr->ends_initialization_block()) {
2170  __ pop(rdx);
2171  __ push(rax); // Result of assignment, saved even if not needed.
2172  __ push(rdx);
2173  __ CallRuntime(Runtime::kToFastProperties, 1);
2174  __ pop(rax);
2175  }
2176 
2177  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2178  context()->Plug(rax);
2179 }
2180 
2181 
2182 void FullCodeGenerator::VisitProperty(Property* expr) {
2183  Comment cmnt(masm_, "[ Property");
2184  Expression* key = expr->key();
2185 
2186  if (key->IsPropertyName()) {
2187  VisitForAccumulatorValue(expr->obj());
2188  EmitNamedPropertyLoad(expr);
2189  context()->Plug(rax);
2190  } else {
2191  VisitForStackValue(expr->obj());
2192  VisitForAccumulatorValue(expr->key());
2193  __ pop(rdx);
2194  EmitKeyedPropertyLoad(expr);
2195  context()->Plug(rax);
2196  }
2197 }
2198 
2199 
2200 void FullCodeGenerator::CallIC(Handle<Code> code,
2201  RelocInfo::Mode rmode,
2202  unsigned ast_id) {
2203  ic_total_count_++;
2204  __ call(code, rmode, ast_id);
2205 }
2206 
2207 
2208 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2209  Handle<Object> name,
2210  RelocInfo::Mode mode) {
2211  // Code common for calls using the IC.
2212  ZoneList<Expression*>* args = expr->arguments();
2213  int arg_count = args->length();
2214  { PreservePositionScope scope(masm()->positions_recorder());
2215  for (int i = 0; i < arg_count; i++) {
2216  VisitForStackValue(args->at(i));
2217  }
2218  __ Move(rcx, name);
2219  }
2220  // Record source position for debugger.
2221  SetSourcePosition(expr->position());
2222  // Call the IC initialization code.
2223  Handle<Code> ic =
2224  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2225  CallIC(ic, mode, expr->id());
2226  RecordJSReturnSite(expr);
2227  // Restore context register.
2228  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2229  context()->Plug(rax);
2230 }
2231 
2232 
2233 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2234  Expression* key) {
2235  // Load the key.
2236  VisitForAccumulatorValue(key);
2237 
2238  // Swap the name of the function and the receiver on the stack to follow
2239  // the calling convention for call ICs.
2240  __ pop(rcx);
2241  __ push(rax);
2242  __ push(rcx);
2243 
2244  // Load the arguments.
2245  ZoneList<Expression*>* args = expr->arguments();
2246  int arg_count = args->length();
2247  { PreservePositionScope scope(masm()->positions_recorder());
2248  for (int i = 0; i < arg_count; i++) {
2249  VisitForStackValue(args->at(i));
2250  }
2251  }
2252  // Record source position for debugger.
2253  SetSourcePosition(expr->position());
2254  // Call the IC initialization code.
2255  Handle<Code> ic =
2256  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2257  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2258  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2259  RecordJSReturnSite(expr);
2260  // Restore context register.
2261  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2262  context()->DropAndPlug(1, rax); // Drop the key still on the stack.
2263 }
2264 
2265 
2266 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2267  // Code common for calls using the call stub.
2268  ZoneList<Expression*>* args = expr->arguments();
2269  int arg_count = args->length();
2270  { PreservePositionScope scope(masm()->positions_recorder());
2271  for (int i = 0; i < arg_count; i++) {
2272  VisitForStackValue(args->at(i));
2273  }
2274  }
2275  // Record source position for debugger.
2276  SetSourcePosition(expr->position());
2277 
2278  // Record call targets in unoptimized code, but not in the snapshot.
2279  if (!Serializer::enabled()) {
2280  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2281  Handle<Object> uninitialized =
2282  TypeFeedbackCells::UninitializedSentinel(isolate());
2283  Handle<JSGlobalPropertyCell> cell =
2284  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2285  RecordTypeFeedbackCell(expr->id(), cell);
2286  __ Move(rbx, cell);
2287  }
2288 
2289  CallFunctionStub stub(arg_count, flags);
2290  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2291  __ CallStub(&stub, expr->id());
2292  RecordJSReturnSite(expr);
2293  // Restore context register.
2294  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2295  // Discard the function left on TOS.
2296  context()->DropAndPlug(1, rax);
2297 }
2298 
2299 
2300 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2301  // Push copy of the first argument or undefined if it doesn't exist.
2302  if (arg_count > 0) {
2303  __ push(Operand(rsp, arg_count * kPointerSize));
2304  } else {
2305  __ PushRoot(Heap::kUndefinedValueRootIndex);
2306  }
2307 
2308  // Push the receiver of the enclosing function and do runtime call.
2309  __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2310 
2311  // Push the language mode.
2312  __ Push(Smi::FromInt(language_mode()));
2313 
2314  // Push the start position of the scope the call resides in.
2315  __ Push(Smi::FromInt(scope()->start_position()));
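 // The five arguments to kResolvePossiblyDirectEval are now on the stack:
 // the copied function (pushed by the caller), the first argument or
 // undefined, the enclosing receiver, the language mode, and the scope
 // start position.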
2316 
2317  // Do the runtime call.
2318  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2319 }
2320 
2321 
2322 void FullCodeGenerator::VisitCall(Call* expr) {
2323 #ifdef DEBUG
2324  // We want to verify that RecordJSReturnSite gets called on all paths
2325  // through this function. Avoid early returns.
2326  expr->return_is_recorded_ = false;
2327 #endif
2328 
2329  Comment cmnt(masm_, "[ Call");
2330  Expression* callee = expr->expression();
2331  VariableProxy* proxy = callee->AsVariableProxy();
2332  Property* property = callee->AsProperty();
2333 
2334  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2335  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2336  // resolve the function we need to call and the receiver of the call.
2337  // Then we call the resolved function using the given arguments.
2338  ZoneList<Expression*>* args = expr->arguments();
2339  int arg_count = args->length();
2340  { PreservePositionScope pos_scope(masm()->positions_recorder());
2341  VisitForStackValue(callee);
2342  __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2343 
2344  // Push the arguments.
2345  for (int i = 0; i < arg_count; i++) {
2346  VisitForStackValue(args->at(i));
2347  }
2348 
2349  // Push a copy of the function (found below the arguments) and resolve
2350  // eval.
2351  __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2352  EmitResolvePossiblyDirectEval(arg_count);
2353 
2354  // The runtime call returns a pair of values in rax (function) and
2355  // rdx (receiver). Touch up the stack with the right values.
2356  __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2357  __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2358  }
2359  // Record source position for debugger.
2360  SetSourcePosition(expr->position());
2361  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2362  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2363  __ CallStub(&stub);
2364  RecordJSReturnSite(expr);
2365  // Restore context register.
2366  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2367  context()->DropAndPlug(1, rax);
2368  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2369  // Call to a global variable. Push global object as receiver for the
2370  // call IC lookup.
2371  __ push(GlobalObjectOperand());
2372  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2373  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2374  // Call to a lookup slot (dynamically introduced variable).
2375  Label slow, done;
2376 
2377  { PreservePositionScope scope(masm()->positions_recorder());
2378  // Generate code for loading from variables potentially shadowed by
2379  // eval-introduced variables.
2380  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2381  }
2382  __ bind(&slow);
2383  // Call the runtime to find the function to call (returned in rax) and
2384  // the object holding it (returned in rdx).
2385  __ push(context_register());
2386  __ Push(proxy->name());
2387  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2388  __ push(rax); // Function.
2389  __ push(rdx); // Receiver.
2390 
2391  // If fast case code has been generated, emit code to push the function
2392  // and receiver and have the slow path jump around this code.
2393  if (done.is_linked()) {
2394  Label call;
2395  __ jmp(&call, Label::kNear);
2396  __ bind(&done);
2397  // Push function.
2398  __ push(rax);
2399  // The receiver is implicitly the global receiver. Indicate this by
2400  // passing the hole to the call function stub.
2401  __ PushRoot(Heap::kTheHoleValueRootIndex);
2402  __ bind(&call);
2403  }
2404 
2405  // The receiver is either the global receiver or an object found by
2406  // LoadContextSlot. That object could be the hole if the receiver is
2407  // implicitly the global object.
2408  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2409  } else if (property != NULL) {
2410  { PreservePositionScope scope(masm()->positions_recorder());
2411  VisitForStackValue(property->obj());
2412  }
2413  if (property->key()->IsPropertyName()) {
2414  EmitCallWithIC(expr,
2415  property->key()->AsLiteral()->handle(),
2416  RelocInfo::CODE_TARGET);
2417  } else {
2418  EmitKeyedCallWithIC(expr, property->key());
2419  }
2420  } else {
2421  // Call to an arbitrary expression not handled specially above.
2422  { PreservePositionScope scope(masm()->positions_recorder());
2423  VisitForStackValue(callee);
2424  }
2425  // Load global receiver object.
2426  __ movq(rbx, GlobalObjectOperand());
2427  __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2428  // Emit function call.
2429  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2430  }
2431 
2432 #ifdef DEBUG
2433  // RecordJSReturnSite should have been called.
2434  ASSERT(expr->return_is_recorded_);
2435 #endif
2436 }
2437 
2438 
2439 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2440  Comment cmnt(masm_, "[ CallNew");
2441  // According to ECMA-262, section 11.2.2, page 44, the function
2442  // expression in new calls must be evaluated before the
2443  // arguments.
2444 
2445  // Push constructor on the stack. If it's not a function it's used as
2446  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2447  // ignored.
2448  VisitForStackValue(expr->expression());
2449 
2450  // Push the arguments ("left-to-right") on the stack.
2451  ZoneList<Expression*>* args = expr->arguments();
2452  int arg_count = args->length();
2453  for (int i = 0; i < arg_count; i++) {
2454  VisitForStackValue(args->at(i));
2455  }
2456 
2457  // Call the construct call builtin that handles allocation and
2458  // constructor invocation.
2459  SetSourcePosition(expr->position());
2460 
2461  // Load function and argument count into rdi and rax.
2462  __ Set(rax, arg_count);
2463  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2464 
2465  // Record call targets in unoptimized code, but not in the snapshot.
2466  CallFunctionFlags flags;
2467  if (!Serializer::enabled()) {
2468  flags = RECORD_CALL_TARGET;
2469  Handle<Object> uninitialized =
2470  TypeFeedbackCells::UninitializedSentinel(isolate());
2471  Handle<JSGlobalPropertyCell> cell =
2472  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2473  RecordTypeFeedbackCell(expr->id(), cell);
2474  __ Move(rbx, cell);
2475  } else {
2476  flags = NO_CALL_FUNCTION_FLAGS;
2477  }
2478 
2479  CallConstructStub stub(flags);
2480  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2481  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2482  context()->Plug(rax);
2483 }
2484 
2485 
2486 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2487  ZoneList<Expression*>* args = expr->arguments();
2488  ASSERT(args->length() == 1);
2489 
2490  VisitForAccumulatorValue(args->at(0));
2491 
2492  Label materialize_true, materialize_false;
2493  Label* if_true = NULL;
2494  Label* if_false = NULL;
2495  Label* fall_through = NULL;
2496  context()->PrepareTest(&materialize_true, &materialize_false,
2497  &if_true, &if_false, &fall_through);
2498 
2499  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2500  __ JumpIfSmi(rax, if_true);
2501  __ jmp(if_false);
2502 
2503  context()->Plug(if_true, if_false);
2504 }
2505 
2506 
2507 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2508  ZoneList<Expression*>* args = expr->arguments();
2509  ASSERT(args->length() == 1);
2510 
2511  VisitForAccumulatorValue(args->at(0));
2512 
2513  Label materialize_true, materialize_false;
2514  Label* if_true = NULL;
2515  Label* if_false = NULL;
2516  Label* fall_through = NULL;
2517  context()->PrepareTest(&materialize_true, &materialize_false,
2518  &if_true, &if_false, &fall_through);
2519 
2520  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2521  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2522  Split(non_negative_smi, if_true, if_false, fall_through);
2523 
2524  context()->Plug(if_true, if_false);
2525 }
2526 
2527 
2528 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2529  ZoneList<Expression*>* args = expr->arguments();
2530  ASSERT(args->length() == 1);
2531 
2532  VisitForAccumulatorValue(args->at(0));
2533 
2534  Label materialize_true, materialize_false;
2535  Label* if_true = NULL;
2536  Label* if_false = NULL;
2537  Label* fall_through = NULL;
2538  context()->PrepareTest(&materialize_true, &materialize_false,
2539  &if_true, &if_false, &fall_through);
2540 
2541  __ JumpIfSmi(rax, if_false);
2542  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2543  __ j(equal, if_true);
2544  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2545  // Undetectable objects behave like undefined when tested with typeof.
2546  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2547  Immediate(1 << Map::kIsUndetectable));
2548  __ j(not_zero, if_false);
2549  __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2550  __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2551  __ j(below, if_false);
2552  __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2553  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2554  Split(below_equal, if_true, if_false, fall_through);
2555 
2556  context()->Plug(if_true, if_false);
2557 }
2558 
2559 
2560 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2561  ZoneList<Expression*>* args = expr->arguments();
2562  ASSERT(args->length() == 1);
2563 
2564  VisitForAccumulatorValue(args->at(0));
2565 
2566  Label materialize_true, materialize_false;
2567  Label* if_true = NULL;
2568  Label* if_false = NULL;
2569  Label* fall_through = NULL;
2570  context()->PrepareTest(&materialize_true, &materialize_false,
2571  &if_true, &if_false, &fall_through);
2572 
2573  __ JumpIfSmi(rax, if_false);
2574  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2575  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2576  Split(above_equal, if_true, if_false, fall_through);
2577 
2578  context()->Plug(if_true, if_false);
2579 }
2580 
2581 
2582 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2583  ZoneList<Expression*>* args = expr->arguments();
2584  ASSERT(args->length() == 1);
2585 
2586  VisitForAccumulatorValue(args->at(0));
2587 
2588  Label materialize_true, materialize_false;
2589  Label* if_true = NULL;
2590  Label* if_false = NULL;
2591  Label* fall_through = NULL;
2592  context()->PrepareTest(&materialize_true, &materialize_false,
2593  &if_true, &if_false, &fall_through);
2594 
2595  __ JumpIfSmi(rax, if_false);
2596  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2597  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2598  Immediate(1 << Map::kIsUndetectable));
2599  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2600  Split(not_zero, if_true, if_false, fall_through);
2601 
2602  context()->Plug(if_true, if_false);
2603 }
2604 
2605 
2606 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2607  CallRuntime* expr) {
2608  ZoneList<Expression*>* args = expr->arguments();
2609  ASSERT(args->length() == 1);
2610 
2611  VisitForAccumulatorValue(args->at(0));
2612 
2613  Label materialize_true, materialize_false;
2614  Label* if_true = NULL;
2615  Label* if_false = NULL;
2616  Label* fall_through = NULL;
2617  context()->PrepareTest(&materialize_true, &materialize_false,
2618  &if_true, &if_false, &fall_through);
2619 
2620  if (FLAG_debug_code) __ AbortIfSmi(rax);
2621 
2622  // Check whether this map has already been checked to be safe for default
2623  // valueOf.
2624  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2625  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2626  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2627  __ j(not_zero, if_true);
2628 
2629  // Check for fast case object. Generate false result for slow case object.
2630  __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2631  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2632  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2633  __ j(equal, if_false);
2634 
2635  // Look for valueOf symbol in the descriptor array, and indicate false if
2636  // found. The type is not checked, so if it is a transition it is a false
2637  // negative.
2638  __ LoadInstanceDescriptors(rbx, rbx);
2639  __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2640  // rbx: descriptor array
2641  // rcx: length of descriptor array
2642  // Calculate the end of the descriptor array.
2643  SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2644  __ lea(rcx,
2645  Operand(
2646  rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2647  // Calculate location of the first key name.
2648  __ addq(rbx,
2649  Immediate(FixedArray::kHeaderSize +
2650  DescriptorArray::kFirstIndex * kPointerSize));
2651  // Loop through all the keys in the descriptor array. If one of these is the
2652  // symbol valueOf the result is false.
2653  Label entry, loop;
2654  __ jmp(&entry);
2655  __ bind(&loop);
2656  __ movq(rdx, FieldOperand(rbx, 0));
2657  __ Cmp(rdx, FACTORY->value_of_symbol());
2658  __ j(equal, if_false);
2659  __ addq(rbx, Immediate(kPointerSize));
2660  __ bind(&entry);
2661  __ cmpq(rbx, rcx);
2662  __ j(not_equal, &loop);
2663 
2664  // Reload map as register rbx was used as temporary above.
2665  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2666 
2667  // If a valueOf property is not found on the object, check that its
2668  // prototype is the unmodified String prototype. If not, the result is false.
2669  __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2670  __ testq(rcx, Immediate(kSmiTagMask));
2671  __ j(zero, if_false);
2672  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2673  __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2674  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2675  __ cmpq(rcx,
2676  ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2677  __ j(not_equal, if_false);
2678  // Set the bit in the map to indicate that it has been checked safe for
2679  // default valueOf and set true result.
2680  __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2681  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2682  __ jmp(if_true);
2683 
2684  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2685  context()->Plug(if_true, if_false);
2686 }
2687 
2688 
2689 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2690  ZoneList<Expression*>* args = expr->arguments();
2691  ASSERT(args->length() == 1);
2692 
2693  VisitForAccumulatorValue(args->at(0));
2694 
2695  Label materialize_true, materialize_false;
2696  Label* if_true = NULL;
2697  Label* if_false = NULL;
2698  Label* fall_through = NULL;
2699  context()->PrepareTest(&materialize_true, &materialize_false,
2700  &if_true, &if_false, &fall_through);
2701 
2702  __ JumpIfSmi(rax, if_false);
2703  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2704  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2705  Split(equal, if_true, if_false, fall_through);
2706 
2707  context()->Plug(if_true, if_false);
2708 }
2709 
2710 
2711 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2712  ZoneList<Expression*>* args = expr->arguments();
2713  ASSERT(args->length() == 1);
2714 
2715  VisitForAccumulatorValue(args->at(0));
2716 
2717  Label materialize_true, materialize_false;
2718  Label* if_true = NULL;
2719  Label* if_false = NULL;
2720  Label* fall_through = NULL;
2721  context()->PrepareTest(&materialize_true, &materialize_false,
2722  &if_true, &if_false, &fall_through);
2723 
2724  __ JumpIfSmi(rax, if_false);
2725  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2726  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2727  Split(equal, if_true, if_false, fall_through);
2728 
2729  context()->Plug(if_true, if_false);
2730 }
2731 
2732 
2733 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2734  ZoneList<Expression*>* args = expr->arguments();
2735  ASSERT(args->length() == 1);
2736 
2737  VisitForAccumulatorValue(args->at(0));
2738 
2739  Label materialize_true, materialize_false;
2740  Label* if_true = NULL;
2741  Label* if_false = NULL;
2742  Label* fall_through = NULL;
2743  context()->PrepareTest(&materialize_true, &materialize_false,
2744  &if_true, &if_false, &fall_through);
2745 
2746  __ JumpIfSmi(rax, if_false);
2747  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2748  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2749  Split(equal, if_true, if_false, fall_through);
2750 
2751  context()->Plug(if_true, if_false);
2752 }
2753 
2754 
2755 
2756 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2757  ASSERT(expr->arguments()->length() == 0);
2758 
2759  Label materialize_true, materialize_false;
2760  Label* if_true = NULL;
2761  Label* if_false = NULL;
2762  Label* fall_through = NULL;
2763  context()->PrepareTest(&materialize_true, &materialize_false,
2764  &if_true, &if_false, &fall_through);
2765 
2766  // Get the frame pointer for the calling frame.
2767  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2768 
2769  // Skip the arguments adaptor frame if it exists.
2770  Label check_frame_marker;
2771  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2772  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2773  __ j(not_equal, &check_frame_marker);
2774  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2775 
2776  // Check the marker in the calling frame.
2777  __ bind(&check_frame_marker);
2778  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2779  Smi::FromInt(StackFrame::CONSTRUCT));
2780  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2781  Split(equal, if_true, if_false, fall_through);
2782 
2783  context()->Plug(if_true, if_false);
2784 }
2785 
2786 
2787 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2788  ZoneList<Expression*>* args = expr->arguments();
2789  ASSERT(args->length() == 2);
2790 
2791  // Load the two objects into registers and perform the comparison.
2792  VisitForStackValue(args->at(0));
2793  VisitForAccumulatorValue(args->at(1));
2794 
2795  Label materialize_true, materialize_false;
2796  Label* if_true = NULL;
2797  Label* if_false = NULL;
2798  Label* fall_through = NULL;
2799  context()->PrepareTest(&materialize_true, &materialize_false,
2800  &if_true, &if_false, &fall_through);
2801 
2802  __ pop(rbx);
2803  __ cmpq(rax, rbx);
2804  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2805  Split(equal, if_true, if_false, fall_through);
2806 
2807  context()->Plug(if_true, if_false);
2808 }
2809 
2810 
2811 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2812  ZoneList<Expression*>* args = expr->arguments();
2813  ASSERT(args->length() == 1);
2814 
2815  // ArgumentsAccessStub expects the key in rdx and the formal
2816  // parameter count in rax.
2817  VisitForAccumulatorValue(args->at(0));
2818  __ movq(rdx, rax);
2819  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2820  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2821  __ CallStub(&stub);
2822  context()->Plug(rax);
2823 }
2824 
2825 
2826 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2827  ASSERT(expr->arguments()->length() == 0);
2828 
2829  Label exit;
2830  // Get the number of formal parameters.
2831  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2832 
2833  // Check if the calling frame is an arguments adaptor frame.
2834  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2835  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2836  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2837  __ j(not_equal, &exit, Label::kNear);
2838 
2839  // Arguments adaptor case: Read the arguments length from the
2840  // adaptor frame.
2841  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2842 
2843  __ bind(&exit);
2844  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
2845  context()->Plug(rax);
2846 }
2847 
2848 
2849 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2850  ZoneList<Expression*>* args = expr->arguments();
2851  ASSERT(args->length() == 1);
2852  Label done, null, function, non_function_constructor;
2853 
2854  VisitForAccumulatorValue(args->at(0));
2855 
2856  // If the object is a smi, we return null.
2857  __ JumpIfSmi(rax, &null);
2858 
2859  // Check that the object is a JS object but take special care of JS
2860  // functions to make sure they have 'Function' as their class.
2861  // Assume that there are only two callable types, and one of them is at
2862  // either end of the type range for JS object types. Saves extra comparisons.
2863  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2864  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2865  // Map is now in rax.
2866  __ j(below, &null);
2867  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2868  FIRST_SPEC_OBJECT_TYPE + 1);
2869  __ j(equal, &function);
2870 
2871  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
2872  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2873  LAST_SPEC_OBJECT_TYPE - 1);
2874  __ j(equal, &function);
2875  // Assume that there is no larger type.
2876  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2877 
2878  // Check if the constructor in the map is a JS function.
2879  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2880  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2881  __ j(not_equal, &non_function_constructor);
2882 
2883  // rax now contains the constructor function. Grab the
2884  // instance class name from there.
2885  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2886  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2887  __ jmp(&done);
2888 
2889  // Functions have class 'Function'.
2890  __ bind(&function);
2891  __ Move(rax, isolate()->factory()->function_class_symbol());
2892  __ jmp(&done);
2893 
2894  // Objects with a non-function constructor have class 'Object'.
2895  __ bind(&non_function_constructor);
2896  __ Move(rax, isolate()->factory()->Object_symbol());
2897  __ jmp(&done);
2898 
2899  // Non-JS objects have class null.
2900  __ bind(&null);
2901  __ LoadRoot(rax, Heap::kNullValueRootIndex);
2902 
2903  // All done.
2904  __ bind(&done);
2905 
2906  context()->Plug(rax);
2907 }
2908 
2909 
2910 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2911  // Conditionally generate a log call.
2912  // Args:
2913  // 0 (literal string): The type of logging (corresponds to the flags).
2914  // This is used to determine whether or not to generate the log call.
2915  // 1 (string): Format string. Access the string at argument index 2
2916  // with '%2s' (see Logger::LogRuntime for all the formats).
2917  // 2 (array): Arguments to the format string.
2918  ZoneList<Expression*>* args = expr->arguments();
2919  ASSERT_EQ(args->length(), 3);
2920  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2921  VisitForStackValue(args->at(1));
2922  VisitForStackValue(args->at(2));
2923  __ CallRuntime(Runtime::kLog, 2);
2924  }
2925  // Finally, we're expected to leave a value on the top of the stack.
2926  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2927  context()->Plug(rax);
2928 }
2929 
2930 
2931 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2932  ASSERT(expr->arguments()->length() == 0);
2933 
2934  Label slow_allocate_heapnumber;
2935  Label heapnumber_allocated;
2936 
2937  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2938  __ jmp(&heapnumber_allocated);
2939 
2940  __ bind(&slow_allocate_heapnumber);
2941  // Allocate a heap number.
2942  __ CallRuntime(Runtime::kNumberAlloc, 0);
2943  __ movq(rbx, rax);
2944 
2945  __ bind(&heapnumber_allocated);
2946 
2947  // Return a random uint32 number in rax.
2948  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2949  __ PrepareCallCFunction(1);
2950 #ifdef _WIN64
2951  __ movq(rcx, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2952  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
2953 
2954 #else
2955  __ movq(rdi, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2956  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
2957 #endif
2958  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2959 
2960  // Convert 32 random bits in rax to 0.(32 random bits) in a double
2961  // by computing:
2962  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
2963  __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2964  __ movd(xmm1, rcx);
2965  __ movd(xmm0, rax);
2966  __ cvtss2sd(xmm1, xmm1);
2967  __ xorps(xmm0, xmm1);
2968  __ subsd(xmm0, xmm1);
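 // xmm0 now holds r * 2^-32 for the 32 random bits r: the xor spliced the
 // bits into the low mantissa of the double 2^20, and the subtraction
 // removed the implicit leading one.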
2969  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2970 
2971  __ movq(rax, rbx);
2972  context()->Plug(rax);
2973 }
2974 
2975 
2976 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2977  // Load the arguments on the stack and call the stub.
2978  SubStringStub stub;
2979  ZoneList<Expression*>* args = expr->arguments();
2980  ASSERT(args->length() == 3);
2981  VisitForStackValue(args->at(0));
2982  VisitForStackValue(args->at(1));
2983  VisitForStackValue(args->at(2));
2984  __ CallStub(&stub);
2985  context()->Plug(rax);
2986 }
2987 
2988 
2989 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2990  // Load the arguments on the stack and call the stub.
2991  RegExpExecStub stub;
2992  ZoneList<Expression*>* args = expr->arguments();
2993  ASSERT(args->length() == 4);
2994  VisitForStackValue(args->at(0));
2995  VisitForStackValue(args->at(1));
2996  VisitForStackValue(args->at(2));
2997  VisitForStackValue(args->at(3));
2998  __ CallStub(&stub);
2999  context()->Plug(rax);
3000 }
3001 
3002 
3003 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3004  ZoneList<Expression*>* args = expr->arguments();
3005  ASSERT(args->length() == 1);
3006 
3007  VisitForAccumulatorValue(args->at(0)); // Load the object.
3008 
3009  Label done;
3010  // If the object is a smi return the object.
3011  __ JumpIfSmi(rax, &done);
3012  // If the object is not a value type, return the object.
3013  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3014  __ j(not_equal, &done);
3015  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
3016 
3017  __ bind(&done);
3018  context()->Plug(rax);
3019 }
3020 
3021 
3022 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3023  ZoneList<Expression*>* args = expr->arguments();
3024  ASSERT(args->length() == 2);
3025  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3026  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3027 
3028  VisitForAccumulatorValue(args->at(0)); // Load the object.
3029 
3030  Label runtime, done;
3031  Register object = rax;
3032  Register result = rax;
3033  Register scratch = rcx;
3034 
3035 #ifdef DEBUG
3036  __ AbortIfSmi(object);
3037  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3038  __ Assert(equal, "Trying to get date field from non-date.");
3039 #endif
3040 
3041  if (index->value() == 0) {
3042  __ movq(result, FieldOperand(object, JSDate::kValueOffset));
3043  } else {
3044  if (index->value() < JSDate::kFirstUncachedField) {
3045  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3046  __ movq(scratch, stamp);
3047  __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3048  __ j(not_equal, &runtime, Label::kNear);
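 // (The stamp comparison above detects date-cache invalidation, e.g. after
 // a time zone change; stale cached fields must not be used.)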
3049  __ movq(result, FieldOperand(object, JSDate::kValueOffset +
3050  kPointerSize * index->value()));
3051  __ jmp(&done);
3052  }
3053  __ bind(&runtime);
3054  __ PrepareCallCFunction(2);
3055 #ifdef _WIN64
3056  __ movq(rcx, object);
3057  __ movq(rdx, index, RelocInfo::NONE);
3058 #else
3059  __ movq(rdi, object);
3060  __ movq(rsi, index, RelocInfo::NONE);
3061 #endif
3062  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3063  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3064  __ bind(&done);
3065  }
3066  context()->Plug(rax);
3067 }
3068 
3069 
3070 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3071  // Load the arguments on the stack and call the runtime function.
3072  ZoneList<Expression*>* args = expr->arguments();
3073  ASSERT(args->length() == 2);
3074  VisitForStackValue(args->at(0));
3075  VisitForStackValue(args->at(1));
3076  MathPowStub stub(MathPowStub::ON_STACK);
3077  __ CallStub(&stub);
3078  context()->Plug(rax);
3079 }
3080 
3081 
3082 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3083  ZoneList<Expression*>* args = expr->arguments();
3084  ASSERT(args->length() == 2);
3085 
3086  VisitForStackValue(args->at(0)); // Load the object.
3087  VisitForAccumulatorValue(args->at(1)); // Load the value.
3088  __ pop(rbx); // rax = value. rbx = object.
3089 
3090  Label done;
3091  // If the object is a smi, return the value.
3092  __ JumpIfSmi(rbx, &done);
3093 
3094  // If the object is not a value type, return the value.
3095  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3096  __ j(not_equal, &done);
3097 
3098  // Store the value.
3099  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
3100  // Update the write barrier. Save the value as it will be
3101  // overwritten by the write barrier code and is needed afterward.
3102  __ movq(rdx, rax);
3103  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3104 
3105  __ bind(&done);
3106  context()->Plug(rax);
3107 }
3108 
3109 
3110 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3111  ZoneList<Expression*>* args = expr->arguments();
3112  ASSERT_EQ(args->length(), 1);
3113 
3114  // Load the argument on the stack and call the stub.
3115  VisitForStackValue(args->at(0));
3116 
3117  NumberToStringStub stub;
3118  __ CallStub(&stub);
3119  context()->Plug(rax);
3120 }
3121 
3122 
3123 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3124  ZoneList<Expression*>* args = expr->arguments();
3125  ASSERT(args->length() == 1);
3126 
3127  VisitForAccumulatorValue(args->at(0));
3128 
3129  Label done;
3130  StringCharFromCodeGenerator generator(rax, rbx);
3131  generator.GenerateFast(masm_);
3132  __ jmp(&done);
3133 
3134  NopRuntimeCallHelper call_helper;
3135  generator.GenerateSlow(masm_, call_helper);
3136 
3137  __ bind(&done);
3138  context()->Plug(rbx);
3139 }
3140 
3141 
3142 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3143  ZoneList<Expression*>* args = expr->arguments();
3144  ASSERT(args->length() == 2);
3145 
3146  VisitForStackValue(args->at(0));
3147  VisitForAccumulatorValue(args->at(1));
3148 
3149  Register object = rbx;
3150  Register index = rax;
3151  Register result = rdx;
3152 
3153  __ pop(object);
3154 
3155  Label need_conversion;
3156  Label index_out_of_range;
3157  Label done;
3158  StringCharCodeAtGenerator generator(object,
3159  index,
3160  result,
3161  &need_conversion,
3162  &need_conversion,
3163  &index_out_of_range,
3164  STRING_INDEX_IS_NUMBER);
3165  generator.GenerateFast(masm_);
3166  __ jmp(&done);
3167 
3168  __ bind(&index_out_of_range);
3169  // When the index is out of range, the spec requires us to return
3170  // NaN.
3171  __ LoadRoot(result, Heap::kNanValueRootIndex);
3172  __ jmp(&done);
3173 
3174  __ bind(&need_conversion);
3175  // Move the undefined value into the result register, which will
3176  // trigger conversion.
3177  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3178  __ jmp(&done);
3179 
3180  NopRuntimeCallHelper call_helper;
3181  generator.GenerateSlow(masm_, call_helper);
3182 
3183  __ bind(&done);
3184  context()->Plug(result);
3185 }
3186 
3187 
3188 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3189  ZoneList<Expression*>* args = expr->arguments();
3190  ASSERT(args->length() == 2);
3191 
3192  VisitForStackValue(args->at(0));
3193  VisitForAccumulatorValue(args->at(1));
3194 
3195  Register object = rbx;
3196  Register index = rax;
3197  Register scratch = rdx;
3198  Register result = rax;
3199 
3200  __ pop(object);
3201 
3202  Label need_conversion;
3203  Label index_out_of_range;
3204  Label done;
3205  StringCharAtGenerator generator(object,
3206  index,
3207  scratch,
3208  result,
3209  &need_conversion,
3210  &need_conversion,
3211  &index_out_of_range,
3212  STRING_INDEX_IS_NUMBER);
3213  generator.GenerateFast(masm_);
3214  __ jmp(&done);
3215 
3216  __ bind(&index_out_of_range);
3217  // When the index is out of range, the spec requires us to return
3218  // the empty string.
3219  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3220  __ jmp(&done);
3221 
3222  __ bind(&need_conversion);
3223  // Move smi zero into the result register, which will trigger
3224  // conversion.
3225  __ Move(result, Smi::FromInt(0));
3226  __ jmp(&done);
3227 
3228  NopRuntimeCallHelper call_helper;
3229  generator.GenerateSlow(masm_, call_helper);
3230 
3231  __ bind(&done);
3232  context()->Plug(result);
3233 }
3234 
3235 
3236 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3237  ZoneList<Expression*>* args = expr->arguments();
3238  ASSERT_EQ(2, args->length());
3239 
3240  VisitForStackValue(args->at(0));
3241  VisitForStackValue(args->at(1));
3242 
3243  StringAddStub stub(NO_STRING_ADD_FLAGS);
3244  __ CallStub(&stub);
3245  context()->Plug(rax);
3246 }
3247 
3248 
3249 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3250  ZoneList<Expression*>* args = expr->arguments();
3251  ASSERT_EQ(2, args->length());
3252 
3253  VisitForStackValue(args->at(0));
3254  VisitForStackValue(args->at(1));
3255 
3256  StringCompareStub stub;
3257  __ CallStub(&stub);
3258  context()->Plug(rax);
3259 }
3260 
3261 
3262 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3263  // Load the argument on the stack and call the stub.
3264  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3265  TranscendentalCacheStub::TAGGED);
3266  ZoneList<Expression*>* args = expr->arguments();
3267  ASSERT(args->length() == 1);
3268  VisitForStackValue(args->at(0));
3269  __ CallStub(&stub);
3270  context()->Plug(rax);
3271 }
3272 
3273 
3274 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3275  // Load the argument on the stack and call the stub.
3276  TranscendentalCacheStub stub(TranscendentalCache::COS,
3277  TranscendentalCacheStub::TAGGED);
3278  ZoneList<Expression*>* args = expr->arguments();
3279  ASSERT(args->length() == 1);
3280  VisitForStackValue(args->at(0));
3281  __ CallStub(&stub);
3282  context()->Plug(rax);
3283 }
3284 
3285 
3286 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3287  // Load the argument on the stack and call the stub.
3288  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3289  TranscendentalCacheStub::TAGGED);
3290  ZoneList<Expression*>* args = expr->arguments();
3291  ASSERT(args->length() == 1);
3292  VisitForStackValue(args->at(0));
3293  __ CallStub(&stub);
3294  context()->Plug(rax);
3295 }
3296 
3297 
3298 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3299  // Load the argument on the stack and call the stub.
3300  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3301  TranscendentalCacheStub::TAGGED);
3302  ZoneList<Expression*>* args = expr->arguments();
3303  ASSERT(args->length() == 1);
3304  VisitForStackValue(args->at(0));
3305  __ CallStub(&stub);
3306  context()->Plug(rax);
3307 }
3308 
3309 
3310 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3311  // Load the argument on the stack and call the runtime function.
3312  ZoneList<Expression*>* args = expr->arguments();
3313  ASSERT(args->length() == 1);
3314  VisitForStackValue(args->at(0));
3315  __ CallRuntime(Runtime::kMath_sqrt, 1);
3316  context()->Plug(rax);
3317 }
3318 
3319 
3320 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3321  ZoneList<Expression*>* args = expr->arguments();
3322  ASSERT(args->length() >= 2);
3323 
3324  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3325  for (int i = 0; i < arg_count + 1; i++) {
3326  VisitForStackValue(args->at(i));
3327  }
3328  VisitForAccumulatorValue(args->last()); // Function.
3329 
3330  // Check for proxy.
3331  Label proxy, done;
3332  __ CmpObjectType(rax, JS_FUNCTION_PROXY_TYPE, rbx);
3333  __ j(equal, &proxy);
3334 
3335  // InvokeFunction requires the function in rdi. Move it in there.
3336  __ movq(rdi, result_register());
3337  ParameterCount count(arg_count);
3338  __ InvokeFunction(rdi, count, CALL_FUNCTION,
3339  NullCallWrapper(), CALL_AS_METHOD);
3340  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3341  __ jmp(&done);
3342 
3343  __ bind(&proxy);
3344  __ push(rax);
3345  __ CallRuntime(Runtime::kCall, args->length());
3346  __ bind(&done);
3347 
3348  context()->Plug(rax);
3349 }
3350 
3351 
3352 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3353  RegExpConstructResultStub stub;
3354  ZoneList<Expression*>* args = expr->arguments();
3355  ASSERT(args->length() == 3);
3356  VisitForStackValue(args->at(0));
3357  VisitForStackValue(args->at(1));
3358  VisitForStackValue(args->at(2));
3359  __ CallStub(&stub);
3360  context()->Plug(rax);
3361 }
3362 
3363 
3364 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3365  ZoneList<Expression*>* args = expr->arguments();
3366  ASSERT_EQ(2, args->length());
3367 
3368  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3369  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3370 
3371  Handle<FixedArray> jsfunction_result_caches(
3372  isolate()->global_context()->jsfunction_result_caches());
3373  if (jsfunction_result_caches->length() <= cache_id) {
3374  __ Abort("Attempt to use undefined cache.");
3375  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3376  context()->Plug(rax);
3377  return;
3378  }
3379 
3380  VisitForAccumulatorValue(args->at(1));
3381 
3382  Register key = rax;
3383  Register cache = rbx;
3384  Register tmp = rcx;
3385  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
3386  __ movq(cache,
3387  FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3388  __ movq(cache,
3389  ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3390  __ movq(cache,
3391  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3392 
3393  Label done, not_found;
3394  // tmp now holds finger offset as a smi.
3395  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3396  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3397  SmiIndex index =
3398  __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3399  __ cmpq(key, FieldOperand(cache,
3400  index.reg,
3401  index.scale,
3402  FixedArray::kHeaderSize));
3403  __ j(not_equal, &not_found, Label::kNear);
3404  __ movq(rax, FieldOperand(cache,
3405  index.reg,
3406  index.scale,
3407  FixedArray::kHeaderSize + kPointerSize));
3408  __ jmp(&done, Label::kNear);
3409 
3410  __ bind(&not_found);
3411  // Call runtime to perform the lookup.
3412  __ push(cache);
3413  __ push(key);
3414  __ CallRuntime(Runtime::kGetFromCache, 2);
3415 
3416  __ bind(&done);
3417  context()->Plug(rax);
3418 }
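// EmitGetFromCache above open-codes only the fast path of a "finger" cache:
// the cache is a flat array of key/value pairs plus a finger pointing at the
// most recently used pair, and only that one pair is probed inline before
// falling back to Runtime::kGetFromCache. A hedged C++ sketch of the scheme;
// `FingerCache` and `compute` are invented names, not V8's real layout:

#include <vector>

struct FingerCache {
  std::vector<int> entries;  // Pairs laid out flat: key0, value0, key1, ...
  size_t finger = 0;         // Index of the most recently used key slot.

  int Lookup(int key, int (*compute)(int)) {
    // Inline fast path: probe only the fingered pair, as the assembly does.
    if (!entries.empty() && entries[finger] == key) return entries[finger + 1];
    // Slow path, standing in for Runtime::kGetFromCache: scan, compute on a
    // miss, and move the finger so the next lookup of this key is fast.
    for (size_t i = 0; i + 1 < entries.size(); i += 2) {
      if (entries[i] == key) { finger = i; return entries[i + 1]; }
    }
    entries.push_back(key);
    entries.push_back(compute(key));
    finger = entries.size() - 2;
    return entries.back();
  }
};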
3419 
3420 
3421 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3422  ZoneList<Expression*>* args = expr->arguments();
3423  ASSERT_EQ(2, args->length());
3424 
3425  Register right = rax;
3426  Register left = rbx;
3427  Register tmp = rcx;
3428 
3429  VisitForStackValue(args->at(0));
3430  VisitForAccumulatorValue(args->at(1));
3431  __ pop(left);
3432 
3433  Label done, fail, ok;
3434  __ cmpq(left, right);
3435  __ j(equal, &ok, Label::kNear);
3436  // Fail if either is a non-HeapObject.
3437  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3438  __ j(either_smi, &fail, Label::kNear);
3439  __ j(zero, &fail, Label::kNear);
3440  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3441  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3442  Immediate(JS_REGEXP_TYPE));
3443  __ j(not_equal, &fail, Label::kNear);
3444  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3445  __ j(not_equal, &fail, Label::kNear);
3446  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3447  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3448  __ j(equal, &ok, Label::kNear);
3449  __ bind(&fail);
3450  __ Move(rax, isolate()->factory()->false_value());
3451  __ jmp(&done, Label::kNear);
3452  __ bind(&ok);
3453  __ Move(rax, isolate()->factory()->true_value());
3454  __ bind(&done);
3455 
3456  context()->Plug(rax);
3457 }
3458 
3459 
3460 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3461  ZoneList<Expression*>* args = expr->arguments();
3462  ASSERT(args->length() == 1);
3463 
3464  VisitForAccumulatorValue(args->at(0));
3465 
3466  Label materialize_true, materialize_false;
3467  Label* if_true = NULL;
3468  Label* if_false = NULL;
3469  Label* fall_through = NULL;
3470  context()->PrepareTest(&materialize_true, &materialize_false,
3471  &if_true, &if_false, &fall_through);
3472 
3473  __ testl(FieldOperand(rax, String::kHashFieldOffset),
3474  Immediate(String::kContainsCachedArrayIndexMask));
3475  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3476  __ j(zero, if_true);
3477  __ jmp(if_false);
3478 
3479  context()->Plug(if_true, if_false);
3480 }
3481 
3482 
3483 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3484  ZoneList<Expression*>* args = expr->arguments();
3485  ASSERT(args->length() == 1);
3486  VisitForAccumulatorValue(args->at(0));
3487 
3488  if (FLAG_debug_code) {
3489  __ AbortIfNotString(rax);
3490  }
3491 
3492  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3493  ASSERT(String::kHashShift >= kSmiTagSize);
3494  __ IndexFromHash(rax, rax);
3495 
3496  context()->Plug(rax);
3497 }
3498 
3499 
3500 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3501  Label bailout, return_result, done, one_char_separator, long_separator,
3502  non_trivial_array, not_size_one_array, loop,
3503  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3504  ZoneList<Expression*>* args = expr->arguments();
3505  ASSERT(args->length() == 2);
3506  // We will leave the separator on the stack until the end of the function.
3507  VisitForStackValue(args->at(1));
3508  // Load this to rax (= array)
3509  VisitForAccumulatorValue(args->at(0));
3510  // All aliases of the same register have disjoint lifetimes.
3511  Register array = rax;
3512  Register elements = no_reg; // Will be rax.
3513 
3514  Register index = rdx;
3515 
3516  Register string_length = rcx;
3517 
3518  Register string = rsi;
3519 
3520  Register scratch = rbx;
3521 
3522  Register array_length = rdi;
3523  Register result_pos = no_reg; // Will be rdi.
3524 
3525  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3526  Operand result_operand = Operand(rsp, 1 * kPointerSize);
3527  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3528  // Separator operand is already pushed. Make room for the two
3529  // other stack fields, and clear the direction flag in anticipation
3530  // of calling CopyBytes.
3531  __ subq(rsp, Immediate(2 * kPointerSize));
3532  __ cld();
3533  // Check that the array is a JSArray
3534  __ JumpIfSmi(array, &bailout);
3535  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3536  __ j(not_equal, &bailout);
3537 
3538  // Check that the array has fast elements.
3539  __ CheckFastElements(scratch, &bailout);
3540 
3541  // Array has fast elements, so its length must be a smi.
3542  // If the array has length zero, return the empty string.
3543  __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3544  __ SmiCompare(array_length, Smi::FromInt(0));
3545  __ j(not_zero, &non_trivial_array);
3546  __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3547  __ jmp(&return_result);
3548 
3549  // Save the array length on the stack.
3550  __ bind(&non_trivial_array);
3551  __ SmiToInteger32(array_length, array_length);
3552  __ movl(array_length_operand, array_length);
3553 
3554  // Save the FixedArray containing array's elements.
3555  // End of array's live range.
3556  elements = array;
3557  __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3558  array = no_reg;
3559 
3560 
3561  // Check that all array elements are sequential ASCII strings, and
3562  // accumulate the sum of their lengths, as a smi-encoded value.
3563  __ Set(index, 0);
3564  __ Set(string_length, 0);
3565  // Loop condition: while (index < array_length).
3566  // Live loop registers: index(int32), array_length(int32), string(String*),
3567  // scratch, string_length(int32), elements(FixedArray*).
3568  if (FLAG_debug_code) {
3569  __ cmpq(index, array_length);
3570  __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3571  }
3572  __ bind(&loop);
3573  __ movq(string, FieldOperand(elements,
3574  index,
3575  times_pointer_size,
3576  FixedArray::kHeaderSize));
3577  __ JumpIfSmi(string, &bailout);
3578  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3579  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3580  __ andb(scratch, Immediate(
3581  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3582  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3583  __ j(not_equal, &bailout);
3584  __ AddSmiField(string_length,
3585  FieldOperand(string, SeqAsciiString::kLengthOffset));
3586  __ j(overflow, &bailout);
3587  __ incl(index);
3588  __ cmpl(index, array_length);
3589  __ j(less, &loop);
3590 
3591  // Live registers:
3592  // string_length: Sum of string lengths.
3593  // elements: FixedArray of strings.
3594  // index: Array length.
3595  // array_length: Array length.
3596 
3597  // If array_length is 1, return elements[0], a string.
3598  __ cmpl(array_length, Immediate(1));
3599  __ j(not_equal, &not_size_one_array);
3600  __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3601  __ jmp(&return_result);
3602 
3603  __ bind(&not_size_one_array);
3604 
3605  // End of array_length live range.
3606  result_pos = array_length;
3607  array_length = no_reg;
3608 
3609  // Live registers:
3610  // string_length: Sum of string lengths.
3611  // elements: FixedArray of strings.
3612  // index: Array length.
3613 
3614  // Check that the separator is a sequential ASCII string.
3615  __ movq(string, separator_operand);
3616  __ JumpIfSmi(string, &bailout);
3617  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3618  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3619  __ andb(scratch, Immediate(
3620  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3621  __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3622  __ j(not_equal, &bailout);
3623 
3624  // Live registers:
3625  // string_length: Sum of string lengths.
3626  // elements: FixedArray of strings.
3627  // index: Array length.
3628  // string: Separator string.
3629 
3630  // Add (separator length times (array_length - 1)) to string_length.
3631  __ SmiToInteger32(scratch,
3632  FieldOperand(string, SeqAsciiString::kLengthOffset));
3633  __ decl(index);
3634  __ imull(scratch, index);
3635  __ j(overflow, &bailout);
3636  __ addl(string_length, scratch);
3637  __ j(overflow, &bailout);
3638 
3639  // Live registers and stack values:
3640  // string_length: Total length of result string.
3641  // elements: FixedArray of strings.
3642  __ AllocateAsciiString(result_pos, string_length, scratch,
3643  index, string, &bailout);
3644  __ movq(result_operand, result_pos);
3645  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3646 
3647  __ movq(string, separator_operand);
3648  __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3649  Smi::FromInt(1));
3650  __ j(equal, &one_char_separator);
3651  __ j(greater, &long_separator);
3652 
3653 
3654  // Empty separator case:
3655  __ Set(index, 0);
3656  __ movl(scratch, array_length_operand);
3657  __ jmp(&loop_1_condition);
3658  // Loop condition: while (index < array_length).
3659  __ bind(&loop_1);
3660  // Each iteration of the loop concatenates one string to the result.
3661  // Live values in registers:
3662  // index: which element of the elements array we are adding to the result.
3663  // result_pos: the position to which we are currently copying characters.
3664  // elements: the FixedArray of strings we are joining.
3665  // scratch: array length.
3666 
3667  // Get string = array[index].
3668  __ movq(string, FieldOperand(elements, index,
3669  times_pointer_size,
3670  FixedArray::kHeaderSize));
3671  __ SmiToInteger32(string_length,
3672  FieldOperand(string, String::kLengthOffset));
3673  __ lea(string,
3674  FieldOperand(string, SeqAsciiString::kHeaderSize));
3675  __ CopyBytes(result_pos, string, string_length);
3676  __ incl(index);
3677  __ bind(&loop_1_condition);
3678  __ cmpl(index, scratch);
3679  __ j(less, &loop_1); // Loop while (index < array_length).
3680  __ jmp(&done);
3681 
3682  // Generic bailout code used from several places.
3683  __ bind(&bailout);
3684  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3685  __ jmp(&return_result);
3686 
3687 
3688  // One-character separator case
3689  __ bind(&one_char_separator);
3690  // Get the separator ASCII character value.
3691  // Register "string" holds the separator.
3692  __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3693  __ Set(index, 0);
3694  // Jump into the loop after the code that copies the separator, so the first
3695  // element is not preceded by a separator
3696  __ jmp(&loop_2_entry);
3697  // Loop condition: while (index < length).
3698  __ bind(&loop_2);
3699  // Each iteration of the loop concatenates one string to the result.
3700  // Live values in registers:
3701  // elements: The FixedArray of strings we are joining.
3702  // index: which element of the elements array we are adding to the result.
3703  // result_pos: the position to which we are currently copying characters.
3704  // scratch: Separator character.
3705 
3706  // Copy the separator character to the result.
3707  __ movb(Operand(result_pos, 0), scratch);
3708  __ incq(result_pos);
3709 
3710  __ bind(&loop_2_entry);
3711  // Get string = array[index].
3712  __ movq(string, FieldOperand(elements, index,
3713  times_pointer_size,
3714  FixedArray::kHeaderSize));
3715  __ SmiToInteger32(string_length,
3716  FieldOperand(string, String::kLengthOffset));
3717  __ lea(string,
3718  FieldOperand(string, SeqAsciiString::kHeaderSize));
3719  __ CopyBytes(result_pos, string, string_length);
3720  __ incl(index);
3721  __ cmpl(index, array_length_operand);
3722  __ j(less, &loop_2); // End while (index < length).
3723  __ jmp(&done);
3724 
3725 
3726  // Long separator case (separator is more than one character).
3727  __ bind(&long_separator);
3728 
3729  // Make elements point to end of elements array, and index
3730  // count from -array_length to zero, so we don't need to maintain
3731  // a loop limit.
3732  __ movl(index, array_length_operand);
3733  __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3734  FixedArray::kHeaderSize));
3735  __ neg(index);
3736 
3737  // Replace separator string with pointer to its first character, and
3738  // make scratch be its length.
3739  __ movq(string, separator_operand);
3740  __ SmiToInteger32(scratch,
3741  FieldOperand(string, String::kLengthOffset));
3742  __ lea(string,
3743  FieldOperand(string, SeqAsciiString::kHeaderSize));
3744  __ movq(separator_operand, string);
3745 
3746  // Jump into the loop after the code that copies the separator, so the first
3747  // element is not preceded by a separator
3748  __ jmp(&loop_3_entry);
3749  // Loop condition: while (index < length).
3750  __ bind(&loop_3);
3751  // Each iteration of the loop concatenates one string to the result.
3752  // Live values in registers:
3753  // index: which element of the elements array we are adding to the result.
3754  // result_pos: the position to which we are currently copying characters.
3755  // scratch: Separator length.
3756  // separator_operand (rsp[0x10]): Address of first char of separator.
3757 
3758  // Copy the separator to the result.
3759  __ movq(string, separator_operand);
3760  __ movl(string_length, scratch);
3761  __ CopyBytes(result_pos, string, string_length, 2);
3762 
3763  __ bind(&loop_3_entry);
3764  // Get string = array[index].
3765  __ movq(string, Operand(elements, index, times_pointer_size, 0));
3766  __ SmiToInteger32(string_length,
3767  FieldOperand(string, String::kLengthOffset));
3768  __ lea(string,
3769  FieldOperand(string, SeqAsciiString::kHeaderSize));
3770  __ CopyBytes(result_pos, string, string_length);
3771  __ incq(index);
3772  __ j(not_equal, &loop_3); // Loop while (index < 0).
3773 
3774  __ bind(&done);
3775  __ movq(rax, result_operand);
3776 
3777  __ bind(&return_result);
3778  // Drop temp values from the stack, and restore context register.
3779  __ addq(rsp, Immediate(3 * kPointerSize));
3780  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3781  context()->Plug(rax);
3782 }
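// EmitFastAsciiArrayJoin specializes Array.prototype.join into three loops:
// loop_1 for an empty separator (plain concatenation), loop_2 for a
// one-character separator (a single byte store between elements), and loop_3
// for longer separators (a byte copy between elements). A hedged C++ sketch
// of the same case split over std::string, omitting the smi/ASCII checks and
// the overflow-checked length pass that the assembly performs before
// allocating:

#include <string>
#include <vector>

std::string FastJoin(const std::vector<std::string>& parts,
                     const std::string& sep) {
  if (parts.empty()) return std::string();  // Zero elements: empty string.
  if (parts.size() == 1) return parts[0];   // One element: the element itself.

  // One pass to size the result, mirroring the string_length accumulation.
  size_t total = sep.size() * (parts.size() - 1);
  for (const std::string& s : parts) total += s.size();

  std::string result;
  result.reserve(total);  // Analogue of the single AllocateAsciiString call.
  result += parts[0];
  for (size_t i = 1; i < parts.size(); ++i) {
    result += sep;        // loop_2/loop_3: copy separator first...
    result += parts[i];   // ...then the element, exactly once per iteration.
  }
  return result;
}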
3783 
3784 
3785 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3786  Handle<String> name = expr->name();
3787  if (name->length() > 0 && name->Get(0) == '_') {
3788  Comment cmnt(masm_, "[ InlineRuntimeCall");
3789  EmitInlineRuntimeCall(expr);
3790  return;
3791  }
3792 
3793  Comment cmnt(masm_, "[ CallRuntime");
3794  ZoneList<Expression*>* args = expr->arguments();
3795 
3796  if (expr->is_jsruntime()) {
3797  // Prepare for calling JS runtime function.
3798  __ movq(rax, GlobalObjectOperand());
3799  __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3800  }
3801 
3802  // Push the arguments ("left-to-right").
3803  int arg_count = args->length();
3804  for (int i = 0; i < arg_count; i++) {
3805  VisitForStackValue(args->at(i));
3806  }
3807 
3808  if (expr->is_jsruntime()) {
3809  // Call the JS runtime function using a call IC.
3810  __ Move(rcx, expr->name());
3811  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3812  Handle<Code> ic =
3813  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3814  CallIC(ic, mode, expr->id());
3815  // Restore context register.
3816  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3817  } else {
3818  __ CallRuntime(expr->function(), arg_count);
3819  }
3820  context()->Plug(rax);
3821 }
3822 
3823 
3824 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3825  switch (expr->op()) {
3826  case Token::DELETE: {
3827  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3828  Property* property = expr->expression()->AsProperty();
3829  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3830 
3831  if (property != NULL) {
3832  VisitForStackValue(property->obj());
3833  VisitForStackValue(property->key());
3834  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3835  ? kNonStrictMode : kStrictMode;
3836  __ Push(Smi::FromInt(strict_mode_flag));
3837  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3838  context()->Plug(rax);
3839  } else if (proxy != NULL) {
3840  Variable* var = proxy->var();
3841  // Delete of an unqualified identifier is disallowed in strict mode
3842  // but "delete this" is allowed.
3843  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3844  if (var->IsUnallocated()) {
3845  __ push(GlobalObjectOperand());
3846  __ Push(var->name());
3847  __ Push(Smi::FromInt(kNonStrictMode));
3848  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3849  context()->Plug(rax);
3850  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3851  // Result of deleting non-global variables is false. 'this' is
3852  // not really a variable, though we implement it as one. The
3853  // subexpression does not have side effects.
3854  context()->Plug(var->is_this());
3855  } else {
3856  // Non-global variable. Call the runtime to try to delete from the
3857  // context where the variable was introduced.
3858  __ push(context_register());
3859  __ Push(var->name());
3860  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3861  context()->Plug(rax);
3862  }
3863  } else {
3864  // Result of deleting non-property, non-variable reference is true.
3865  // The subexpression may have side effects.
3866  VisitForEffect(expr->expression());
3867  context()->Plug(true);
3868  }
3869  break;
3870  }
3871 
3872  case Token::VOID: {
3873  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3874  VisitForEffect(expr->expression());
3875  context()->Plug(Heap::kUndefinedValueRootIndex);
3876  break;
3877  }
3878 
3879  case Token::NOT: {
3880  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3881  if (context()->IsEffect()) {
3882  // Unary NOT has no side effects so it's only necessary to visit the
3883  // subexpression. Match the optimizing compiler by not branching.
3884  VisitForEffect(expr->expression());
3885  } else if (context()->IsTest()) {
3886  const TestContext* test = TestContext::cast(context());
3887  // The labels are swapped for the recursive call.
3888  VisitForControl(expr->expression(),
3889  test->false_label(),
3890  test->true_label(),
3891  test->fall_through());
3892  context()->Plug(test->true_label(), test->false_label());
3893  } else {
3894  // We handle value contexts explicitly rather than simply visiting
3895  // for control and plugging the control flow into the context,
3896  // because we need to prepare a pair of extra administrative AST ids
3897  // for the optimizing compiler.
3898  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3899  Label materialize_true, materialize_false, done;
3900  VisitForControl(expr->expression(),
3901  &materialize_false,
3902  &materialize_true,
3903  &materialize_true);
3904  __ bind(&materialize_true);
3905  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3906  if (context()->IsAccumulatorValue()) {
3907  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
3908  } else {
3909  __ PushRoot(Heap::kTrueValueRootIndex);
3910  }
3911  __ jmp(&done, Label::kNear);
3912  __ bind(&materialize_false);
3913  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3914  if (context()->IsAccumulatorValue()) {
3915  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
3916  } else {
3917  __ PushRoot(Heap::kFalseValueRootIndex);
3918  }
3919  __ bind(&done);
3920  }
3921  break;
3922  }
3923 
3924  case Token::TYPEOF: {
3925  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3926  { StackValueContext context(this);
3927  VisitForTypeofValue(expr->expression());
3928  }
3929  __ CallRuntime(Runtime::kTypeof, 1);
3930  context()->Plug(rax);
3931  break;
3932  }
3933 
3934  case Token::ADD: {
3935  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3936  VisitForAccumulatorValue(expr->expression());
3937  Label no_conversion;
3938  __ JumpIfSmi(result_register(), &no_conversion);
3939  ToNumberStub convert_stub;
3940  __ CallStub(&convert_stub);
3941  __ bind(&no_conversion);
3942  context()->Plug(result_register());
3943  break;
3944  }
3945 
3946  case Token::SUB:
3947  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3948  break;
3949 
3950  case Token::BIT_NOT:
3951  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3952  break;
3953 
3954  default:
3955  UNREACHABLE();
3956  }
3957 }
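// Note how Token::NOT in a test context never materializes a boolean: the
// generator just recurses with the true and false labels exchanged. A hedged
// sketch of that inversion trick using function pointers as stand-ins for
// labels; `EmitTest` is an invented analogue of VisitForControl:

typedef void (*Label)();

// Stand-in for compiling a test: route control to if_true or if_false.
void EmitTest(bool condition, Label if_true, Label if_false) {
  condition ? if_true() : if_false();
}

// `!expr` costs no code of its own: recurse with the labels swapped.
void EmitNotTest(bool condition, Label if_true, Label if_false) {
  EmitTest(condition, if_false, if_true);
}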
3958 
3959 
3960 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3961  const char* comment) {
3962  // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3963  Comment cmt(masm_, comment);
3964  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3965  UnaryOverwriteMode overwrite =
3966  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3967  UnaryOpStub stub(expr->op(), overwrite);
3968  // UnaryOpStub expects the argument to be in the
3969  // accumulator register rax.
3970  VisitForAccumulatorValue(expr->expression());
3971  SetSourcePosition(expr->position());
3972  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3973  context()->Plug(rax);
3974 }
3975 
3976 
3977 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3978  Comment cmnt(masm_, "[ CountOperation");
3979  SetSourcePosition(expr->position());
3980 
3981  // Invalid left-hand-sides are rewritten to have a 'throw
3982  // ReferenceError' as the left-hand side.
3983  if (!expr->expression()->IsValidLeftHandSide()) {
3984  VisitForEffect(expr->expression());
3985  return;
3986  }
3987 
3988  // Expression can only be a property, a global or a (parameter or local)
3989  // slot.
3990  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3991  LhsKind assign_type = VARIABLE;
3992  Property* prop = expr->expression()->AsProperty();
3993  // In case of a property we use the uninitialized expression context
3994  // of the key to detect a named property.
3995  if (prop != NULL) {
3996  assign_type =
3997  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3998  }
3999 
4000  // Evaluate expression and get value.
4001  if (assign_type == VARIABLE) {
4002  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4003  AccumulatorValueContext context(this);
4004  EmitVariableLoad(expr->expression()->AsVariableProxy());
4005  } else {
4006  // Reserve space for result of postfix operation.
4007  if (expr->is_postfix() && !context()->IsEffect()) {
4008  __ Push(Smi::FromInt(0));
4009  }
4010  if (assign_type == NAMED_PROPERTY) {
4011  VisitForAccumulatorValue(prop->obj());
4012  __ push(rax); // Copy of receiver, needed for later store.
4013  EmitNamedPropertyLoad(prop);
4014  } else {
4015  VisitForStackValue(prop->obj());
4016  VisitForAccumulatorValue(prop->key());
4017  __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
4018  __ push(rax); // Copy of key, needed for later store.
4019  EmitKeyedPropertyLoad(prop);
4020  }
4021  }
4022 
4023  // We need a second deoptimization point after loading the value
4024  // in case evaluating the property load may have a side effect.
4025  if (assign_type == VARIABLE) {
4026  PrepareForBailout(expr->expression(), TOS_REG);
4027  } else {
4028  PrepareForBailoutForId(expr->CountId(), TOS_REG);
4029  }
4030 
4031  // Call ToNumber only if operand is not a smi.
4032  Label no_conversion;
4033  __ JumpIfSmi(rax, &no_conversion, Label::kNear);
4034  ToNumberStub convert_stub;
4035  __ CallStub(&convert_stub);
4036  __ bind(&no_conversion);
4037 
4038  // Save result for postfix expressions.
4039  if (expr->is_postfix()) {
4040  if (!context()->IsEffect()) {
4041  // Save the result on the stack. If we have a named or keyed property
4042  // we store the result under the receiver that is currently on top
4043  // of the stack.
4044  switch (assign_type) {
4045  case VARIABLE:
4046  __ push(rax);
4047  break;
4048  case NAMED_PROPERTY:
4049  __ movq(Operand(rsp, kPointerSize), rax);
4050  break;
4051  case KEYED_PROPERTY:
4052  __ movq(Operand(rsp, 2 * kPointerSize), rax);
4053  break;
4054  }
4055  }
4056  }
4057 
4058  // Inline smi case if we are in a loop.
4059  Label done, stub_call;
4060  JumpPatchSite patch_site(masm_);
4061 
4062  if (ShouldInlineSmiCase(expr->op())) {
4063  if (expr->op() == Token::INC) {
4064  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
4065  } else {
4066  __ SmiSubConstant(rax, rax, Smi::FromInt(1));
4067  }
4068  __ j(overflow, &stub_call, Label::kNear);
4069  // We could eliminate this smi check if we split the code at
4070  // the first smi check before calling ToNumber.
4071  patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
4072 
4073  __ bind(&stub_call);
4074  // Call stub. Undo operation first.
4075  if (expr->op() == Token::INC) {
4076  __ SmiSubConstant(rax, rax, Smi::FromInt(1));
4077  } else {
4078  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
4079  }
4080  }
4081 
4082  // Record position before stub call.
4083  SetSourcePosition(expr->position());
4084 
4085  // Call stub for +1/-1.
4086  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
4087  if (expr->op() == Token::INC) {
4088  __ Move(rdx, Smi::FromInt(1));
4089  } else {
4090  __ movq(rdx, rax);
4091  __ Move(rax, Smi::FromInt(1));
4092  }
4093  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4094  patch_site.EmitPatchInfo();
4095  __ bind(&done);
4096 
4097  // Store the value returned in rax.
4098  switch (assign_type) {
4099  case VARIABLE:
4100  if (expr->is_postfix()) {
4101  // Perform the assignment as if via '='.
4102  { EffectContext context(this);
4103  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4104  Token::ASSIGN);
4105  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4106  context.Plug(rax);
4107  }
4108  // For all contexts except kEffect: We have the result on
4109  // top of the stack.
4110  if (!context()->IsEffect()) {
4111  context()->PlugTOS();
4112  }
4113  } else {
4114  // Perform the assignment as if via '='.
4115  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4116  Token::ASSIGN);
4117  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4118  context()->Plug(rax);
4119  }
4120  break;
4121  case NAMED_PROPERTY: {
4122  __ Move(rcx, prop->key()->AsLiteral()->handle());
4123  __ pop(rdx);
4124  Handle<Code> ic = is_classic_mode()
4125  ? isolate()->builtins()->StoreIC_Initialize()
4126  : isolate()->builtins()->StoreIC_Initialize_Strict();
4127  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4128  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4129  if (expr->is_postfix()) {
4130  if (!context()->IsEffect()) {
4131  context()->PlugTOS();
4132  }
4133  } else {
4134  context()->Plug(rax);
4135  }
4136  break;
4137  }
4138  case KEYED_PROPERTY: {
4139  __ pop(rcx);
4140  __ pop(rdx);
4141  Handle<Code> ic = is_classic_mode()
4142  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4143  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4144  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4145  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4146  if (expr->is_postfix()) {
4147  if (!context()->IsEffect()) {
4148  context()->PlugTOS();
4149  }
4150  } else {
4151  context()->Plug(rax);
4152  }
4153  break;
4154  }
4155  }
4156 }
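// The inline smi fast path above adds or subtracts 1 and, on overflow, undoes
// the operation so the generic BinaryOpStub sees the original operand. A
// hedged sketch of that add-then-undo pattern on plain 32-bit integers (real
// smis carry a tag, elided here); __builtin_add_overflow is a GCC/Clang
// builtin used to mimic the overflow branch:

#include <cstdint>

int32_t IncrementWithFallback(int32_t value, int32_t (*stub)(int32_t)) {
  int32_t result;
  if (!__builtin_add_overflow(value, int32_t{1}, &result)) {
    return result;  // Fast path: the add did not overflow.
  }
  // Overflow: the assembly's SmiSubConstant restores the operand before the
  // stub call, so the slow path always receives the original value.
  return stub(value);
}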
4157 
4158 
4159 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4160  VariableProxy* proxy = expr->AsVariableProxy();
4161  ASSERT(!context()->IsEffect());
4162  ASSERT(!context()->IsTest());
4163 
4164  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4165  Comment cmnt(masm_, "Global variable");
4166  __ Move(rcx, proxy->name());
4167  __ movq(rax, GlobalObjectOperand());
4168  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4169  // Use a regular load, not a contextual load, to avoid a reference
4170  // error.
4171  CallIC(ic);
4172  PrepareForBailout(expr, TOS_REG);
4173  context()->Plug(rax);
4174  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4175  Label done, slow;
4176 
4177  // Generate code for loading from variables potentially shadowed
4178  // by eval-introduced variables.
4179  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4180 
4181  __ bind(&slow);
4182  __ push(rsi);
4183  __ Push(proxy->name());
4184  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4185  PrepareForBailout(expr, TOS_REG);
4186  __ bind(&done);
4187 
4188  context()->Plug(rax);
4189  } else {
4190  // This expression cannot throw a reference error at the top level.
4191  VisitInDuplicateContext(expr);
4192  }
4193 }
4194 
4195 
4196 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4197  Expression* sub_expr,
4198  Handle<String> check) {
4199  Label materialize_true, materialize_false;
4200  Label* if_true = NULL;
4201  Label* if_false = NULL;
4202  Label* fall_through = NULL;
4203  context()->PrepareTest(&materialize_true, &materialize_false,
4204  &if_true, &if_false, &fall_through);
4205 
4206  { AccumulatorValueContext context(this);
4207  VisitForTypeofValue(sub_expr);
4208  }
4209  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4210 
4211  if (check->Equals(isolate()->heap()->number_symbol())) {
4212  __ JumpIfSmi(rax, if_true);
4213  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4214  __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4215  Split(equal, if_true, if_false, fall_through);
4216  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4217  __ JumpIfSmi(rax, if_false);
4218  // Check for undetectable objects => false.
4219  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4220  __ j(above_equal, if_false);
4221  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4222  Immediate(1 << Map::kIsUndetectable));
4223  Split(zero, if_true, if_false, fall_through);
4224  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4225  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4226  __ j(equal, if_true);
4227  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4228  Split(equal, if_true, if_false, fall_through);
4229  } else if (FLAG_harmony_typeof &&
4230  check->Equals(isolate()->heap()->null_symbol())) {
4231  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4232  Split(equal, if_true, if_false, fall_through);
4233  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4234  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4235  __ j(equal, if_true);
4236  __ JumpIfSmi(rax, if_false);
4237  // Check for undetectable objects => true.
4238  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4239  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4240  Immediate(1 << Map::kIsUndetectable));
4241  Split(not_zero, if_true, if_false, fall_through);
4242  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4243  __ JumpIfSmi(rax, if_false);
4244  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4245  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4246  __ j(equal, if_true);
4247  __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4248  Split(equal, if_true, if_false, fall_through);
4249  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4250  __ JumpIfSmi(rax, if_false);
4251  if (!FLAG_harmony_typeof) {
4252  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4253  __ j(equal, if_true);
4254  }
4255  __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4256  __ j(below, if_false);
4257  __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4258  __ j(above, if_false);
4259  // Check for undetectable objects => false.
4260  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4261  Immediate(1 << Map::kIsUndetectable));
4262  Split(zero, if_true, if_false, fall_through);
4263  } else {
4264  if (if_false != fall_through) __ jmp(if_false);
4265  }
4266  context()->Plug(if_true, if_false);
4267 }
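// EmitLiteralCompareTypeof compiles `typeof x == "<literal>"` by dispatching
// on the literal at code-generation time, so only the checks for that one
// type ever get emitted and the typeof string is never built. A hedged C++
// sketch over a toy tag enum; `TaggedKind` and `TypeofIs` are invented for
// illustration:

#include <cstring>

enum TaggedKind { kSmi, kHeapNumber, kString, kBoolean, kUndefined, kOther };

bool TypeofIs(TaggedKind kind, const char* literal) {
  if (std::strcmp(literal, "number") == 0) {
    return kind == kSmi || kind == kHeapNumber;  // Smi test, then map test.
  }
  if (std::strcmp(literal, "string") == 0) return kind == kString;
  if (std::strcmp(literal, "boolean") == 0) return kind == kBoolean;
  if (std::strcmp(literal, "undefined") == 0) return kind == kUndefined;
  // Unknown literal: like the final else above, fall straight to false.
  return false;
}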
4268 
4269 
4270 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4271  Comment cmnt(masm_, "[ CompareOperation");
4272  SetSourcePosition(expr->position());
4273 
4274  // First we try a fast inlined version of the compare when one of
4275  // the operands is a literal.
4276  if (TryLiteralCompare(expr)) return;
4277 
4278  // Always perform the comparison for its control flow. Pack the result
4279  // into the expression's context after the comparison is performed.
4280  Label materialize_true, materialize_false;
4281  Label* if_true = NULL;
4282  Label* if_false = NULL;
4283  Label* fall_through = NULL;
4284  context()->PrepareTest(&materialize_true, &materialize_false,
4285  &if_true, &if_false, &fall_through);
4286 
4287  Token::Value op = expr->op();
4288  VisitForStackValue(expr->left());
4289  switch (op) {
4290  case Token::IN:
4291  VisitForStackValue(expr->right());
4292  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4293  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4294  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4295  Split(equal, if_true, if_false, fall_through);
4296  break;
4297 
4298  case Token::INSTANCEOF: {
4299  VisitForStackValue(expr->right());
4300  InstanceofStub stub(InstanceofStub::kNoFlags);
4301  __ CallStub(&stub);
4302  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4303  __ testq(rax, rax);
4304  // The stub returns 0 for true.
4305  Split(zero, if_true, if_false, fall_through);
4306  break;
4307  }
4308 
4309  default: {
4310  VisitForAccumulatorValue(expr->right());
4311  Condition cc = no_condition;
4312  switch (op) {
4313  case Token::EQ_STRICT:
4314  case Token::EQ:
4315  cc = equal;
4316  break;
4317  case Token::LT:
4318  cc = less;
4319  break;
4320  case Token::GT:
4321  cc = greater;
4322  break;
4323  case Token::LTE:
4324  cc = less_equal;
4325  break;
4326  case Token::GTE:
4327  cc = greater_equal;
4328  break;
4329  case Token::IN:
4330  case Token::INSTANCEOF:
4331  default:
4332  UNREACHABLE();
4333  }
4334  __ pop(rdx);
4335 
4336  bool inline_smi_code = ShouldInlineSmiCase(op);
4337  JumpPatchSite patch_site(masm_);
4338  if (inline_smi_code) {
4339  Label slow_case;
4340  __ movq(rcx, rdx);
4341  __ or_(rcx, rax);
4342  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4343  __ cmpq(rdx, rax);
4344  Split(cc, if_true, if_false, NULL);
4345  __ bind(&slow_case);
4346  }
4347 
4348  // Record position and call the compare IC.
4349  SetSourcePosition(expr->position());
4350  Handle<Code> ic = CompareIC::GetUninitialized(op);
4351  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4352  patch_site.EmitPatchInfo();
4353 
4354  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4355  __ testq(rax, rax);
4356  Split(cc, if_true, if_false, fall_through);
4357  }
4358  }
4359 
4360  // Convert the result of the comparison into one expected for this
4361  // expression's context.
4362  context()->Plug(if_true, if_false);
4363 }
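// The inline smi comparison above ORs the two operands so a single tag test
// rejects any heap object, then compares the tagged words directly (smi
// tagging preserves signed order). A hedged sketch of that guard; the names
// below are invented and the tag is simplified to the low bit:

#include <cstdint>

const intptr_t kToySmiTagMask = 1;  // Mirrors kSmiTagMask; the smi tag is 0.

bool BothSmis(intptr_t left, intptr_t right) {
  // If either word carries the heap-object tag bit, so does their OR.
  return ((left | right) & kToySmiTagMask) == 0;
}

bool LessThanFast(intptr_t left, intptr_t right,
                  bool (*slow)(intptr_t, intptr_t)) {
  if (BothSmis(left, right)) return left < right;  // Order-preserving tags.
  return slow(left, right);  // Heap numbers and strings go to the compare IC.
}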
4364 
4365 
4366 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4367  Expression* sub_expr,
4368  NilValue nil) {
4369  Label materialize_true, materialize_false;
4370  Label* if_true = NULL;
4371  Label* if_false = NULL;
4372  Label* fall_through = NULL;
4373  context()->PrepareTest(&materialize_true, &materialize_false,
4374  &if_true, &if_false, &fall_through);
4375 
4376  VisitForAccumulatorValue(sub_expr);
4377  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4378  Heap::RootListIndex nil_value = nil == kNullValue ?
4379  Heap::kNullValueRootIndex :
4380  Heap::kUndefinedValueRootIndex;
4381  __ CompareRoot(rax, nil_value);
4382  if (expr->op() == Token::EQ_STRICT) {
4383  Split(equal, if_true, if_false, fall_through);
4384  } else {
4385  Heap::RootListIndex other_nil_value = nil == kNullValue ?
4386  Heap::kUndefinedValueRootIndex :
4387  Heap::kNullValueRootIndex;
4388  __ j(equal, if_true);
4389  __ CompareRoot(rax, other_nil_value);
4390  __ j(equal, if_true);
4391  __ JumpIfSmi(rax, if_false);
4392  // It can be an undetectable object.
4393  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4394  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4395  Immediate(1 << Map::kIsUndetectable));
4396  Split(not_zero, if_true, if_false, fall_through);
4397  }
4398  context()->Plug(if_true, if_false);
4399 }
4400 
4401 
4402 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4403  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4404  context()->Plug(rax);
4405 }
4406 
4407 
4408 Register FullCodeGenerator::result_register() {
4409  return rax;
4410 }
4411 
4412 
4413 Register FullCodeGenerator::context_register() {
4414  return rsi;
4415 }
4416 
4417 
4418 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4419  ASSERT(IsAligned(frame_offset, kPointerSize));
4420  __ movq(Operand(rbp, frame_offset), value);
4421 }
4422 
4423 
4424 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4425  __ movq(dst, ContextOperand(rsi, context_index));
4426 }
4427 
4428 
4429 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4430  Scope* declaration_scope = scope()->DeclarationScope();
4431  if (declaration_scope->is_global_scope() ||
4432  declaration_scope->is_module_scope()) {
4433  // Contexts nested in the global context have a canonical empty function
4434  // as their closure, not the anonymous closure containing the global
4435  // code. Pass a smi sentinel and let the runtime look up the empty
4436  // function.
4437  __ Push(Smi::FromInt(0));
4438  } else if (declaration_scope->is_eval_scope()) {
4439  // Contexts created by a call to eval have the same closure as the
4440  // context calling eval, not the anonymous closure containing the eval
4441  // code. Fetch it from the context.
4442  __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4443  } else {
4444  ASSERT(declaration_scope->is_function_scope());
4445  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4446  }
4447 }
4448 
4449 
4450 // ----------------------------------------------------------------------------
4451 // Non-local control flow support.
4452 
4453 
4454 void FullCodeGenerator::EnterFinallyBlock() {
4455  ASSERT(!result_register().is(rdx));
4456  ASSERT(!result_register().is(rcx));
4457  // Cook return address on top of stack (smi encoded Code* delta)
4458  __ pop(rdx);
4459  __ Move(rcx, masm_->CodeObject());
4460  __ subq(rdx, rcx);
4461  __ Integer32ToSmi(rdx, rdx);
4462  __ push(rdx);
4463 
4464  // Store result register while executing finally block.
4465  __ push(result_register());
4466 
4467  // Store pending message while executing finally block.
4468  ExternalReference pending_message_obj =
4469  ExternalReference::address_of_pending_message_obj(isolate());
4470  __ Load(rdx, pending_message_obj);
4471  __ push(rdx);
4472 
4473  ExternalReference has_pending_message =
4474  ExternalReference::address_of_has_pending_message(isolate());
4475  __ Load(rdx, has_pending_message);
4476  __ push(rdx);
4477 
4478  ExternalReference pending_message_script =
4479  ExternalReference::address_of_pending_message_script(isolate());
4480  __ Load(rdx, pending_message_script);
4481  __ push(rdx);
4482 }
4483 
4484 
4485 void FullCodeGenerator::ExitFinallyBlock() {
4486  ASSERT(!result_register().is(rdx));
4487  ASSERT(!result_register().is(rcx));
4488  // Restore pending message from stack.
4489  __ pop(rdx);
4490  ExternalReference pending_message_script =
4491  ExternalReference::address_of_pending_message_script(isolate());
4492  __ Store(pending_message_script, rdx);
4493 
4494  __ pop(rdx);
4495  ExternalReference has_pending_message =
4496  ExternalReference::address_of_has_pending_message(isolate());
4497  __ Store(has_pending_message, rdx);
4498 
4499  __ pop(rdx);
4500  ExternalReference pending_message_obj =
4501  ExternalReference::address_of_pending_message_obj(isolate());
4502  __ Store(pending_message_obj, rdx);
4503 
4504  // Restore result register from stack.
4505  __ pop(result_register());
4506 
4507  // Uncook return address.
4508  __ pop(rdx);
4509  __ SmiToInteger32(rdx, rdx);
4510  __ Move(rcx, masm_->CodeObject());
4511  __ addq(rdx, rcx);
4512  __ jmp(rdx);
4513 }
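// Enter/ExitFinallyBlock "cook" the return address: they save the smi-encoded
// offset from the code object's start rather than a raw code pointer, so a GC
// that moves the code object cannot leave a stale address on the stack. A
// hedged sketch of the arithmetic with a 1-bit smi tag for simplicity (x64
// actually keeps smi payloads in the upper 32 bits):

#include <cstdint>

uintptr_t Cook(uintptr_t return_address, uintptr_t code_start) {
  return (return_address - code_start) << 1;  // Delta, smi-tagged (tag 0).
}

uintptr_t Uncook(uintptr_t cooked, uintptr_t code_start) {
  // Untag, then rebase on the code object's (possibly relocated) start.
  return (cooked >> 1) + code_start;
}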
4514 
4515 
4516 #undef __
4517 
4518 #define __ ACCESS_MASM(masm())
4519 
4520 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4521  int* stack_depth,
4522  int* context_length) {
4523  // The macros used here must preserve the result register.
4524 
4525  // Because the handler block contains the context of the finally
4526  // code, we can restore it directly from there for the finally code
4527  // rather than iteratively unwinding contexts via their previous
4528  // links.
4529  __ Drop(*stack_depth); // Down to the handler block.
4530  if (*context_length > 0) {
4531  // Restore the context to its dedicated register and the stack.
4532  __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4533  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4534  }
4535  __ PopTryHandler();
4536  __ call(finally_entry_);
4537 
4538  *stack_depth = 0;
4539  *context_length = 0;
4540  return previous_;
4541 }
4542 
4543 
4544 #undef __
4545 
4546 } } // namespace v8::internal
4547 
4548 #endif // V8_TARGET_ARCH_X64
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
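Each DEFINE_* line above is expanded more than once with a different definition of the underlying FLAG macro: once to declare the FLAG_* globals the engine reads, and once to populate the Flag table used for command-line parsing and --help. A minimal, self-contained sketch of that X-macro pattern follows; FLAG_LIST, DEFINE_STORAGE, and DEFINE_META are illustrative names, not V8's actual macros.

// Sketch of the X-macro pattern behind the flag table (names simplified).
#include <cstdio>

// One list, many expansions: each entry is re-read with a different
// meaning of the FLAG_DEF parameter.
#define FLAG_LIST(FLAG_DEF)                                                  \
  FLAG_DEF(bool, debug_code, false,                                          \
           "generate extra code (assertions) for debugging")                 \
  FLAG_DEF(bool, code_comments, false, "emit comments in code disassembly")  \
  FLAG_DEF(int, stack_trace_limit, 10, "number of stack frames to capture")

// Expansion 1: define the FLAG_* globals consulted at runtime.
#define DEFINE_STORAGE(type, name, default_value, comment) \
  type FLAG_##name = default_value;
FLAG_LIST(DEFINE_STORAGE)
#undef DEFINE_STORAGE

// Expansion 2: build a table describing every flag.
struct FlagInfo { const char* name; const char* comment; };
#define DEFINE_META(type, name, default_value, comment) { #name, comment },
static const FlagInfo kFlagTable[] = { FLAG_LIST(DEFINE_META) };
#undef DEFINE_META

int main() {
  for (const FlagInfo& f : kFlagTable)
    std::printf("--%s: %s\n", f.name, f.comment);
  return 0;
}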
static const int kDataOffset
Definition: objects.h:6432
static const int kGlobalReceiverOffset
Definition: objects.h:6085
int SizeOfCodeGeneratedSince(Label *label)
T Max(T a, T b)
Definition: utils.h:222
Scope * outer_scope() const
Definition: scopes.h:347
const Register rsi
Flag flags[]
Definition: flags.cc:1467
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5052
static bool enabled()
Definition: serialize.h:480
bool is_int8(int x)
Definition: assembler.h:830
static const int kSize
Definition: objects.h:6433
SmiIndex SmiToIndex(Register dst, Register src, int shift)
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInObjectFieldCount
Definition: objects.h:6487
const char * comment() const
Definition: flags.cc:1362
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3902
const uint32_t kStringRepresentationMask
Definition: objects.h:455
static const int kMaximumSlots
Definition: code-stubs.h:343
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
Definition: objects.h:5609
static const int kGlobalContextOffset
Definition: objects.h:6084
Variable * parameter(int index) const
Definition: scopes.h:330
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
static const int kFunctionEntryId
Definition: ast.h:198
static Smi * cast(Object *object)
int ContextChainLength(Scope *scope)
Definition: scopes.cc:689
static const int kHashFieldOffset
Definition: objects.h:7099
#define IN
static const int kLiteralsOffset
Definition: objects.h:5987
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kLengthOffset
Definition: objects.h:7098
static const int kValueOffset
Definition: objects.h:1307
Variable * arguments() const
Definition: scopes.h:338
static const int kForInSlowCaseMarker
Definition: objects.h:4149
NilValue
Definition: v8.h:141
const XMMRegister xmm1
const int kPointerSize
Definition: globals.h:234
static const int kJSReturnSequenceLength
static const int kForInFastCaseMarker
Definition: objects.h:4148
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:5011
Operand FieldOperand(Register object, int offset)
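FieldOperand (above) and ContextOperand build memory operands for fields of tagged heap objects: heap pointers carry a low tag (kHeapObjectTag), and the helper folds the tag subtraction into the operand's displacement so generated code never has to untag the pointer first. A simplified model follows; the Operand struct and the integer register type here are illustrative stand-ins for the assembler's own types.

// Simplified model of tagged-pointer field addressing.
#include <cstdint>
#include <cstdio>

const int kHeapObjectTag = 1;  // a heap pointer is the object address + 1

struct Operand {               // illustrative stand-in for the assembler type
  int base_reg;
  int32_t disp;
};

// [object + offset - kHeapObjectTag]: subtracting the tag once in the
// displacement addresses the field of the untagged object directly.
Operand FieldOperand(int object_reg, int offset) {
  return Operand{object_reg, offset - kHeapObjectTag};
}

int main() {
  // e.g. a field at raw offset 8 of the object held in register 3:
  Operand op = FieldOperand(3, 8);
  std::printf("[r%d + %d]\n", op.base_reg, op.disp);  // prints [r3 + 7]
  return 0;
}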
bool IsAligned(T value, U alignment)
Definition: utils.h:206
const Register rbx
const Register rsp
#define __
static const int kCacheStampOffset
Definition: objects.h:6280
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:690
static const int kPropertiesOffset
Definition: objects.h:2113
const Register rax
const Register rdi
static const int kHeaderSize
Definition: objects.h:7282
static const int kElementsOffset
Definition: objects.h:2114
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7154
const uint32_t kStringTag
Definition: objects.h:437
#define BASE_EMBEDDED
Definition: allocation.h:68
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
static int OffsetOfElementAt(int index)
Definition: objects.h:2291
static const int kLengthOffset
Definition: objects.h:8111
static const int kMaxLoopNestingMarker
Definition: objects.h:4491
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:2233
static const int kEnumerationIndexOffset
Definition: objects.h:2622
static const int kMapOffset
Definition: objects.h:1219
static const int kValueOffset
Definition: objects.h:6272
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:2627
const uint32_t kIsNotStringMask
Definition: objects.h:436
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:536
static const int kLengthOffset
Definition: objects.h:2232
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:168
const Register kScratchRegister
v8::Handle< v8::Value > Load(const v8::Arguments &args)
Definition: shell.cc:159
const int kSmiTagSize
Definition: v8.h:3854
flag
Definition: objects-inl.h:3682
const Register rcx
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:708
static const int kConstructorOffset
Definition: objects.h:4954
Condition CheckNonNegativeSmi(Register src)
const int kSmiTag
Definition: v8.h:3853
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
static const int kIsUndetectable
Definition: objects.h:5005
Condition CheckEitherSmi(Register first, Register second, Register scratch=kScratchRegister)
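The smi helpers indexed here (kSmiTag, kSmiTagSize, CheckNonNegativeSmi, CheckEitherSmi) all rest on the same tagging invariant: kSmiTag == 0 with kSmiTagSize == 1, so a small integer has its low bit clear while a heap-object pointer has it set. A sketch of the bit tricks involved; IsSmi and EitherSmi are illustrative helpers, not V8's API.

// Sketch of smi tag checks (on x64 the smi payload actually sits in the
// upper 32 bits, but the low-bit test is the same).
#include <cassert>
#include <cstdint>

const intptr_t kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (intptr_t{1} << kSmiTagSize) - 1;  // 0x1

bool IsSmi(intptr_t word) {
  return (word & kSmiTagMask) == kSmiTag;  // low bit clear => smi
}

// CheckEitherSmi-style trick: the low bit of (a & b) is clear iff at
// least one of the two words has a clear low bit, i.e. is an smi.
bool EitherSmi(intptr_t a, intptr_t b) {
  return IsSmi(a & b);
}

int main() {
  intptr_t smi = 42 << kSmiTagSize;  // tagged integer: low bit clear
  intptr_t obj = 0x1000 | 1;         // tagged heap pointer: low bit set
  assert(IsSmi(smi) && !IsSmi(obj));
  assert(EitherSmi(smi, obj) && !EitherSmi(obj, obj));
  return 0;
}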
static bool ShouldGenerateLog(Expression *type)
Definition: codegen.cc:153
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
#define FACTORY
Definition: isolate.h:1409
static const int kPrototypeOffset
Definition: objects.h:4953
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const uint32_t kAsciiStringTag
Definition: objects.h:451
static const int kHashShift
Definition: objects.h:7121
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1669
static const int kMaxValue
Definition: objects.h:1006
static const int kBitField2Offset
Definition: objects.h:4995
#define VOID
static Handle< Code > GetUninitialized(Token::Value op)
Definition: ic.cc:2544
void check(i::Vector< const char > string)
FlagType type() const
Definition: flags.cc:1358
static const int kFirstIndex
Definition: objects.h:2611
const uint32_t kStringEncodingMask
Definition: objects.h:449
static const int kInstanceTypeOffset
Definition: objects.h:4992
TypeofState
Definition: codegen.h:70
const XMMRegister xmm0