v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
full-codegen-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance distance = Label::kFar) {
63  __ test(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, distance); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance distance = Label::kFar) {
70  __ test(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, distance); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ test(eax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, distance);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
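// How the patch site works: `test reg, kSmiTagMask` always clears the
// carry flag, so the jc emitted by EmitJumpIfSmi is never taken and the
// jnc emitted by EmitJumpIfNotSmi is always taken until the inline cache
// machinery patches them to jz/jnz, which do observe the smi bit (see the
// comment above: jc is patched with jz, jnc with jnz). EmitPatchInfo tells
// the patcher where the jump lives by encoding its byte distance from the
// IC call site in the imm8 of a `test eax, imm8`; a plain nop signals that
// no inlined smi code was emitted. Typical use, from VisitSwitchStatement
// below:
//   JumpPatchSite patch_site(masm_);
//   patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
//   ...
//   CallIC(ic, clause->CompareId());
//   patch_site.EmitPatchInfo();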
102 
103 
104 static void EmitStackCheck(MacroAssembler* masm_,
105  int pointers = 0,
106  Register scratch = esp) {
107  Label ok;
108  Isolate* isolate = masm_->isolate();
109  ExternalReference stack_limit =
110  ExternalReference::address_of_stack_limit(isolate);
111  ASSERT(scratch.is(esp) == (pointers == 0));
112  if (pointers != 0) {
113  __ mov(scratch, esp);
114  __ sub(scratch, Immediate(pointers * kPointerSize));
115  }
116  __ cmp(scratch, Operand::StaticVariable(stack_limit));
117  __ j(above_equal, &ok, Label::kNear);
118  __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
119  __ bind(&ok);
120 }
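// EmitStackCheck compares esp (or, when `pointers` is nonzero, esp minus
// the pointers * kPointerSize about to be reserved, computed into
// `scratch`) against the isolate's stack limit and calls the StackCheck
// builtin when the limit would be crossed. Generate() uses the `pointers`
// form below so that very large frames are checked before their locals
// are pushed.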
121 
122 
123 // Generate code for a JS function. On entry to the function the receiver
124 // and arguments have been pushed on the stack left to right, with the
125 // return address on top of them. The actual argument count matches the
126 // formal parameter count expected by the function.
127 //
128 // The live registers are:
129 // o edi: the JS function object being called (i.e. ourselves)
130 // o esi: our context
131 // o ebp: our caller's frame pointer
132 // o esp: stack pointer (pointing to return address)
133 //
134 // The function builds a JS frame. Please see JavaScriptFrameConstants in
135 // frames-ia32.h for its layout.
136 void FullCodeGenerator::Generate() {
137  CompilationInfo* info = info_;
138  handler_table_ =
139  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
140 
141  InitializeFeedbackVector();
142 
143  profiling_counter_ = isolate()->factory()->NewCell(
144  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
145  SetFunctionPosition(function());
146  Comment cmnt(masm_, "[ function compiled by full code generator");
147 
148  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
149 
150 #ifdef DEBUG
151  if (strlen(FLAG_stop_at) > 0 &&
152  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
153  __ int3();
154  }
155 #endif
156 
157  // Sloppy mode functions and builtins need to replace the receiver with the
158  // global proxy when called as functions (without an explicit receiver
159  // object).
160  if (info->strict_mode() == SLOPPY && !info->is_native()) {
161  Label ok;
162  // +1 for return address.
163  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
164  __ mov(ecx, Operand(esp, receiver_offset));
165 
166  __ cmp(ecx, isolate()->factory()->undefined_value());
167  __ j(not_equal, &ok, Label::kNear);
168 
169  __ mov(ecx, GlobalObjectOperand());
170  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
171 
172  __ mov(Operand(esp, receiver_offset), ecx);
173 
174  __ bind(&ok);
175  }
176 
177  // Open a frame scope to indicate that there is a frame on the stack. The
178  // MANUAL indicates that the scope shouldn't actually generate code to set up
179  // the frame (that is done below).
180  FrameScope frame_scope(masm_, StackFrame::MANUAL);
181 
182  info->set_prologue_offset(masm_->pc_offset());
183  __ Prologue(BUILD_FUNCTION_FRAME);
184  info->AddNoFrameRange(0, masm_->pc_offset());
185 
186  { Comment cmnt(masm_, "[ Allocate locals");
187  int locals_count = info->scope()->num_stack_slots();
188  // Generators allocate locals, if any, in context slots.
189  ASSERT(!info->function()->is_generator() || locals_count == 0);
190  if (locals_count == 1) {
191  __ push(Immediate(isolate()->factory()->undefined_value()));
192  } else if (locals_count > 1) {
193  if (locals_count >= 128) {
194  EmitStackCheck(masm_, locals_count, ecx);
195  }
196  __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
197  const int kMaxPushes = 32;
198  if (locals_count >= kMaxPushes) {
199  int loop_iterations = locals_count / kMaxPushes;
200  __ mov(ecx, loop_iterations);
201  Label loop_header;
202  __ bind(&loop_header);
203  // Do pushes.
204  for (int i = 0; i < kMaxPushes; i++) {
205  __ push(eax);
206  }
207  __ dec(ecx);
208  __ j(not_zero, &loop_header, Label::kNear);
209  }
210  int remaining = locals_count % kMaxPushes;
211  // Emit the remaining pushes.
212  for (int i = 0; i < remaining; i++) {
213  __ push(eax);
214  }
215  }
216  }
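// Note on the loop above: undefined is pushed in batches of kMaxPushes
// (32) per loop iteration, so large frames pay one dec/jnz per 32 slots
// rather than per slot; the remainder and small counts are emitted as
// straight-line pushes.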
217 
218  bool function_in_register = true;
219 
220  // Possibly allocate a local context.
221  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
222  if (heap_slots > 0) {
223  Comment cmnt(masm_, "[ Allocate context");
224  // Argument to NewContext is the function, which is still in edi.
225  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
226  __ push(edi);
227  __ Push(info->scope()->GetScopeInfo());
228  __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
229  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
230  FastNewContextStub stub(heap_slots);
231  __ CallStub(&stub);
232  } else {
233  __ push(edi);
234  __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
235  }
236  function_in_register = false;
237  // Context is returned in eax. It replaces the context passed to us.
238  // It's saved in the stack and kept live in esi.
239  __ mov(esi, eax);
240  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
241 
242  // Copy parameters into context if necessary.
243  int num_parameters = info->scope()->num_parameters();
244  for (int i = 0; i < num_parameters; i++) {
245  Variable* var = scope()->parameter(i);
246  if (var->IsContextSlot()) {
247  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
248  (num_parameters - 1 - i) * kPointerSize;
249  // Load parameter from stack.
250  __ mov(eax, Operand(ebp, parameter_offset));
251  // Store it in the context.
252  int context_offset = Context::SlotOffset(var->index());
253  __ mov(Operand(esi, context_offset), eax);
254  // Update the write barrier. This clobbers eax and ebx.
255  __ RecordWriteContextSlot(esi,
256  context_offset,
257  eax,
258  ebx,
259  kDontSaveFPRegs);
260  }
261  }
262  }
263 
264  Variable* arguments = scope()->arguments();
265  if (arguments != NULL) {
266  // Function uses arguments object.
267  Comment cmnt(masm_, "[ Allocate arguments object");
268  if (function_in_register) {
269  __ push(edi);
270  } else {
271  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
272  }
273  // Receiver is just before the parameters on the caller's stack.
274  int num_parameters = info->scope()->num_parameters();
275  int offset = num_parameters * kPointerSize;
276  __ lea(edx,
277  Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
278  __ push(edx);
279  __ push(Immediate(Smi::FromInt(num_parameters)));
280  // Arguments to ArgumentsAccessStub:
281  // function, receiver address, parameter count.
282  // The stub will rewrite receiver and parameter count if the previous
283  // stack frame was an arguments adapter frame.
284  ArgumentsAccessStub::Type type;
285  if (strict_mode() == STRICT) {
286  type = ArgumentsAccessStub::NEW_STRICT;
287  } else if (function()->has_duplicate_parameters()) {
288  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
289  } else {
290  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
291  }
292  ArgumentsAccessStub stub(type);
293  __ CallStub(&stub);
294 
295  SetVar(arguments, eax, ebx, edx);
296  }
297 
298  if (FLAG_trace) {
299  __ CallRuntime(Runtime::kTraceEnter, 0);
300  }
301 
302  // Visit the declarations and body unless there is an illegal
303  // redeclaration.
304  if (scope()->HasIllegalRedeclaration()) {
305  Comment cmnt(masm_, "[ Declarations");
306  scope()->VisitIllegalRedeclaration(this);
307 
308  } else {
309  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
310  { Comment cmnt(masm_, "[ Declarations");
311  // For named function expressions, declare the function name as a
312  // constant.
313  if (scope()->is_function_scope() && scope()->function() != NULL) {
314  VariableDeclaration* function = scope()->function();
315  ASSERT(function->proxy()->var()->mode() == CONST ||
316  function->proxy()->var()->mode() == CONST_LEGACY);
317  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
318  VisitVariableDeclaration(function);
319  }
320  VisitDeclarations(scope()->declarations());
321  }
322 
323  { Comment cmnt(masm_, "[ Stack check");
324  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
325  EmitStackCheck(masm_);
326  }
327 
328  { Comment cmnt(masm_, "[ Body");
329  ASSERT(loop_depth() == 0);
330  VisitStatements(function()->body());
331  ASSERT(loop_depth() == 0);
332  }
333  }
334 
335  // Always emit a 'return undefined' in case control fell off the end of
336  // the body.
337  { Comment cmnt(masm_, "[ return <undefined>;");
338  __ mov(eax, isolate()->factory()->undefined_value());
339  EmitReturnSequence();
340  }
341 }
342 
343 
344 void FullCodeGenerator::ClearAccumulator() {
345  __ Move(eax, Immediate(Smi::FromInt(0)));
346 }
347 
348 
349 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
350  __ mov(ebx, Immediate(profiling_counter_));
351  __ sub(FieldOperand(ebx, Cell::kValueOffset),
352  Immediate(Smi::FromInt(delta)));
353 }
354 
355 
356 void FullCodeGenerator::EmitProfilingCounterReset() {
357  int reset_value = FLAG_interrupt_budget;
358  __ mov(ebx, Immediate(profiling_counter_));
359  __ mov(FieldOperand(ebx, Cell::kValueOffset),
360  Immediate(Smi::FromInt(reset_value)));
361 }
362 
363 
364 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
365  Label* back_edge_target) {
366  Comment cmnt(masm_, "[ Back edge bookkeeping");
367  Label ok;
368 
369  ASSERT(back_edge_target->is_bound());
370  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
371  int weight = Min(kMaxBackEdgeWeight,
372  Max(1, distance / kCodeSizeMultiplier));
373  EmitProfilingCounterDecrement(weight);
374  __ j(positive, &ok, Label::kNear);
375  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
376 
377  // Record a mapping of this PC offset to the OSR id. This is used to find
378  // the AST id from the unoptimized code in order to use it as a key into
379  // the deoptimization input data found in the optimized code.
380  RecordBackEdge(stmt->OsrEntryId());
381 
382  EmitProfilingCounterReset();
383 
384  __ bind(&ok);
385  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
386  // Record a mapping of the OSR id to this PC. This is used if the OSR
387  // entry becomes the target of a bailout. We don't expect it to be, but
388  // we want it to work if it is.
389  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
390 }
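// Back-edge weight is proportional to the size of the code jumped over
// (clamped to kMaxBackEdgeWeight), so large hot loops drain the interrupt
// budget faster. When the profiling counter underflows, the InterruptCheck
// builtin runs and the runtime may decide to optimize this function and
// enter it via on-stack replacement at the recorded OSR entry.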
391 
392 
393 void FullCodeGenerator::EmitReturnSequence() {
394  Comment cmnt(masm_, "[ Return sequence");
395  if (return_label_.is_bound()) {
396  __ jmp(&return_label_);
397  } else {
398  // Common return label
399  __ bind(&return_label_);
400  if (FLAG_trace) {
401  __ push(eax);
402  __ CallRuntime(Runtime::kTraceExit, 1);
403  }
404  // Pretend that the exit is a backwards jump to the entry.
405  int weight = 1;
406  if (info_->ShouldSelfOptimize()) {
407  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
408  } else {
409  int distance = masm_->pc_offset();
410  weight = Min(kMaxBackEdgeWeight,
411  Max(1, distance / kCodeSizeMultiplier));
412  }
413  EmitProfilingCounterDecrement(weight);
414  Label ok;
415  __ j(positive, &ok, Label::kNear);
416  __ push(eax);
417  __ call(isolate()->builtins()->InterruptCheck(),
418  RelocInfo::CODE_TARGET);
419  __ pop(eax);
420  EmitProfilingCounterReset();
421  __ bind(&ok);
422 #ifdef DEBUG
423  // Add a label for checking the size of the code used for returning.
424  Label check_exit_codesize;
425  masm_->bind(&check_exit_codesize);
426 #endif
427  SetSourcePosition(function()->end_position() - 1);
428  __ RecordJSReturn();
429  // Do not use the leave instruction here because it is too short to
430  // patch with the code required by the debugger.
431  __ mov(esp, ebp);
432  int no_frame_start = masm_->pc_offset();
433  __ pop(ebp);
434 
435  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
436  __ Ret(arguments_bytes, ecx);
437 #ifdef ENABLE_DEBUGGER_SUPPORT
438  // Check that the size of the code used for returning is large enough
439  // for the debugger's requirements.
440  ASSERT(Assembler::kJSReturnSequenceLength <=
441  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
442 #endif
443  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
444  }
445 }
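// The epilogue deliberately spells out mov esp,ebp / pop ebp / ret n
// instead of using `leave`, so the sequence is long enough for the
// debugger to overwrite it with a patched return (the assert above checks
// kJSReturnSequenceLength). The `ret arguments_bytes` also pops the
// receiver and arguments, as the JS calling convention on ia32 requires;
// ecx is merely the scratch register Ret uses.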
446 
447 
448 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
449  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
450 }
451 
452 
453 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
454  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
455  codegen()->GetVar(result_register(), var);
456 }
457 
458 
459 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
460  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
461  MemOperand operand = codegen()->VarOperand(var, result_register());
462  // Memory operands can be pushed directly.
463  __ push(operand);
464 }
465 
466 
467 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
468  // For simplicity we always test the accumulator register.
469  codegen()->GetVar(result_register(), var);
470  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
471  codegen()->DoTest(this);
472 }
473 
474 
475 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
476  UNREACHABLE(); // Not used on IA32.
477 }
478 
479 
480 void FullCodeGenerator::AccumulatorValueContext::Plug(
481  Heap::RootListIndex index) const {
482  UNREACHABLE(); // Not used on IA32.
483 }
484 
485 
486 void FullCodeGenerator::StackValueContext::Plug(
487  Heap::RootListIndex index) const {
488  UNREACHABLE(); // Not used on IA32.
489 }
490 
491 
492 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
493  UNREACHABLE(); // Not used on IA32.
494 }
495 
496 
497 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
498 }
499 
500 
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502  Handle<Object> lit) const {
503  if (lit->IsSmi()) {
504  __ SafeMove(result_register(), Immediate(lit));
505  } else {
506  __ Move(result_register(), Immediate(lit));
507  }
508 }
509 
510 
511 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
512  if (lit->IsSmi()) {
513  __ SafePush(Immediate(lit));
514  } else {
515  __ push(Immediate(lit));
516  }
517 }
518 
519 
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
521  codegen()->PrepareForBailoutBeforeSplit(condition(),
522  true,
523  true_label_,
524  false_label_);
525  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
526  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527  if (false_label_ != fall_through_) __ jmp(false_label_);
528  } else if (lit->IsTrue() || lit->IsJSObject()) {
529  if (true_label_ != fall_through_) __ jmp(true_label_);
530  } else if (lit->IsString()) {
531  if (String::cast(*lit)->length() == 0) {
532  if (false_label_ != fall_through_) __ jmp(false_label_);
533  } else {
534  if (true_label_ != fall_through_) __ jmp(true_label_);
535  }
536  } else if (lit->IsSmi()) {
537  if (Smi::cast(*lit)->value() == 0) {
538  if (false_label_ != fall_through_) __ jmp(false_label_);
539  } else {
540  if (true_label_ != fall_through_) __ jmp(true_label_);
541  }
542  } else {
543  // For simplicity we always test the accumulator register.
544  __ mov(result_register(), lit);
545  codegen()->DoTest(this);
546  }
547 }
548 
549 
550 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
551  Register reg) const {
552  ASSERT(count > 0);
553  __ Drop(count);
554 }
555 
556 
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
558  int count,
559  Register reg) const {
560  ASSERT(count > 0);
561  __ Drop(count);
562  __ Move(result_register(), reg);
563 }
564 
565 
566 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
567  Register reg) const {
568  ASSERT(count > 0);
569  if (count > 1) __ Drop(count - 1);
570  __ mov(Operand(esp, 0), reg);
571 }
572 
573 
574 void FullCodeGenerator::TestContext::DropAndPlug(int count,
575  Register reg) const {
576  ASSERT(count > 0);
577  // For simplicity we always test the accumulator register.
578  __ Drop(count);
579  __ Move(result_register(), reg);
580  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
581  codegen()->DoTest(this);
582 }
583 
584 
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586  Label* materialize_false) const {
587  ASSERT(materialize_true == materialize_false);
588  __ bind(materialize_true);
589 }
590 
591 
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593  Label* materialize_true,
594  Label* materialize_false) const {
595  Label done;
596  __ bind(materialize_true);
597  __ mov(result_register(), isolate()->factory()->true_value());
598  __ jmp(&done, Label::kNear);
599  __ bind(materialize_false);
600  __ mov(result_register(), isolate()->factory()->false_value());
601  __ bind(&done);
602 }
603 
604 
605 void FullCodeGenerator::StackValueContext::Plug(
606  Label* materialize_true,
607  Label* materialize_false) const {
608  Label done;
609  __ bind(materialize_true);
610  __ push(Immediate(isolate()->factory()->true_value()));
611  __ jmp(&done, Label::kNear);
612  __ bind(materialize_false);
613  __ push(Immediate(isolate()->factory()->false_value()));
614  __ bind(&done);
615 }
616 
617 
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619  Label* materialize_false) const {
620  ASSERT(materialize_true == true_label_);
621  ASSERT(materialize_false == false_label_);
622 }
623 
624 
625 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
626 }
627 
628 
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630  Handle<Object> value = flag
631  ? isolate()->factory()->true_value()
632  : isolate()->factory()->false_value();
633  __ mov(result_register(), value);
634 }
635 
636 
637 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
638  Handle<Object> value = flag
639  ? isolate()->factory()->true_value()
640  : isolate()->factory()->false_value();
641  __ push(Immediate(value));
642 }
643 
644 
645 void FullCodeGenerator::TestContext::Plug(bool flag) const {
646  codegen()->PrepareForBailoutBeforeSplit(condition(),
647  true,
648  true_label_,
649  false_label_);
650  if (flag) {
651  if (true_label_ != fall_through_) __ jmp(true_label_);
652  } else {
653  if (false_label_ != fall_through_) __ jmp(false_label_);
654  }
655 }
656 
657 
658 void FullCodeGenerator::DoTest(Expression* condition,
659  Label* if_true,
660  Label* if_false,
661  Label* fall_through) {
662  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
663  CallIC(ic, condition->test_id());
664  __ test(result_register(), result_register());
665  // The stub returns nonzero for true.
666  Split(not_zero, if_true, if_false, fall_through);
667 }
668 
669 
670 void FullCodeGenerator::Split(Condition cc,
671  Label* if_true,
672  Label* if_false,
673  Label* fall_through) {
674  if (if_false == fall_through) {
675  __ j(cc, if_true);
676  } else if (if_true == fall_through) {
677  __ j(NegateCondition(cc), if_false);
678  } else {
679  __ j(cc, if_true);
680  __ jmp(if_false);
681  }
682 }
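// Split emits at most one branch when either target is the natural fall
// through: it branches on cc (or its negation) only to the label that is
// not fallen into, and emits a conditional jump plus an unconditional jmp
// only when neither label falls through.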
683 
684 
685 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
686  ASSERT(var->IsStackAllocated());
687  // Offset is negative because higher indexes are at lower addresses.
688  int offset = -var->index() * kPointerSize;
689  // Adjust by a (parameter or local) base offset.
690  if (var->IsParameter()) {
691  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
692  } else {
693  offset += JavaScriptFrameConstants::kLocal0Offset;
694  }
695  return Operand(ebp, offset);
696 }
697 
698 
699 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
700  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
701  if (var->IsContextSlot()) {
702  int context_chain_length = scope()->ContextChainLength(var->scope());
703  __ LoadContext(scratch, context_chain_length);
704  return ContextOperand(scratch, var->index());
705  } else {
706  return StackOperand(var);
707  }
708 }
709 
710 
711 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
712  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
713  MemOperand location = VarOperand(var, dest);
714  __ mov(dest, location);
715 }
716 
717 
718 void FullCodeGenerator::SetVar(Variable* var,
719  Register src,
720  Register scratch0,
721  Register scratch1) {
722  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
723  ASSERT(!scratch0.is(src));
724  ASSERT(!scratch0.is(scratch1));
725  ASSERT(!scratch1.is(src));
726  MemOperand location = VarOperand(var, scratch0);
727  __ mov(location, src);
728 
729  // Emit the write barrier code if the location is in the heap.
730  if (var->IsContextSlot()) {
731  int offset = Context::SlotOffset(var->index());
732  ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
733  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
734  }
735 }
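// Stack slots need no GC bookkeeping, but a context slot lives in a heap
// object, so SetVar follows the store with RecordWriteContextSlot to keep
// the write barrier (remembered set / incremental marking) up to date.
// The asserts above keep the scratch registers distinct from src and from
// esi, which still holds the current context.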
736 
737 
738 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
739  bool should_normalize,
740  Label* if_true,
741  Label* if_false) {
742  // Only prepare for bailouts before splits if we're in a test
743  // context. Otherwise, we let the Visit function deal with the
744  // preparation to avoid preparing with the same AST id twice.
745  if (!context()->IsTest() || !info_->IsOptimizable()) return;
746 
747  Label skip;
748  if (should_normalize) __ jmp(&skip, Label::kNear);
749  PrepareForBailout(expr, TOS_REG);
750  if (should_normalize) {
751  __ cmp(eax, isolate()->factory()->true_value());
752  Split(equal, if_true, if_false, NULL);
753  __ bind(&skip);
754  }
755 }
756 
757 
758 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
759  // The variable in the declaration always resides in the current context.
760  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
761  if (generate_debug_code_) {
762  // Check that we're not inside a with or catch context.
763  __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
764  __ cmp(ebx, isolate()->factory()->with_context_map());
765  __ Check(not_equal, kDeclarationInWithContext);
766  __ cmp(ebx, isolate()->factory()->catch_context_map());
767  __ Check(not_equal, kDeclarationInCatchContext);
768  }
769 }
770 
771 
772 void FullCodeGenerator::VisitVariableDeclaration(
773  VariableDeclaration* declaration) {
774  // If it was not possible to allocate the variable at compile time, we
775  // need to "declare" it at runtime to make sure it actually exists in the
776  // local context.
777  VariableProxy* proxy = declaration->proxy();
778  VariableMode mode = declaration->mode();
779  Variable* variable = proxy->var();
780  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
781  switch (variable->location()) {
782  case Variable::UNALLOCATED:
783  globals_->Add(variable->name(), zone());
784  globals_->Add(variable->binding_needs_init()
785  ? isolate()->factory()->the_hole_value()
786  : isolate()->factory()->undefined_value(), zone());
787  break;
788 
789  case Variable::PARAMETER:
790  case Variable::LOCAL:
791  if (hole_init) {
792  Comment cmnt(masm_, "[ VariableDeclaration");
793  __ mov(StackOperand(variable),
794  Immediate(isolate()->factory()->the_hole_value()));
795  }
796  break;
797 
798  case Variable::CONTEXT:
799  if (hole_init) {
800  Comment cmnt(masm_, "[ VariableDeclaration");
801  EmitDebugCheckDeclarationContext(variable);
802  __ mov(ContextOperand(esi, variable->index()),
803  Immediate(isolate()->factory()->the_hole_value()));
804  // No write barrier since the hole value is in old space.
805  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
806  }
807  break;
808 
809  case Variable::LOOKUP: {
810  Comment cmnt(masm_, "[ VariableDeclaration");
811  __ push(esi);
812  __ push(Immediate(variable->name()));
813  // VariableDeclaration nodes are always introduced in one of four modes.
814  ASSERT(IsDeclaredVariableMode(mode));
815  PropertyAttributes attr =
816  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
817  __ push(Immediate(Smi::FromInt(attr)));
818  // Push initial value, if any.
819  // Note: For variables we must not push an initial value (such as
820  // 'undefined') because we may have a (legal) redeclaration and we
821  // must not destroy the current value.
822  if (hole_init) {
823  __ push(Immediate(isolate()->factory()->the_hole_value()));
824  } else {
825  __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
826  }
827  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
828  break;
829  }
830  }
831 }
832 
833 
834 void FullCodeGenerator::VisitFunctionDeclaration(
835  FunctionDeclaration* declaration) {
836  VariableProxy* proxy = declaration->proxy();
837  Variable* variable = proxy->var();
838  switch (variable->location()) {
839  case Variable::UNALLOCATED: {
840  globals_->Add(variable->name(), zone());
841  Handle<SharedFunctionInfo> function =
842  Compiler::BuildFunctionInfo(declaration->fun(), script());
843  // Check for stack-overflow exception.
844  if (function.is_null()) return SetStackOverflow();
845  globals_->Add(function, zone());
846  break;
847  }
848 
849  case Variable::PARAMETER:
850  case Variable::LOCAL: {
851  Comment cmnt(masm_, "[ FunctionDeclaration");
852  VisitForAccumulatorValue(declaration->fun());
853  __ mov(StackOperand(variable), result_register());
854  break;
855  }
856 
857  case Variable::CONTEXT: {
858  Comment cmnt(masm_, "[ FunctionDeclaration");
859  EmitDebugCheckDeclarationContext(variable);
860  VisitForAccumulatorValue(declaration->fun());
861  __ mov(ContextOperand(esi, variable->index()), result_register());
862  // We know that we have written a function, which is not a smi.
863  __ RecordWriteContextSlot(esi,
864  Context::SlotOffset(variable->index()),
865  result_register(),
866  ecx,
867  kDontSaveFPRegs,
868  EMIT_REMEMBERED_SET,
869  OMIT_SMI_CHECK);
870  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
871  break;
872  }
873 
874  case Variable::LOOKUP: {
875  Comment cmnt(masm_, "[ FunctionDeclaration");
876  __ push(esi);
877  __ push(Immediate(variable->name()));
878  __ push(Immediate(Smi::FromInt(NONE)));
879  VisitForStackValue(declaration->fun());
880  __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
881  break;
882  }
883  }
884 }
885 
886 
887 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
888  Variable* variable = declaration->proxy()->var();
889  ASSERT(variable->location() == Variable::CONTEXT);
890  ASSERT(variable->interface()->IsFrozen());
891 
892  Comment cmnt(masm_, "[ ModuleDeclaration");
893  EmitDebugCheckDeclarationContext(variable);
894 
895  // Load instance object.
896  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
897  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
898  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));
899 
900  // Assign it.
901  __ mov(ContextOperand(esi, variable->index()), eax);
902  // We know that we have written a module, which is not a smi.
903  __ RecordWriteContextSlot(esi,
904  Context::SlotOffset(variable->index()),
905  eax,
906  ecx,
907  kDontSaveFPRegs,
908  EMIT_REMEMBERED_SET,
909  OMIT_SMI_CHECK);
910  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
911 
912  // Traverse into body.
913  Visit(declaration->module());
914 }
915 
916 
917 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
918  VariableProxy* proxy = declaration->proxy();
919  Variable* variable = proxy->var();
920  switch (variable->location()) {
921  case Variable::UNALLOCATED:
922  // TODO(rossberg)
923  break;
924 
925  case Variable::CONTEXT: {
926  Comment cmnt(masm_, "[ ImportDeclaration");
927  EmitDebugCheckDeclarationContext(variable);
928  // TODO(rossberg)
929  break;
930  }
931 
932  case Variable::PARAMETER:
933  case Variable::LOCAL:
934  case Variable::LOOKUP:
935  UNREACHABLE();
936  }
937 }
938 
939 
940 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
941  // TODO(rossberg)
942 }
943 
944 
945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
946  // Call the runtime to declare the globals.
947  __ push(esi); // The context is the first argument.
948  __ Push(pairs);
949  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
950  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
951  // Return value is ignored.
952 }
953 
954 
955 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
956  // Call the runtime to declare the modules.
957  __ Push(descriptions);
958  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
959  // Return value is ignored.
960 }
961 
962 
963 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
964  Comment cmnt(masm_, "[ SwitchStatement");
965  Breakable nested_statement(this, stmt);
966  SetStatementPosition(stmt);
967 
968  // Keep the switch value on the stack until a case matches.
969  VisitForStackValue(stmt->tag());
970  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
971 
972  ZoneList<CaseClause*>* clauses = stmt->cases();
973  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
974 
975  Label next_test; // Recycled for each test.
976  // Compile all the tests with branches to their bodies.
977  for (int i = 0; i < clauses->length(); i++) {
978  CaseClause* clause = clauses->at(i);
979  clause->body_target()->Unuse();
980 
981  // The default is not a test, but remember it as final fall through.
982  if (clause->is_default()) {
983  default_clause = clause;
984  continue;
985  }
986 
987  Comment cmnt(masm_, "[ Case comparison");
988  __ bind(&next_test);
989  next_test.Unuse();
990 
991  // Compile the label expression.
992  VisitForAccumulatorValue(clause->label());
993 
994  // Perform the comparison as if via '==='.
995  __ mov(edx, Operand(esp, 0)); // Switch value.
996  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
997  JumpPatchSite patch_site(masm_);
998  if (inline_smi_code) {
999  Label slow_case;
1000  __ mov(ecx, edx);
1001  __ or_(ecx, eax);
1002  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
1003 
1004  __ cmp(edx, eax);
1005  __ j(not_equal, &next_test);
1006  __ Drop(1); // Switch value is no longer needed.
1007  __ jmp(clause->body_target());
1008  __ bind(&slow_case);
1009  }
1010 
1011  // Record position before stub call for type feedback.
1012  SetSourcePosition(clause->position());
1013  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1014  CallIC(ic, clause->CompareId());
1015  patch_site.EmitPatchInfo();
1016 
1017  Label skip;
1018  __ jmp(&skip, Label::kNear);
1019  PrepareForBailout(clause, TOS_REG);
1020  __ cmp(eax, isolate()->factory()->true_value());
1021  __ j(not_equal, &next_test);
1022  __ Drop(1);
1023  __ jmp(clause->body_target());
1024  __ bind(&skip);
1025 
1026  __ test(eax, eax);
1027  __ j(not_equal, &next_test);
1028  __ Drop(1); // Switch value is no longer needed.
1029  __ jmp(clause->body_target());
1030  }
1031 
1032  // Discard the test value and jump to the default if present, otherwise to
1033  // the end of the statement.
1034  __ bind(&next_test);
1035  __ Drop(1); // Switch value is no longer needed.
1036  if (default_clause == NULL) {
1037  __ jmp(nested_statement.break_label());
1038  } else {
1039  __ jmp(default_clause->body_target());
1040  }
1041 
1042  // Compile all the case bodies.
1043  for (int i = 0; i < clauses->length(); i++) {
1044  Comment cmnt(masm_, "[ Case body");
1045  CaseClause* clause = clauses->at(i);
1046  __ bind(clause->body_target());
1047  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1048  VisitStatements(clause->statements());
1049  }
1050 
1051  __ bind(nested_statement.break_label());
1052  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1053 }
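// Each non-default clause compiles to a strict-equality test against the
// switch value kept on the stack: an inlined smi comparison guarded by a
// JumpPatchSite, then the generic CompareIC as the slow path. The jump
// over the PrepareForBailout block keeps the normal path on the IC result
// (zero in eax means "equal"); deoptimized code re-enters at the bailout
// point with a boolean in eax and is re-tested against true_value.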
1054 
1055 
1056 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1057  Comment cmnt(masm_, "[ ForInStatement");
1058  int slot = stmt->ForInFeedbackSlot();
1059 
1060  SetStatementPosition(stmt);
1061 
1062  Label loop, exit;
1063  ForIn loop_statement(this, stmt);
1064  increment_loop_depth();
1065 
1066  // Get the object to enumerate over. If the object is null or undefined, skip
1067  // over the loop. See ECMA-262 version 5, section 12.6.4.
1068  VisitForAccumulatorValue(stmt->enumerable());
1069  __ cmp(eax, isolate()->factory()->undefined_value());
1070  __ j(equal, &exit);
1071  __ cmp(eax, isolate()->factory()->null_value());
1072  __ j(equal, &exit);
1073 
1074  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1075 
1076  // Convert the object to a JS object.
1077  Label convert, done_convert;
1078  __ JumpIfSmi(eax, &convert, Label::kNear);
1079  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1080  __ j(above_equal, &done_convert, Label::kNear);
1081  __ bind(&convert);
1082  __ push(eax);
1083  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1084  __ bind(&done_convert);
1085  __ push(eax);
1086 
1087  // Check for proxies.
1088  Label call_runtime, use_cache, fixed_array;
1089  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1090  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1091  __ j(below_equal, &call_runtime);
1092 
1093  // Check cache validity in generated code. This is a fast case for
1094  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1095  // guarantee cache validity, call the runtime system to check cache
1096  // validity or get the property names in a fixed array.
1097  __ CheckEnumCache(&call_runtime);
1098 
1098 
1099  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1100  __ jmp(&use_cache, Label::kNear);
1101 
1102  // Get the set of properties to enumerate.
1103  __ bind(&call_runtime);
1104  __ push(eax);
1105  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1106  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1107  isolate()->factory()->meta_map());
1108  __ j(not_equal, &fixed_array);
1109 
1110 
1111  // We got a map in register eax. Get the enumeration cache from it.
1112  Label no_descriptors;
1113  __ bind(&use_cache);
1114 
1115  __ EnumLength(edx, eax);
1116  __ cmp(edx, Immediate(Smi::FromInt(0)));
1117  __ j(equal, &no_descriptors);
1118 
1119  __ LoadInstanceDescriptors(eax, ecx);
1120  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1121  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1122 
1123  // Set up the four remaining stack slots.
1124  __ push(eax); // Map.
1125  __ push(ecx); // Enumeration cache.
1126  __ push(edx); // Number of valid entries for the map in the enum cache.
1127  __ push(Immediate(Smi::FromInt(0))); // Initial index.
1128  __ jmp(&loop);
1129 
1130  __ bind(&no_descriptors);
1131  __ add(esp, Immediate(kPointerSize));
1132  __ jmp(&exit);
1133 
1134  // We got a fixed array in register eax. Iterate through that.
1135  Label non_proxy;
1136  __ bind(&fixed_array);
1137 
1138  Handle<Object> feedback = Handle<Object>(
1139  Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1140  isolate());
1141  StoreFeedbackVectorSlot(slot, feedback);
1142 
1143  // No need for a write barrier, we are storing a Smi in the feedback vector.
1144  __ LoadHeapObject(ebx, FeedbackVector());
1145  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
1146  Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
1147 
1148  __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1149  __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1150  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1151  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1152  __ j(above, &non_proxy);
1153  __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1154  __ bind(&non_proxy);
1155  __ push(ebx); // Smi
1156  __ push(eax); // Array
1157  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1158  __ push(eax); // Fixed array length (as smi).
1159  __ push(Immediate(Smi::FromInt(0))); // Initial index.
1160 
1161  // Generate code for doing the condition check.
1162  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1163  __ bind(&loop);
1164  __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1165  __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1166  __ j(above_equal, loop_statement.break_label());
1167 
1168  // Get the current entry of the array into register ebx.
1169  __ mov(ebx, Operand(esp, 2 * kPointerSize));
1170  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1171 
1172  // Get the expected map from the stack or a smi in the
1173  // permanent slow case into register edx.
1174  __ mov(edx, Operand(esp, 3 * kPointerSize));
1175 
1176  // Check if the expected map still matches that of the enumerable.
1177  // If not, we may have to filter the key.
1178  Label update_each;
1179  __ mov(ecx, Operand(esp, 4 * kPointerSize));
1180  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1181  __ j(equal, &update_each, Label::kNear);
1182 
1183  // For proxies, no filtering is done.
1184  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1185  ASSERT(Smi::FromInt(0) == 0);
1186  __ test(edx, edx);
1187  __ j(zero, &update_each);
1188 
1189  // Convert the entry to a string or null if it isn't a property
1190  // anymore. If the property has been removed while iterating, we
1191  // just skip it.
1192  __ push(ecx); // Enumerable.
1193  __ push(ebx); // Current entry.
1194  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1195  __ test(eax, eax);
1196  __ j(equal, loop_statement.continue_label());
1197  __ mov(ebx, eax);
1198 
1199  // Update the 'each' property or variable from the possibly filtered
1200  // entry in register ebx.
1201  __ bind(&update_each);
1202  __ mov(result_register(), ebx);
1203  // Perform the assignment as if via '='.
1204  { EffectContext context(this);
1205  EmitAssignment(stmt->each());
1206  }
1207 
1208  // Generate code for the body of the loop.
1209  Visit(stmt->body());
1210 
1211  // Generate code for going to the next element by incrementing the
1212  // index (smi) stored on top of the stack.
1213  __ bind(loop_statement.continue_label());
1214  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1215 
1216  EmitBackEdgeBookkeeping(stmt, &loop);
1217  __ jmp(&loop);
1218 
1219  // Remove the pointers stored on the stack.
1220  __ bind(loop_statement.break_label());
1221  __ add(esp, Immediate(5 * kPointerSize));
1222 
1223  // Exit and decrement the loop depth.
1224  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1225  __ bind(&exit);
1226  decrement_loop_depth();
1227 }
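// While the loop runs, five words live on the stack, from the top: the
// current index (smi), the cache/array length (smi), the array of keys,
// the expected receiver map (or smi 1 for the generic slow case, smi 0
// for a proxy), and the enumerable object itself. The map comparison
// detects receivers whose shape changed mid-loop; such keys are passed
// through the FILTER_KEY builtin and skipped if no longer present.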
1228 
1229 
1230 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1231  Comment cmnt(masm_, "[ ForOfStatement");
1232  SetStatementPosition(stmt);
1233 
1234  Iteration loop_statement(this, stmt);
1235  increment_loop_depth();
1236 
1237  // var iterator = iterable[@@iterator]()
1238  VisitForAccumulatorValue(stmt->assign_iterator());
1239 
1240  // As with for-in, skip the loop if the iterator is null or undefined.
1241  __ CompareRoot(eax, Heap::kUndefinedValueRootIndex);
1242  __ j(equal, loop_statement.break_label());
1243  __ CompareRoot(eax, Heap::kNullValueRootIndex);
1244  __ j(equal, loop_statement.break_label());
1245 
1246  // Convert the iterator to a JS object.
1247  Label convert, done_convert;
1248  __ JumpIfSmi(eax, &convert);
1249  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1250  __ j(above_equal, &done_convert);
1251  __ bind(&convert);
1252  __ push(eax);
1253  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1254  __ bind(&done_convert);
1255 
1256  // Loop entry.
1257  __ bind(loop_statement.continue_label());
1258 
1259  // result = iterator.next()
1260  VisitForEffect(stmt->next_result());
1261 
1262  // if (result.done) break;
1263  Label result_not_done;
1264  VisitForControl(stmt->result_done(),
1265  loop_statement.break_label(),
1266  &result_not_done,
1267  &result_not_done);
1268  __ bind(&result_not_done);
1269 
1270  // each = result.value
1271  VisitForEffect(stmt->assign_each());
1272 
1273  // Generate code for the body of the loop.
1274  Visit(stmt->body());
1275 
1276  // Check stack before looping.
1277  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1278  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1279  __ jmp(loop_statement.continue_label());
1280 
1281  // Exit and decrement the loop depth.
1282  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1283  __ bind(loop_statement.break_label());
1284  decrement_loop_depth();
1285 }
1286 
1287 
1288 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1289  bool pretenure) {
1290  // Use the fast case closure allocation code that allocates in new
1291  // space for nested functions that don't need literals cloning. If
1292  // we're running with the --always-opt or the --prepare-always-opt
1293  // flag, we need to use the runtime function so that the new function
1294  // we are creating here gets a chance to have its code optimized and
1295  // doesn't just get a copy of the existing unoptimized code.
1296  if (!FLAG_always_opt &&
1297  !FLAG_prepare_always_opt &&
1298  !pretenure &&
1299  scope()->is_function_scope() &&
1300  info->num_literals() == 0) {
1301  FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1302  __ mov(ebx, Immediate(info));
1303  __ CallStub(&stub);
1304  } else {
1305  __ push(esi);
1306  __ push(Immediate(info));
1307  __ push(Immediate(pretenure
1308  ? isolate()->factory()->true_value()
1309  : isolate()->factory()->false_value()));
1310  __ CallRuntime(Runtime::kHiddenNewClosure, 3);
1311  }
1312  context()->Plug(eax);
1313 }
1314 
1315 
1316 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1317  Comment cmnt(masm_, "[ VariableProxy");
1318  EmitVariableLoad(expr);
1319 }
1320 
1321 
1322 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1323  TypeofState typeof_state,
1324  Label* slow) {
1325  Register context = esi;
1326  Register temp = edx;
1327 
1328  Scope* s = scope();
1329  while (s != NULL) {
1330  if (s->num_heap_slots() > 0) {
1331  if (s->calls_sloppy_eval()) {
1332  // Check that extension is NULL.
1333  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1334  Immediate(0));
1335  __ j(not_equal, slow);
1336  }
1337  // Load next context in chain.
1338  __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1339  // Walk the rest of the chain without clobbering esi.
1340  context = temp;
1341  }
1342  // If no outer scope calls eval, we do not need to check more
1343  // context extensions. If we have reached an eval scope, we check
1344  // all extensions from this point.
1345  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1346  s = s->outer_scope();
1347  }
1348 
1349  if (s != NULL && s->is_eval_scope()) {
1350  // Loop up the context chain. There is no frame effect so it is
1351  // safe to use raw labels here.
1352  Label next, fast;
1353  if (!context.is(temp)) {
1354  __ mov(temp, context);
1355  }
1356  __ bind(&next);
1357  // Terminate at native context.
1358  __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1359  Immediate(isolate()->factory()->native_context_map()));
1360  __ j(equal, &fast, Label::kNear);
1361  // Check that extension is NULL.
1362  __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1363  __ j(not_equal, slow);
1364  // Load next context in chain.
1365  __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1366  __ jmp(&next);
1367  __ bind(&fast);
1368  }
1369 
1370  // All extension objects were empty and it is safe to use a global
1371  // load IC call.
1372  __ mov(edx, GlobalObjectOperand());
1373  __ mov(ecx, var->name());
1374  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1375  ? NOT_CONTEXTUAL
1376  : CONTEXTUAL;
1377 
1378  CallLoadIC(mode);
1379 }
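// Only scopes that might carry a context extension object (heap-allocated
// contexts around sloppy eval) are inspected above. If every extension
// slot between here and the global object is NULL, no eval-introduced
// binding can shadow the global and the load IC below is safe; any
// non-NULL extension jumps to the slow path instead.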
1380 
1381 
1382 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1383  Label* slow) {
1384  ASSERT(var->IsContextSlot());
1385  Register context = esi;
1386  Register temp = ebx;
1387 
1388  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1389  if (s->num_heap_slots() > 0) {
1390  if (s->calls_sloppy_eval()) {
1391  // Check that extension is NULL.
1392  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1393  Immediate(0));
1394  __ j(not_equal, slow);
1395  }
1396  __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1397  // Walk the rest of the chain without clobbering esi.
1398  context = temp;
1399  }
1400  }
1401  // Check that last extension is NULL.
1402  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1403  __ j(not_equal, slow);
1404 
1405  // This function is used only for loads, not stores, so it's safe to
1406  // return an esi-based operand (the write barrier cannot be allowed to
1407  // destroy the esi register).
1408  return ContextOperand(context, var->index());
1409 }
1410 
1411 
1412 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1413  TypeofState typeof_state,
1414  Label* slow,
1415  Label* done) {
1416  // Generate fast-case code for variables that might be shadowed by
1417  // eval-introduced variables. Eval is used a lot without
1418  // introducing variables. In those cases, we do not want to
1419  // perform a runtime call for all variables in the scope
1420  // containing the eval.
1421  if (var->mode() == DYNAMIC_GLOBAL) {
1422  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1423  __ jmp(done);
1424  } else if (var->mode() == DYNAMIC_LOCAL) {
1425  Variable* local = var->local_if_not_shadowed();
1426  __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1427  if (local->mode() == LET || local->mode() == CONST ||
1428  local->mode() == CONST_LEGACY) {
1429  __ cmp(eax, isolate()->factory()->the_hole_value());
1430  __ j(not_equal, done);
1431  if (local->mode() == CONST_LEGACY) {
1432  __ mov(eax, isolate()->factory()->undefined_value());
1433  } else { // LET || CONST
1434  __ push(Immediate(var->name()));
1435  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1436  }
1437  }
1438  __ jmp(done);
1439  }
1440 }
1441 
1442 
1443 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1444  // Record position before possible IC call.
1445  SetSourcePosition(proxy->position());
1446  Variable* var = proxy->var();
1447 
1448  // Three cases: global variables, lookup variables, and all other types of
1449  // variables.
1450  switch (var->location()) {
1451  case Variable::UNALLOCATED: {
1452  Comment cmnt(masm_, "[ Global variable");
1453  // Use inline caching. Variable name is passed in ecx and the global
1454  // object in eax.
1455  __ mov(edx, GlobalObjectOperand());
1456  __ mov(ecx, var->name());
1457  CallLoadIC(CONTEXTUAL);
1458  context()->Plug(eax);
1459  break;
1460  }
1461 
1462  case Variable::PARAMETER:
1463  case Variable::LOCAL:
1464  case Variable::CONTEXT: {
1465  Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1466  : "[ Stack variable");
1467  if (var->binding_needs_init()) {
1468  // var->scope() may be NULL when the proxy is located in eval code and
1469  // refers to a potential outside binding. Currently those bindings are
1470  // always looked up dynamically, i.e. in that case
1471  // var->location() == LOOKUP.
1472  // always holds.
1473  ASSERT(var->scope() != NULL);
1474 
1475  // Check if the binding really needs an initialization check. The check
1476  // can be skipped in the following situation: we have a LET or CONST
1477  // binding in harmony mode, both the Variable and the VariableProxy have
1478  // the same declaration scope (i.e. they are both in global code, in the
1479  // same function or in the same eval code) and the VariableProxy is in
1480  // the source physically located after the initializer of the variable.
1481  //
1482  // We cannot skip any initialization checks for CONST in non-harmony
1483  // mode because const variables may be declared but never initialized:
1484  // if (false) { const x; }; var y = x;
1485  //
1486  // The condition on the declaration scopes is a conservative check for
1487  // nested functions that access a binding and are called before the
1488  // binding is initialized:
1489  // function() { f(); let x = 1; function f() { x = 2; } }
1490  //
1491  bool skip_init_check;
1492  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1493  skip_init_check = false;
1494  } else {
1495  // Check that we always have valid source position.
1496  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1497  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1498  skip_init_check = var->mode() != CONST_LEGACY &&
1499  var->initializer_position() < proxy->position();
1500  }
1501 
1502  if (!skip_init_check) {
1503  // Let and const need a read barrier.
1504  Label done;
1505  GetVar(eax, var);
1506  __ cmp(eax, isolate()->factory()->the_hole_value());
1507  __ j(not_equal, &done, Label::kNear);
1508  if (var->mode() == LET || var->mode() == CONST) {
1509  // Throw a reference error when using an uninitialized let/const
1510  // binding in harmony mode.
1511  __ push(Immediate(var->name()));
1512  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
1513  } else {
1514  // Uninitialized const bindings outside of harmony mode are unholed.
1515  ASSERT(var->mode() == CONST_LEGACY);
1516  __ mov(eax, isolate()->factory()->undefined_value());
1517  }
1518  __ bind(&done);
1519  context()->Plug(eax);
1520  break;
1521  }
1522  }
1523  context()->Plug(var);
1524  break;
1525  }
1526 
1527  case Variable::LOOKUP: {
1528  Comment cmnt(masm_, "[ Lookup variable");
1529  Label done, slow;
1530  // Generate code for loading from variables potentially shadowed
1531  // by eval-introduced variables.
1532  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1533  __ bind(&slow);
1534  __ push(esi); // Context.
1535  __ push(Immediate(var->name()));
1536  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
1537  __ bind(&done);
1538  context()->Plug(eax);
1539  break;
1540  }
1541  }
1542 }
1543 
1544 
1545 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1546  Comment cmnt(masm_, "[ RegExpLiteral");
1547  Label materialized;
1548  // Registers will be used as follows:
1549  // edi = JS function.
1550  // ecx = literals array.
1551  // ebx = regexp literal.
1552  // eax = regexp literal clone.
1553  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1554  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1555  int literal_offset =
1556  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1557  __ mov(ebx, FieldOperand(ecx, literal_offset));
1558  __ cmp(ebx, isolate()->factory()->undefined_value());
1559  __ j(not_equal, &materialized, Label::kNear);
1560 
1561  // Create regexp literal using runtime function
1562  // Result will be in eax.
1563  __ push(ecx);
1564  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1565  __ push(Immediate(expr->pattern()));
1566  __ push(Immediate(expr->flags()));
1567  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
1568  __ mov(ebx, eax);
1569 
1570  __ bind(&materialized);
1571  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1572  Label allocated, runtime_allocate;
1573  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
1574  __ jmp(&allocated);
1575 
1576  __ bind(&runtime_allocate);
1577  __ push(ebx);
1578  __ push(Immediate(Smi::FromInt(size)));
1579  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
1580  __ pop(ebx);
1581 
1582  __ bind(&allocated);
1583  // Copy the content into the newly allocated memory.
1584  // (Unroll copy loop once for better throughput).
1585  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1586  __ mov(edx, FieldOperand(ebx, i));
1587  __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1588  __ mov(FieldOperand(eax, i), edx);
1589  __ mov(FieldOperand(eax, i + kPointerSize), ecx);
1590  }
1591  if ((size % (2 * kPointerSize)) != 0) {
1592  __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1593  __ mov(FieldOperand(eax, size - kPointerSize), edx);
1594  }
1595  context()->Plug(eax);
1596 }
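// The clone above is a shallow copy of the boilerplate JSRegExp (kSize
// plus its in-object fields), emitted as straight-line movs two words at
// a time, with one trailing mov when the size is not a multiple of
// 2 * kPointerSize.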
1597 
1598 
1599 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1600  if (expression == NULL) {
1601  __ push(Immediate(isolate()->factory()->null_value()));
1602  } else {
1603  VisitForStackValue(expression);
1604  }
1605 }
1606 
1607 
1608 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1609  Comment cmnt(masm_, "[ ObjectLiteral");
1610 
1611  expr->BuildConstantProperties(isolate());
1612  Handle<FixedArray> constant_properties = expr->constant_properties();
1613  int flags = expr->fast_elements()
1614  ? ObjectLiteral::kFastElements
1615  : ObjectLiteral::kNoFlags;
1616  flags |= expr->has_function()
1617  ? ObjectLiteral::kHasFunction
1618  : ObjectLiteral::kNoFlags;
1619  int properties_count = constant_properties->length() / 2;
1620  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1621  flags != ObjectLiteral::kFastElements ||
1622  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1623  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1624  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1625  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1626  __ push(Immediate(constant_properties));
1627  __ push(Immediate(Smi::FromInt(flags)));
1628  __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
1629  } else {
1630  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1631  __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
1632  __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1633  __ mov(ecx, Immediate(constant_properties));
1634  __ mov(edx, Immediate(Smi::FromInt(flags)));
1635  FastCloneShallowObjectStub stub(properties_count);
1636  __ CallStub(&stub);
1637  }
1638 
1639  // If result_saved is true the result is on top of the stack. If
1640  // result_saved is false the result is in eax.
1641  bool result_saved = false;
1642 
1643  // Mark all computed expressions that are bound to a key that
1644  // is shadowed by a later occurrence of the same key. For the
1645  // marked expressions, no store code is emitted.
1646  expr->CalculateEmitStore(zone());
1647 
1648  AccessorTable accessor_table(zone());
1649  for (int i = 0; i < expr->properties()->length(); i++) {
1650  ObjectLiteral::Property* property = expr->properties()->at(i);
1651  if (property->IsCompileTimeValue()) continue;
1652 
1653  Literal* key = property->key();
1654  Expression* value = property->value();
1655  if (!result_saved) {
1656  __ push(eax); // Save result on the stack
1657  result_saved = true;
1658  }
1659  switch (property->kind()) {
1660  case ObjectLiteral::Property::CONSTANT:
1661  UNREACHABLE();
1662  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1663  ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1664  // Fall through.
1665  case ObjectLiteral::Property::COMPUTED:
1666  if (key->value()->IsInternalizedString()) {
1667  if (property->emit_store()) {
1668  VisitForAccumulatorValue(value);
1669  __ mov(ecx, Immediate(key->value()));
1670  __ mov(edx, Operand(esp, 0));
1671  CallStoreIC(key->LiteralFeedbackId());
1672  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1673  } else {
1674  VisitForEffect(value);
1675  }
1676  break;
1677  }
1678  __ push(Operand(esp, 0)); // Duplicate receiver.
1679  VisitForStackValue(key);
1680  VisitForStackValue(value);
1681  if (property->emit_store()) {
1682  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1683  __ CallRuntime(Runtime::kSetProperty, 4);
1684  } else {
1685  __ Drop(3);
1686  }
1687  break;
1688  case ObjectLiteral::Property::PROTOTYPE:
1689  __ push(Operand(esp, 0)); // Duplicate receiver.
1690  VisitForStackValue(value);
1691  if (property->emit_store()) {
1692  __ CallRuntime(Runtime::kSetPrototype, 2);
1693  } else {
1694  __ Drop(2);
1695  }
1696  break;
1697  case ObjectLiteral::Property::GETTER:
1698  accessor_table.lookup(key)->second->getter = value;
1699  break;
1700  case ObjectLiteral::Property::SETTER:
1701  accessor_table.lookup(key)->second->setter = value;
1702  break;
1703  }
1704  }
1705 
1706  // Emit code to define accessors, using only a single call to the runtime for
1707  // each pair of corresponding getters and setters.
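// Illustration (not from the original source): for { get x() {...},
// set x(v) {...} } the accessor table pairs both functions under the key
// "x", so the single runtime call below installs getter and setter at once.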
1708  for (AccessorTable::Iterator it = accessor_table.begin();
1709  it != accessor_table.end();
1710  ++it) {
1711  __ push(Operand(esp, 0)); // Duplicate receiver.
1712  VisitForStackValue(it->first);
1713  EmitAccessor(it->second->getter);
1714  EmitAccessor(it->second->setter);
1715  __ push(Immediate(Smi::FromInt(NONE)));
1716  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1717  }
1718 
1719  if (expr->has_function()) {
1720  ASSERT(result_saved);
1721  __ push(Operand(esp, 0));
1722  __ CallRuntime(Runtime::kToFastProperties, 1);
1723  }
1724 
1725  if (result_saved) {
1726  context()->PlugTOS();
1727  } else {
1728  context()->Plug(eax);
1729  }
1730 }
1731 
1732 
1733 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1734  Comment cmnt(masm_, "[ ArrayLiteral");
1735 
1736  expr->BuildConstantElements(isolate());
1737  int flags = expr->depth() == 1
1738  ? ArrayLiteral::kShallowElements
1739  : ArrayLiteral::kNoFlags;
1740 
1741  ZoneList<Expression*>* subexprs = expr->values();
1742  int length = subexprs->length();
1743  Handle<FixedArray> constant_elements = expr->constant_elements();
1744  ASSERT_EQ(2, constant_elements->length());
1745  ElementsKind constant_elements_kind =
1746  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1747  bool has_constant_fast_elements =
1748  IsFastObjectElementsKind(constant_elements_kind);
1749  Handle<FixedArrayBase> constant_elements_values(
1750  FixedArrayBase::cast(constant_elements->get(1)));
1751 
1752  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1753  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1754  // Allocation sites here only serve elements-kind transitions; with fast
1755  // elements already in place and pretenuring disabled, tracking is not needed.
1756  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1757  }
1758 
1759  Heap* heap = isolate()->heap();
1760  if (has_constant_fast_elements &&
1761  constant_elements_values->map() == heap->fixed_cow_array_map()) {
1762  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1763  // change, so it's possible to specialize the stub in advance.
1764  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1765  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1766  __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1767  __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1768  __ mov(ecx, Immediate(constant_elements));
1769  FastCloneShallowArrayStub stub(
1770  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1771  allocation_site_mode,
1772  length);
1773  __ CallStub(&stub);
1774  } else if (expr->depth() > 1 || Serializer::enabled() ||
1775  length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1776  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1777  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1778  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1779  __ push(Immediate(constant_elements));
1780  __ push(Immediate(Smi::FromInt(flags)));
1781  __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1782  } else {
1783  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1784  FLAG_smi_only_arrays);
1785  FastCloneShallowArrayStub::Mode mode =
1786  FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1787 
1788  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1789  // change, so it's possible to specialize the stub in advance.
1790  if (has_constant_fast_elements) {
1791  mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1792  }
1793 
1794  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1795  __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1796  __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1797  __ mov(ecx, Immediate(constant_elements));
1798  FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1799  __ CallStub(&stub);
1800  }
1801 
1802  bool result_saved = false; // Is the result saved to the stack?
1803 
1804  // Emit code to evaluate all the non-constant subexpressions and to store
1805  // them into the newly cloned array.
1806  for (int i = 0; i < length; i++) {
1807  Expression* subexpr = subexprs->at(i);
1808  // If the subexpression is a literal or a simple materialized literal it
1809  // is already set in the cloned array.
1810  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1811 
1812  if (!result_saved) {
1813  __ push(eax); // array literal.
1814  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1815  result_saved = true;
1816  }
1817  VisitForAccumulatorValue(subexpr);
1818 
1819  if (IsFastObjectElementsKind(constant_elements_kind)) {
1820  // Fast-case array literals with an ElementsKind of FAST_*_ELEMENTS
1821  // cannot transition, so there is no need to call the runtime stub.
1822  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1823  __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1824  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1825  // Store the subexpression value in the array's elements.
1826  __ mov(FieldOperand(ebx, offset), result_register());
1827  // Update the write barrier for the array store.
1828  __ RecordWriteField(ebx, offset, result_register(), ecx,
1829  kDontSaveFPRegs,
1830  EMIT_REMEMBERED_SET,
1831  INLINE_SMI_CHECK);
1832  } else {
1833  // Store the subexpression value in the array's elements.
1834  __ mov(ecx, Immediate(Smi::FromInt(i)));
1835  StoreArrayLiteralElementStub stub;
1836  __ CallStub(&stub);
1837  }
1838 
1839  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1840  }
1841 
1842  if (result_saved) {
1843  __ add(esp, Immediate(kPointerSize)); // literal index
1844  context()->PlugTOS();
1845  } else {
1846  context()->Plug(eax);
1847  }
1848 }
1849 
1850 
1851 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1852  ASSERT(expr->target()->IsValidLeftHandSide());
1853 
1854  Comment cmnt(masm_, "[ Assignment");
1855 
1856  // Left-hand side can only be a property, a global or a (parameter or local)
1857  // slot.
1858  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1859  LhsKind assign_type = VARIABLE;
1860  Property* property = expr->target()->AsProperty();
1861  if (property != NULL) {
1862  assign_type = (property->key()->IsPropertyName())
1863  ? NAMED_PROPERTY
1864  : KEYED_PROPERTY;
1865  }
1866 
1867  // Evaluate LHS expression.
1868  switch (assign_type) {
1869  case VARIABLE:
1870  // Nothing to do here.
1871  break;
1872  case NAMED_PROPERTY:
1873  if (expr->is_compound()) {
1874  // We need the receiver both on the stack and in edx.
1875  VisitForStackValue(property->obj());
1876  __ mov(edx, Operand(esp, 0));
1877  } else {
1878  VisitForStackValue(property->obj());
1879  }
1880  break;
1881  case KEYED_PROPERTY: {
1882  if (expr->is_compound()) {
1883  VisitForStackValue(property->obj());
1884  VisitForStackValue(property->key());
1885  __ mov(edx, Operand(esp, kPointerSize)); // Object.
1886  __ mov(ecx, Operand(esp, 0)); // Key.
1887  } else {
1888  VisitForStackValue(property->obj());
1889  VisitForStackValue(property->key());
1890  }
1891  break;
1892  }
1893  }
1894 
1895  // For compound assignments we need another deoptimization point after the
1896  // variable/property load.
1897  if (expr->is_compound()) {
1898  AccumulatorValueContext result_context(this);
1899  { AccumulatorValueContext left_operand_context(this);
1900  switch (assign_type) {
1901  case VARIABLE:
1902  EmitVariableLoad(expr->target()->AsVariableProxy());
1903  PrepareForBailout(expr->target(), TOS_REG);
1904  break;
1905  case NAMED_PROPERTY:
1906  EmitNamedPropertyLoad(property);
1907  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1908  break;
1909  case KEYED_PROPERTY:
1910  EmitKeyedPropertyLoad(property);
1911  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1912  break;
1913  }
1914  }
1915 
1916  Token::Value op = expr->binary_op();
1917  __ push(eax); // Left operand goes on the stack.
1918  VisitForAccumulatorValue(expr->value());
1919 
1920  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1921  ? OVERWRITE_RIGHT
1922  : NO_OVERWRITE;
1923  SetSourcePosition(expr->position() + 1);
1924  if (ShouldInlineSmiCase(op)) {
1925  EmitInlineSmiBinaryOp(expr->binary_operation(),
1926  op,
1927  mode,
1928  expr->target(),
1929  expr->value());
1930  } else {
1931  EmitBinaryOp(expr->binary_operation(), op, mode);
1932  }
1933 
1934  // Deoptimization point in case the binary operation may have side effects.
1935  PrepareForBailout(expr->binary_operation(), TOS_REG);
1936  } else {
1937  VisitForAccumulatorValue(expr->value());
1938  }
1939 
1940  // Record source position before possible IC call.
1941  SetSourcePosition(expr->position());
1942 
1943  // Store the value.
1944  switch (assign_type) {
1945  case VARIABLE:
1946  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1947  expr->op());
1948  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1949  context()->Plug(eax);
1950  break;
1951  case NAMED_PROPERTY:
1952  EmitNamedPropertyAssignment(expr);
1953  break;
1954  case KEYED_PROPERTY:
1955  EmitKeyedPropertyAssignment(expr);
1956  break;
1957  }
1958 }
1959 
1960 
1961 void FullCodeGenerator::VisitYield(Yield* expr) {
1962  Comment cmnt(masm_, "[ Yield");
1963  // Evaluate yielded value first; the initial iterator definition depends on
1964  // this. It stays on the stack while we update the iterator.
1965  VisitForStackValue(expr->expression());
1966 
1967  switch (expr->yield_kind()) {
1968  case Yield::SUSPEND:
1969  // Pop value from top-of-stack slot; box result into result register.
1970  EmitCreateIteratorResult(false);
1971  __ push(result_register());
1972  // Fall through.
1973  case Yield::INITIAL: {
1974  Label suspend, continuation, post_runtime, resume;
1975 
1976  __ jmp(&suspend);
1977 
1978  __ bind(&continuation);
1979  __ jmp(&resume);
1980 
1981  __ bind(&suspend);
1982  VisitForAccumulatorValue(expr->generator_object());
1983  ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1984  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1985  Immediate(Smi::FromInt(continuation.pos())));
1986  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1987  __ mov(ecx, esi);
1988  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1989  kDontSaveFPRegs);
1990  __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1991  __ cmp(esp, ebx);
1992  __ j(equal, &post_runtime);
1993  __ push(eax); // generator object
1994  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
1995  __ mov(context_register(),
1996  Operand(ebp, StandardFrameConstants::kContextOffset));
1997  __ bind(&post_runtime);
1998  __ pop(result_register());
1999  EmitReturnSequence();
2000 
2001  __ bind(&resume);
2002  context()->Plug(result_register());
2003  break;
2004  }
2005 
2006  case Yield::FINAL: {
2007  VisitForAccumulatorValue(expr->generator_object());
2008  __ mov(FieldOperand(result_register(),
2009  JSGeneratorObject::kContinuationOffset),
2010  Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2011  // Pop value from top-of-stack slot, box result into result register.
2012  EmitCreateIteratorResult(true);
2013  EmitUnwindBeforeReturn();
2014  EmitReturnSequence();
2015  break;
2016  }
2017 
2018  case Yield::DELEGATING: {
2019  VisitForStackValue(expr->generator_object());
2020 
2021  // Initial stack layout is as follows:
2022  // [sp + 1 * kPointerSize] iter
2023  // [sp + 0 * kPointerSize] g
2024 
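// Sketch of the delegation protocol implemented below, in JS-like pseudo
// code (illustrative only; names are not part of the original source):
//
//   var received = undefined;
//   for (;;) {
//     var result = iter.next(received);        // l_next / l_call
//     if (result.done) break;                  // l_loop
//     received = yield result;                 // l_try .. l_resume
//   }
//   // result.value is the value of the yield* expression; exceptions
//   // thrown into the generator are forwarded via iter['throw'] (l_catch).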
2025  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2026  Label l_next, l_call, l_loop;
2027  // Initial send value is undefined.
2028  __ mov(eax, isolate()->factory()->undefined_value());
2029  __ jmp(&l_next);
2030 
2031  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2032  __ bind(&l_catch);
2033  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2034  __ mov(ecx, isolate()->factory()->throw_string()); // "throw"
2035  __ push(ecx); // "throw"
2036  __ push(Operand(esp, 2 * kPointerSize)); // iter
2037  __ push(eax); // exception
2038  __ jmp(&l_call);
2039 
2040  // try { received = %yield result }
2041  // Shuffle the received result above a try handler and yield it without
2042  // re-boxing.
2043  __ bind(&l_try);
2044  __ pop(eax); // result
2045  __ PushTryHandler(StackHandler::CATCH, expr->index());
2046  const int handler_size = StackHandlerConstants::kSize;
2047  __ push(eax); // result
2048  __ jmp(&l_suspend);
2049  __ bind(&l_continuation);
2050  __ jmp(&l_resume);
2051  __ bind(&l_suspend);
2052  const int generator_object_depth = kPointerSize + handler_size;
2053  __ mov(eax, Operand(esp, generator_object_depth));
2054  __ push(eax); // g
2055  ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2056  __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2057  Immediate(Smi::FromInt(l_continuation.pos())));
2058  __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2059  __ mov(ecx, esi);
2060  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2061  kDontSaveFPRegs);
2062  __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2063  __ mov(context_register(),
2064  Operand(ebp, StandardFrameConstants::kContextOffset));
2065  __ pop(eax); // result
2066  EmitReturnSequence();
2067  __ bind(&l_resume); // received in eax
2068  __ PopTryHandler();
2069 
2070  // receiver = iter; f = iter.next; arg = received;
2071  __ bind(&l_next);
2072  __ mov(ecx, isolate()->factory()->next_string()); // "next"
2073  __ push(ecx);
2074  __ push(Operand(esp, 2 * kPointerSize)); // iter
2075  __ push(eax); // received
2076 
2077  // result = receiver[f](arg);
2078  __ bind(&l_call);
2079  __ mov(edx, Operand(esp, kPointerSize));
2080  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2081  CallIC(ic, TypeFeedbackId::None());
2082  __ mov(edi, eax);
2083  __ mov(Operand(esp, 2 * kPointerSize), edi);
2084  CallFunctionStub stub(1, CALL_AS_METHOD);
2085  __ CallStub(&stub);
2086 
2087  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2088  __ Drop(1); // The function is still on the stack; drop it.
2089 
2090  // if (!result.done) goto l_try;
2091  __ bind(&l_loop);
2092  __ push(eax); // save result
2093  __ mov(edx, eax); // result
2094  __ mov(ecx, isolate()->factory()->done_string()); // "done"
2095  CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2096  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2097  CallIC(bool_ic);
2098  __ test(eax, eax);
2099  __ j(zero, &l_try);
2100 
2101  // result.value
2102  __ pop(edx); // result
2103  __ mov(ecx, isolate()->factory()->value_string()); // "value"
2104  CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2105  context()->DropAndPlug(2, eax); // drop iter and g
2106  break;
2107  }
2108  }
2109 }
2110 
2111 
2112 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2113  Expression *value,
2114  JSGeneratorObject::ResumeMode resume_mode) {
2115  // The value stays in eax, and is ultimately read by the resumed generator, as
2116  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
2117  // is read to throw the value when the resumed generator is already closed.
2118  // ebx will hold the generator object until the activation has been resumed.
2119  VisitForStackValue(generator);
2120  VisitForAccumulatorValue(value);
2121  __ pop(ebx);
2122 
2123  // Check generator state.
2124  Label wrong_state, closed_state, done;
2125  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2126  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2127  __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2128  Immediate(Smi::FromInt(0)));
2129  __ j(equal, &closed_state);
2130  __ j(less, &wrong_state);
2131 
2132  // Load suspended function and context.
2133  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2134  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2135 
2136  // Push receiver.
2137  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2138 
2139  // Push holes for arguments to generator function.
2140  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2141  __ mov(edx,
2142  FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2143  __ mov(ecx, isolate()->factory()->the_hole_value());
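// Note (assumed rationale, not original text): edx holds the formal
// parameter count, and the loop below pushes one hole per formal parameter;
// the actual arguments of the suspended activation are not restored here.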
2144  Label push_argument_holes, push_frame;
2145  __ bind(&push_argument_holes);
2146  __ sub(edx, Immediate(Smi::FromInt(1)));
2147  __ j(carry, &push_frame);
2148  __ push(ecx);
2149  __ jmp(&push_argument_holes);
2150 
2151  // Enter a new JavaScript frame, and initialize its slots as they were when
2152  // the generator was suspended.
2153  Label resume_frame;
2154  __ bind(&push_frame);
2155  __ call(&resume_frame);
2156  __ jmp(&done);
2157  __ bind(&resume_frame);
2158  __ push(ebp); // Caller's frame pointer.
2159  __ mov(ebp, esp);
2160  __ push(esi); // Callee's context.
2161  __ push(edi); // Callee's JS Function.
2162 
2163  // Load the operand stack size.
2164  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2165  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2166  __ SmiUntag(edx);
2167 
2168  // If we are sending a value and there is no operand stack, we can jump back
2169  // in directly.
2170  if (resume_mode == JSGeneratorObject::NEXT) {
2171  Label slow_resume;
2172  __ cmp(edx, Immediate(0));
2173  __ j(not_zero, &slow_resume);
2174  __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2175  __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2176  __ SmiUntag(ecx);
2177  __ add(edx, ecx);
2178  __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2179  Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2180  __ jmp(edx);
2181  __ bind(&slow_resume);
2182  }
2183 
2184  // Otherwise, we push holes for the operand stack and call the runtime to fix
2185  // up the stack and the handlers.
2186  Label push_operand_holes, call_resume;
2187  __ bind(&push_operand_holes);
2188  __ sub(edx, Immediate(1));
2189  __ j(carry, &call_resume);
2190  __ push(ecx);
2191  __ jmp(&push_operand_holes);
2192  __ bind(&call_resume);
2193  __ push(ebx);
2194  __ push(result_register());
2195  __ Push(Smi::FromInt(resume_mode));
2196  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2197  // Not reached: the runtime call returns elsewhere.
2198  __ Abort(kGeneratorFailedToResume);
2199 
2200  // Reach here when generator is closed.
2201  __ bind(&closed_state);
2202  if (resume_mode == JSGeneratorObject::NEXT) {
2203  // Return completed iterator result when generator is closed.
2204  __ push(Immediate(isolate()->factory()->undefined_value()));
2205  // Pop value from top-of-stack slot; box result into result register.
2206  EmitCreateIteratorResult(true);
2207  } else {
2208  // Throw the provided value.
2209  __ push(eax);
2210  __ CallRuntime(Runtime::kHiddenThrow, 1);
2211  }
2212  __ jmp(&done);
2213 
2214  // Throw error if we attempt to operate on a running generator.
2215  __ bind(&wrong_state);
2216  __ push(ebx);
2217  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
2218 
2219  __ bind(&done);
2220  context()->Plug(result_register());
2221 }
2222 
2223 
2224 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2225  Label gc_required;
2226  Label allocated;
2227 
2228  Handle<Map> map(isolate()->native_context()->generator_result_map());
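// This builds the iterator result object { value: v, done: b }: v is popped
// from the stack below and b is this function's 'done' argument.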
2229 
2230  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
2231  __ jmp(&allocated);
2232 
2233  __ bind(&gc_required);
2234  __ Push(Smi::FromInt(map->instance_size()));
2235  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2236  __ mov(context_register(),
2237  Operand(ebp, StandardFrameConstants::kContextOffset));
2238 
2239  __ bind(&allocated);
2240  __ mov(ebx, map);
2241  __ pop(ecx);
2242  __ mov(edx, isolate()->factory()->ToBoolean(done));
2243  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2244  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2245  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2246  isolate()->factory()->empty_fixed_array());
2247  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2248  isolate()->factory()->empty_fixed_array());
2249  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2250  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2251 
2252  // Only the value field needs a write barrier, as the other values are in the
2253  // root set.
2254  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2255  ecx, edx, kDontSaveFPRegs);
2256 }
2257 
2258 
2259 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2260  SetSourcePosition(prop->position());
2261  Literal* key = prop->key()->AsLiteral();
2262  ASSERT(!key->value()->IsSmi());
2263  __ mov(ecx, Immediate(key->value()));
2264  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2265 }
2266 
2267 
2268 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2269  SetSourcePosition(prop->position());
2270  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2271  CallIC(ic, prop->PropertyFeedbackId());
2272 }
2273 
2274 
2275 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2276  Token::Value op,
2277  OverwriteMode mode,
2278  Expression* left,
2279  Expression* right) {
2280  // Do combined smi check of the operands. Left operand is on the
2281  // stack. Right operand is in eax.
2282  Label smi_case, done, stub_call;
2283  __ pop(edx);
2284  __ mov(ecx, eax);
2285  __ or_(eax, edx);
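// Smis have tag bit 0, so the low bit of eax | edx is set iff at least one
// operand is a non-smi; a single test of the combined value checks both.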
2286  JumpPatchSite patch_site(masm_);
2287  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2288 
2289  __ bind(&stub_call);
2290  __ mov(eax, ecx);
2291  BinaryOpICStub stub(op, mode);
2292  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2293  patch_site.EmitPatchInfo();
2294  __ jmp(&done, Label::kNear);
2295 
2296  // Smi case.
2297  __ bind(&smi_case);
2298  __ mov(eax, edx); // Copy left operand in case of a stub call.
2299 
2300  switch (op) {
2301  case Token::SAR:
2302  __ SmiUntag(ecx);
2303  __ sar_cl(eax); // No checks of result necessary
2304  __ and_(eax, Immediate(~kSmiTagMask));
2305  break;
2306  case Token::SHL: {
2307  Label result_ok;
2308  __ SmiUntag(eax);
2309  __ SmiUntag(ecx);
2310  __ shl_cl(eax);
2311  // Check that the *signed* result fits in a smi.
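// A smi payload is 31 bits, so the untagged result must lie in
// [-2^30, 2^30). Subtracting 0xc0000000 leaves the sign flag clear exactly
// for that range, which the j(positive) below relies on.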
2312  __ cmp(eax, 0xc0000000);
2313  __ j(positive, &result_ok);
2314  __ SmiTag(ecx);
2315  __ jmp(&stub_call);
2316  __ bind(&result_ok);
2317  __ SmiTag(eax);
2318  break;
2319  }
2320  case Token::SHR: {
2321  Label result_ok;
2322  __ SmiUntag(eax);
2323  __ SmiUntag(ecx);
2324  __ shr_cl(eax);
2325  __ test(eax, Immediate(0xc0000000));
2326  __ j(zero, &result_ok);
2327  __ SmiTag(ecx);
2328  __ jmp(&stub_call);
2329  __ bind(&result_ok);
2330  __ SmiTag(eax);
2331  break;
2332  }
2333  case Token::ADD:
2334  __ add(eax, ecx);
2335  __ j(overflow, &stub_call);
2336  break;
2337  case Token::SUB:
2338  __ sub(eax, ecx);
2339  __ j(overflow, &stub_call);
2340  break;
2341  case Token::MUL: {
2342  __ SmiUntag(eax);
2343  __ imul(eax, ecx);
2344  __ j(overflow, &stub_call);
2345  __ test(eax, eax);
2346  __ j(not_zero, &done, Label::kNear);
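// A zero product needs the sign check below: if either operand was
// negative, the correct result is -0, which a smi cannot represent, so the
// stub must handle that case.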
2347  __ mov(ebx, edx);
2348  __ or_(ebx, ecx);
2349  __ j(negative, &stub_call);
2350  break;
2351  }
2352  case Token::BIT_OR:
2353  __ or_(eax, ecx);
2354  break;
2355  case Token::BIT_AND:
2356  __ and_(eax, ecx);
2357  break;
2358  case Token::BIT_XOR:
2359  __ xor_(eax, ecx);
2360  break;
2361  default:
2362  UNREACHABLE();
2363  }
2364 
2365  __ bind(&done);
2366  context()->Plug(eax);
2367 }
2368 
2369 
2370 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2371  Token::Value op,
2372  OverwriteMode mode) {
2373  __ pop(edx);
2374  BinaryOpICStub stub(op, mode);
2375  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2376  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2377  patch_site.EmitPatchInfo();
2378  context()->Plug(eax);
2379 }
2380 
2381 
2382 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2383  ASSERT(expr->IsValidLeftHandSide());
2384 
2385  // Left-hand side can only be a property, a global or a (parameter or local)
2386  // slot.
2387  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2388  LhsKind assign_type = VARIABLE;
2389  Property* prop = expr->AsProperty();
2390  if (prop != NULL) {
2391  assign_type = (prop->key()->IsPropertyName())
2392  ? NAMED_PROPERTY
2393  : KEYED_PROPERTY;
2394  }
2395 
2396  switch (assign_type) {
2397  case VARIABLE: {
2398  Variable* var = expr->AsVariableProxy()->var();
2399  EffectContext context(this);
2400  EmitVariableAssignment(var, Token::ASSIGN);
2401  break;
2402  }
2403  case NAMED_PROPERTY: {
2404  __ push(eax); // Preserve value.
2405  VisitForAccumulatorValue(prop->obj());
2406  __ mov(edx, eax);
2407  __ pop(eax); // Restore value.
2408  __ mov(ecx, prop->key()->AsLiteral()->value());
2409  CallStoreIC();
2410  break;
2411  }
2412  case KEYED_PROPERTY: {
2413  __ push(eax); // Preserve value.
2414  VisitForStackValue(prop->obj());
2415  VisitForAccumulatorValue(prop->key());
2416  __ mov(ecx, eax);
2417  __ pop(edx); // Receiver.
2418  __ pop(eax); // Restore value.
2419  Handle<Code> ic = strict_mode() == SLOPPY
2420  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2421  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2422  CallIC(ic);
2423  break;
2424  }
2425  }
2426  context()->Plug(eax);
2427 }
2428 
2429 
2430 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2431  Variable* var, MemOperand location) {
2432  __ mov(location, eax);
2433  if (var->IsContextSlot()) {
2434  __ mov(edx, eax);
2435  int offset = Context::SlotOffset(var->index());
2436  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2437  }
2438 }
2439 
2440 
2441 void FullCodeGenerator::EmitCallStoreContextSlot(
2442  Handle<String> name, StrictMode strict_mode) {
2443  __ push(eax); // Value.
2444  __ push(esi); // Context.
2445  __ push(Immediate(name));
2446  __ push(Immediate(Smi::FromInt(strict_mode)));
2447  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
2448 }
2449 
2450 
2451 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2452  Token::Value op) {
2453  if (var->IsUnallocated()) {
2454  // Global var, const, or let.
2455  __ mov(ecx, var->name());
2456  __ mov(edx, GlobalObjectOperand());
2457  CallStoreIC();
2458 
2459  } else if (op == Token::INIT_CONST_LEGACY) {
2460  // Const initializers need a write barrier.
2461  ASSERT(!var->IsParameter()); // No const parameters.
2462  if (var->IsLookupSlot()) {
2463  __ push(eax);
2464  __ push(esi);
2465  __ push(Immediate(var->name()));
2466  __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
2467  } else {
2468  ASSERT(var->IsStackLocal() || var->IsContextSlot());
2469  Label skip;
2470  MemOperand location = VarOperand(var, ecx);
2471  __ mov(edx, location);
2472  __ cmp(edx, isolate()->factory()->the_hole_value());
2473  __ j(not_equal, &skip, Label::kNear);
2474  EmitStoreToStackLocalOrContextSlot(var, location);
2475  __ bind(&skip);
2476  }
2477 
2478  } else if (var->mode() == LET && op != Token::INIT_LET) {
2479  // Non-initializing assignment to let variable needs a write barrier.
2480  if (var->IsLookupSlot()) {
2481  EmitCallStoreContextSlot(var->name(), strict_mode());
2482  } else {
2483  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2484  Label assign;
2485  MemOperand location = VarOperand(var, ecx);
2486  __ mov(edx, location);
2487  __ cmp(edx, isolate()->factory()->the_hole_value());
2488  __ j(not_equal, &assign, Label::kNear);
2489  __ push(Immediate(var->name()));
2490  __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
2491  __ bind(&assign);
2492  EmitStoreToStackLocalOrContextSlot(var, location);
2493  }
2494 
2495  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2496  // Assignment to var or initializing assignment to let/const
2497  // in harmony mode.
2498  if (var->IsLookupSlot()) {
2499  EmitCallStoreContextSlot(var->name(), strict_mode());
2500  } else {
2501  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2502  MemOperand location = VarOperand(var, ecx);
2503  if (generate_debug_code_ && op == Token::INIT_LET) {
2504  // Check for an uninitialized let binding.
2505  __ mov(edx, location);
2506  __ cmp(edx, isolate()->factory()->the_hole_value());
2507  __ Check(equal, kLetBindingReInitialization);
2508  }
2509  EmitStoreToStackLocalOrContextSlot(var, location);
2510  }
2511  }
2512  // Non-initializing assignments to consts are ignored.
2513 }
2514 
2515 
2516 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2517  // Assignment to a property, using a named store IC.
2518  // eax : value
2519  // esp[0] : receiver
2520 
2521  Property* prop = expr->target()->AsProperty();
2522  ASSERT(prop != NULL);
2523  ASSERT(prop->key()->AsLiteral() != NULL);
2524 
2525  // Record source code position before IC call.
2526  SetSourcePosition(expr->position());
2527  __ mov(ecx, prop->key()->AsLiteral()->value());
2528  __ pop(edx);
2529  CallStoreIC(expr->AssignmentFeedbackId());
2530  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2531  context()->Plug(eax);
2532 }
2533 
2534 
2535 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2536  // Assignment to a property, using a keyed store IC.
2537  // eax : value
2538  // esp[0] : key
2539  // esp[kPointerSize] : receiver
2540 
2541  __ pop(ecx); // Key.
2542  __ pop(edx);
2543  // Record source code position before IC call.
2544  SetSourcePosition(expr->position());
2545  Handle<Code> ic = strict_mode() == SLOPPY
2546  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2547  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2548  CallIC(ic, expr->AssignmentFeedbackId());
2549 
2550  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2551  context()->Plug(eax);
2552 }
2553 
2554 
2555 void FullCodeGenerator::VisitProperty(Property* expr) {
2556  Comment cmnt(masm_, "[ Property");
2557  Expression* key = expr->key();
2558 
2559  if (key->IsPropertyName()) {
2560  VisitForAccumulatorValue(expr->obj());
2561  __ mov(edx, result_register());
2562  EmitNamedPropertyLoad(expr);
2563  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2564  context()->Plug(eax);
2565  } else {
2566  VisitForStackValue(expr->obj());
2567  VisitForAccumulatorValue(expr->key());
2568  __ pop(edx); // Object.
2569  __ mov(ecx, result_register()); // Key.
2570  EmitKeyedPropertyLoad(expr);
2571  context()->Plug(eax);
2572  }
2573 }
2574 
2575 
2576 void FullCodeGenerator::CallIC(Handle<Code> code,
2577  TypeFeedbackId ast_id) {
2578  ic_total_count_++;
2579  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2580 }
2581 
2582 
2583 
2584 
2585 // Code common for calls using the IC.
2586 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2587  Expression* callee = expr->expression();
2588  ZoneList<Expression*>* args = expr->arguments();
2589  int arg_count = args->length();
2590 
2591  CallFunctionFlags flags;
2592  // Get the target function.
2593  if (callee->IsVariableProxy()) {
2594  { StackValueContext context(this);
2595  EmitVariableLoad(callee->AsVariableProxy());
2596  PrepareForBailout(callee, NO_REGISTERS);
2597  }
2598  // Push undefined as receiver. This is patched in the method prologue if it
2599  // is a sloppy mode method.
2600  __ push(Immediate(isolate()->factory()->undefined_value()));
2601  flags = NO_CALL_FUNCTION_FLAGS;
2602  } else {
2603  // Load the function from the receiver.
2604  ASSERT(callee->IsProperty());
2605  __ mov(edx, Operand(esp, 0));
2606  EmitNamedPropertyLoad(callee->AsProperty());
2607  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2608  // Push the target function under the receiver.
2609  __ push(Operand(esp, 0));
2610  __ mov(Operand(esp, kPointerSize), eax);
2611  flags = CALL_AS_METHOD;
2612  }
2613 
2614  // Load the arguments.
2615  { PreservePositionScope scope(masm()->positions_recorder());
2616  for (int i = 0; i < arg_count; i++) {
2617  VisitForStackValue(args->at(i));
2618  }
2619  }
2620 
2621  // Record source position of the IC call.
2622  SetSourcePosition(expr->position());
2623  CallFunctionStub stub(arg_count, flags);
2624  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2625  __ CallStub(&stub);
2626  RecordJSReturnSite(expr);
2627 
2628  // Restore context register.
2629  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2630 
2631  context()->DropAndPlug(1, eax);
2632 }
2633 
2634 
2635 // Code common for calls using the IC.
2636 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2637  Expression* key) {
2638  // Load the key.
2639  VisitForAccumulatorValue(key);
2640 
2641  Expression* callee = expr->expression();
2642  ZoneList<Expression*>* args = expr->arguments();
2643  int arg_count = args->length();
2644 
2645  // Load the function from the receiver.
2646  ASSERT(callee->IsProperty());
2647  __ mov(edx, Operand(esp, 0));
2648  // Move the key into the right register for the keyed load IC.
2649  __ mov(ecx, eax);
2650  EmitKeyedPropertyLoad(callee->AsProperty());
2651  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2652 
2653  // Push the target function under the receiver.
2654  __ push(Operand(esp, 0));
2655  __ mov(Operand(esp, kPointerSize), eax);
2656 
2657  // Load the arguments.
2658  { PreservePositionScope scope(masm()->positions_recorder());
2659  for (int i = 0; i < arg_count; i++) {
2660  VisitForStackValue(args->at(i));
2661  }
2662  }
2663 
2664  // Record source position of the IC call.
2665  SetSourcePosition(expr->position());
2666  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2667  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2668  __ CallStub(&stub);
2669  RecordJSReturnSite(expr);
2670 
2671  // Restore context register.
2672  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2673 
2674  context()->DropAndPlug(1, eax);
2675 }
2676 
2677 
2678 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2679  // Code common for calls using the call stub.
2680  ZoneList<Expression*>* args = expr->arguments();
2681  int arg_count = args->length();
2682  { PreservePositionScope scope(masm()->positions_recorder());
2683  for (int i = 0; i < arg_count; i++) {
2684  VisitForStackValue(args->at(i));
2685  }
2686  }
2687  // Record source position for debugger.
2688  SetSourcePosition(expr->position());
2689 
2690  Handle<Object> uninitialized =
2691  TypeFeedbackInfo::UninitializedSentinel(isolate());
2692  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2693  __ LoadHeapObject(ebx, FeedbackVector());
2694  __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
2695 
2696  // Record call targets in unoptimized code.
2697  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2698  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2699  __ CallStub(&stub);
2700 
2701  RecordJSReturnSite(expr);
2702  // Restore context register.
2703  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2704  context()->DropAndPlug(1, eax);
2705 }
2706 
2707 
2708 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2709  // Push copy of the first argument or undefined if it doesn't exist.
2710  if (arg_count > 0) {
2711  __ push(Operand(esp, arg_count * kPointerSize));
2712  } else {
2713  __ push(Immediate(isolate()->factory()->undefined_value()));
2714  }
2715 
2716  // Push the receiver of the enclosing function.
2717  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2718  // Push the language mode.
2719  __ push(Immediate(Smi::FromInt(strict_mode())));
2720 
2721  // Push the start position of the scope the call resides in.
2722  __ push(Immediate(Smi::FromInt(scope()->start_position())));
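// Counting the copy of the function pushed by the caller, five values are
// now on the stack, matching the runtime call's five arguments below.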
2723 
2724  // Do the runtime call.
2725  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
2726 }
2727 
2728 
2729 void FullCodeGenerator::VisitCall(Call* expr) {
2730 #ifdef DEBUG
2731  // We want to verify that RecordJSReturnSite gets called on all paths
2732  // through this function. Avoid early returns.
2733  expr->return_is_recorded_ = false;
2734 #endif
2735 
2736  Comment cmnt(masm_, "[ Call");
2737  Expression* callee = expr->expression();
2738  Call::CallType call_type = expr->GetCallType(isolate());
2739 
2740  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2741  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2742  // to resolve the function we need to call and the receiver of the call.
2743  // Then we call the resolved function using the given arguments.
2744  ZoneList<Expression*>* args = expr->arguments();
2745  int arg_count = args->length();
2746  { PreservePositionScope pos_scope(masm()->positions_recorder());
2747  VisitForStackValue(callee);
2748  // Reserved receiver slot.
2749  __ push(Immediate(isolate()->factory()->undefined_value()));
2750  // Push the arguments.
2751  for (int i = 0; i < arg_count; i++) {
2752  VisitForStackValue(args->at(i));
2753  }
2754 
2755  // Push a copy of the function (found below the arguments) and
2756  // resolve eval.
2757  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2758  EmitResolvePossiblyDirectEval(arg_count);
2759 
2760  // The runtime call returns a pair of values in eax (function) and
2761  // edx (receiver). Touch up the stack with the right values.
2762  __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2763  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2764  }
2765  // Record source position for debugger.
2766  SetSourcePosition(expr->position());
2767  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2768  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2769  __ CallStub(&stub);
2770  RecordJSReturnSite(expr);
2771  // Restore context register.
2772  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2773  context()->DropAndPlug(1, eax);
2774 
2775  } else if (call_type == Call::GLOBAL_CALL) {
2776  EmitCallWithIC(expr);
2777 
2778  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2779  // Call to a lookup slot (dynamically introduced variable).
2780  VariableProxy* proxy = callee->AsVariableProxy();
2781  Label slow, done;
2782  { PreservePositionScope scope(masm()->positions_recorder());
2783  // Generate code for loading from variables potentially shadowed by
2784  // eval-introduced variables.
2785  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2786  }
2787  __ bind(&slow);
2788  // Call the runtime to find the function to call (returned in eax) and
2789  // the object holding it (returned in edx).
2790  __ push(context_register());
2791  __ push(Immediate(proxy->name()));
2792  __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2793  __ push(eax); // Function.
2794  __ push(edx); // Receiver.
2795 
2796  // If fast case code has been generated, emit code to push the function
2797  // and receiver and have the slow path jump around this code.
2798  if (done.is_linked()) {
2799  Label call;
2800  __ jmp(&call, Label::kNear);
2801  __ bind(&done);
2802  // Push function.
2803  __ push(eax);
2804  // The receiver is implicitly the global receiver. Indicate this by
2805  // passing undefined to the call function stub.
2806  __ push(Immediate(isolate()->factory()->undefined_value()));
2807  __ bind(&call);
2808  }
2809 
2810  // The receiver is either the global receiver or an object found by
2811  // LoadContextSlot.
2812  EmitCallWithStub(expr);
2813 
2814  } else if (call_type == Call::PROPERTY_CALL) {
2815  Property* property = callee->AsProperty();
2816  { PreservePositionScope scope(masm()->positions_recorder());
2817  VisitForStackValue(property->obj());
2818  }
2819  if (property->key()->IsPropertyName()) {
2820  EmitCallWithIC(expr);
2821  } else {
2822  EmitKeyedCallWithIC(expr, property->key());
2823  }
2824 
2825  } else {
2826  ASSERT(call_type == Call::OTHER_CALL);
2827  // Call to an arbitrary expression not handled specially above.
2828  { PreservePositionScope scope(masm()->positions_recorder());
2829  VisitForStackValue(callee);
2830  }
2831  __ push(Immediate(isolate()->factory()->undefined_value()));
2832  // Emit function call.
2833  EmitCallWithStub(expr);
2834  }
2835 
2836 #ifdef DEBUG
2837  // RecordJSReturnSite should have been called.
2838  ASSERT(expr->return_is_recorded_);
2839 #endif
2840 }
2841 
2842 
2843 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2844  Comment cmnt(masm_, "[ CallNew");
2845  // According to ECMA-262, section 11.2.2, page 44, the function
2846  // expression in new calls must be evaluated before the
2847  // arguments.
2848 
2849  // Push constructor on the stack. If it's not a function it's used as
2850  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2851  // ignored.
2852  VisitForStackValue(expr->expression());
2853 
2854  // Push the arguments ("left-to-right") on the stack.
2855  ZoneList<Expression*>* args = expr->arguments();
2856  int arg_count = args->length();
2857  for (int i = 0; i < arg_count; i++) {
2858  VisitForStackValue(args->at(i));
2859  }
2860 
2861  // Call the construct call builtin that handles allocation and
2862  // constructor invocation.
2863  SetSourcePosition(expr->position());
2864 
2865  // Load function and argument count into edi and eax.
2866  __ Move(eax, Immediate(arg_count));
2867  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2868 
2869  // Record call targets in unoptimized code.
2870  Handle<Object> uninitialized =
2871  TypeFeedbackInfo::UninitializedSentinel(isolate());
2872  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2873  if (FLAG_pretenuring_call_new) {
2874  StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2875  isolate()->factory()->NewAllocationSite());
2876  ASSERT(expr->AllocationSiteFeedbackSlot() ==
2877  expr->CallNewFeedbackSlot() + 1);
2878  }
2879 
2880  __ LoadHeapObject(ebx, FeedbackVector());
2881  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2882 
2883  CallConstructStub stub(RECORD_CALL_TARGET);
2884  __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2885  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2886  context()->Plug(eax);
2887 }
2888 
2889 
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2891  ZoneList<Expression*>* args = expr->arguments();
2892  ASSERT(args->length() == 1);
2893 
2894  VisitForAccumulatorValue(args->at(0));
2895 
2896  Label materialize_true, materialize_false;
2897  Label* if_true = NULL;
2898  Label* if_false = NULL;
2899  Label* fall_through = NULL;
2900  context()->PrepareTest(&materialize_true, &materialize_false,
2901  &if_true, &if_false, &fall_through);
2902 
2903  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2904  __ test(eax, Immediate(kSmiTagMask));
2905  Split(zero, if_true, if_false, fall_through);
2906 
2907  context()->Plug(if_true, if_false);
2908 }
2909 
2910 
2911 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2912  ZoneList<Expression*>* args = expr->arguments();
2913  ASSERT(args->length() == 1);
2914 
2915  VisitForAccumulatorValue(args->at(0));
2916 
2917  Label materialize_true, materialize_false;
2918  Label* if_true = NULL;
2919  Label* if_false = NULL;
2920  Label* fall_through = NULL;
2921  context()->PrepareTest(&materialize_true, &materialize_false,
2922  &if_true, &if_false, &fall_through);
2923 
2924  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
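// The mask below tests the smi tag bit and the sign bit at once: the value
// is a non-negative smi iff both are clear.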
2925  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2926  Split(zero, if_true, if_false, fall_through);
2927 
2928  context()->Plug(if_true, if_false);
2929 }
2930 
2931 
2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2933  ZoneList<Expression*>* args = expr->arguments();
2934  ASSERT(args->length() == 1);
2935 
2936  VisitForAccumulatorValue(args->at(0));
2937 
2938  Label materialize_true, materialize_false;
2939  Label* if_true = NULL;
2940  Label* if_false = NULL;
2941  Label* fall_through = NULL;
2942  context()->PrepareTest(&materialize_true, &materialize_false,
2943  &if_true, &if_false, &fall_through);
2944 
2945  __ JumpIfSmi(eax, if_false);
2946  __ cmp(eax, isolate()->factory()->null_value());
2947  __ j(equal, if_true);
2948  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2949  // Undetectable objects behave like undefined when tested with typeof.
2950  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2951  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2952  __ j(not_zero, if_false);
2953  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2954  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2955  __ j(below, if_false);
2956  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2957  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2958  Split(below_equal, if_true, if_false, fall_through);
2959 
2960  context()->Plug(if_true, if_false);
2961 }
2962 
2963 
2964 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2965  ZoneList<Expression*>* args = expr->arguments();
2966  ASSERT(args->length() == 1);
2967 
2968  VisitForAccumulatorValue(args->at(0));
2969 
2970  Label materialize_true, materialize_false;
2971  Label* if_true = NULL;
2972  Label* if_false = NULL;
2973  Label* fall_through = NULL;
2974  context()->PrepareTest(&materialize_true, &materialize_false,
2975  &if_true, &if_false, &fall_through);
2976 
2977  __ JumpIfSmi(eax, if_false);
2978  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2979  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2980  Split(above_equal, if_true, if_false, fall_through);
2981 
2982  context()->Plug(if_true, if_false);
2983 }
2984 
2985 
2986 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2987  ZoneList<Expression*>* args = expr->arguments();
2988  ASSERT(args->length() == 1);
2989 
2990  VisitForAccumulatorValue(args->at(0));
2991 
2992  Label materialize_true, materialize_false;
2993  Label* if_true = NULL;
2994  Label* if_false = NULL;
2995  Label* fall_through = NULL;
2996  context()->PrepareTest(&materialize_true, &materialize_false,
2997  &if_true, &if_false, &fall_through);
2998 
2999  __ JumpIfSmi(eax, if_false);
3000  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3001  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3002  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3003  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3004  Split(not_zero, if_true, if_false, fall_through);
3005 
3006  context()->Plug(if_true, if_false);
3007 }
3008 
3009 
3010 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3011  CallRuntime* expr) {
3012  ZoneList<Expression*>* args = expr->arguments();
3013  ASSERT(args->length() == 1);
3014 
3015  VisitForAccumulatorValue(args->at(0));
3016 
3017  Label materialize_true, materialize_false, skip_lookup;
3018  Label* if_true = NULL;
3019  Label* if_false = NULL;
3020  Label* fall_through = NULL;
3021  context()->PrepareTest(&materialize_true, &materialize_false,
3022  &if_true, &if_false, &fall_through);
3023 
3024  __ AssertNotSmi(eax);
3025 
3026  // Check whether this map has already been checked to be safe for default
3027  // valueOf.
3028  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3029  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3030  1 << Map::kStringWrapperSafeForDefaultValueOf);
3031  __ j(not_zero, &skip_lookup);
3032 
3033  // Check for fast case object. Return false for slow case objects.
3034  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3035  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3036  __ cmp(ecx, isolate()->factory()->hash_table_map());
3037  __ j(equal, if_false);
3038 
3039  // Look for valueOf string in the descriptor array, and indicate false if
3040  // found. The enumeration index check is omitted, so if "valueOf" was added
3041  // via a transition that shares this descriptor array, this is a false positive.
3042  Label entry, loop, done;
3043 
3044  // Skip loop if no descriptors are valid.
3045  __ NumberOfOwnDescriptors(ecx, ebx);
3046  __ cmp(ecx, 0);
3047  __ j(equal, &done);
3048 
3049  __ LoadInstanceDescriptors(ebx, ebx);
3050  // ebx: descriptor array.
3051  // ecx: valid entries in the descriptor array.
3052  // Calculate the end of the descriptor array.
3053  STATIC_ASSERT(kSmiTag == 0);
3054  STATIC_ASSERT(kSmiTagSize == 1);
3055  STATIC_ASSERT(kPointerSize == 4);
3056  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3057  __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
3058  // Calculate location of the first key name.
3059  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3060  // Loop through all the keys in the descriptor array. If one of these is the
3061  // internalized string "valueOf" the result is false.
3062  __ jmp(&entry);
3063  __ bind(&loop);
3064  __ mov(edx, FieldOperand(ebx, 0));
3065  __ cmp(edx, isolate()->factory()->value_of_string());
3066  __ j(equal, if_false);
3067  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3068  __ bind(&entry);
3069  __ cmp(ebx, ecx);
3070  __ j(not_equal, &loop);
3071 
3072  __ bind(&done);
3073 
3074  // Reload map as register ebx was used as temporary above.
3075  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3076 
3077  // Set the bit in the map to indicate that there is no local valueOf field.
3078  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3079  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3080 
3081  __ bind(&skip_lookup);
3082 
3083  // If a valueOf property is not found on the object check that its
3084  // prototype is the un-modified String prototype. If not, the result is false.
3085  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3086  __ JumpIfSmi(ecx, if_false);
3087  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3088  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3089  __ mov(edx,
3090  FieldOperand(edx, GlobalObject::kNativeContextOffset));
3091  __ cmp(ecx,
3092  ContextOperand(edx,
3093  Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3094  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3095  Split(equal, if_true, if_false, fall_through);
3096 
3097  context()->Plug(if_true, if_false);
3098 }
3099 
3100 
3101 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3102  ZoneList<Expression*>* args = expr->arguments();
3103  ASSERT(args->length() == 1);
3104 
3105  VisitForAccumulatorValue(args->at(0));
3106 
3107  Label materialize_true, materialize_false;
3108  Label* if_true = NULL;
3109  Label* if_false = NULL;
3110  Label* fall_through = NULL;
3111  context()->PrepareTest(&materialize_true, &materialize_false,
3112  &if_true, &if_false, &fall_through);
3113 
3114  __ JumpIfSmi(eax, if_false);
3115  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3116  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3117  Split(equal, if_true, if_false, fall_through);
3118 
3119  context()->Plug(if_true, if_false);
3120 }
3121 
3122 
3123 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3124  ZoneList<Expression*>* args = expr->arguments();
3125  ASSERT(args->length() == 1);
3126 
3127  VisitForAccumulatorValue(args->at(0));
3128 
3129  Label materialize_true, materialize_false;
3130  Label* if_true = NULL;
3131  Label* if_false = NULL;
3132  Label* fall_through = NULL;
3133  context()->PrepareTest(&materialize_true, &materialize_false,
3134  &if_true, &if_false, &fall_through);
3135 
3136  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3137  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3138  // Check if the exponent half is 0x80000000. Comparing against 1 and
3139  // checking for overflow is the shortest possible encoding.
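// Rationale: cmp(x, 1) sets the overflow flag only for x == 0x80000000
// (INT_MIN), so j(no_overflow) rejects every exponent word except the -0.0
// bit pattern; the mantissa word is then checked for zero.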
3140  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3141  __ j(no_overflow, if_false);
3142  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3143  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3144  Split(equal, if_true, if_false, fall_through);
3145 
3146  context()->Plug(if_true, if_false);
3147 }
3148 
3149 
3150 
3151 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3152  ZoneList<Expression*>* args = expr->arguments();
3153  ASSERT(args->length() == 1);
3154 
3155  VisitForAccumulatorValue(args->at(0));
3156 
3157  Label materialize_true, materialize_false;
3158  Label* if_true = NULL;
3159  Label* if_false = NULL;
3160  Label* fall_through = NULL;
3161  context()->PrepareTest(&materialize_true, &materialize_false,
3162  &if_true, &if_false, &fall_through);
3163 
3164  __ JumpIfSmi(eax, if_false);
3165  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3166  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3167  Split(equal, if_true, if_false, fall_through);
3168 
3169  context()->Plug(if_true, if_false);
3170 }
3171 
3172 
3173 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3174  ZoneList<Expression*>* args = expr->arguments();
3175  ASSERT(args->length() == 1);
3176 
3177  VisitForAccumulatorValue(args->at(0));
3178 
3179  Label materialize_true, materialize_false;
3180  Label* if_true = NULL;
3181  Label* if_false = NULL;
3182  Label* fall_through = NULL;
3183  context()->PrepareTest(&materialize_true, &materialize_false,
3184  &if_true, &if_false, &fall_through);
3185 
3186  __ JumpIfSmi(eax, if_false);
3187  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3188  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3189  Split(equal, if_true, if_false, fall_through);
3190 
3191  context()->Plug(if_true, if_false);
3192 }
3193 
3194 
3195 
3196 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3197  ASSERT(expr->arguments()->length() == 0);
3198 
3199  Label materialize_true, materialize_false;
3200  Label* if_true = NULL;
3201  Label* if_false = NULL;
3202  Label* fall_through = NULL;
3203  context()->PrepareTest(&materialize_true, &materialize_false,
3204  &if_true, &if_false, &fall_through);
3205 
3206  // Get the frame pointer for the calling frame.
3207  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3208 
3209  // Skip the arguments adaptor frame if it exists.
3210  Label check_frame_marker;
3211  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3212  Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3213  __ j(not_equal, &check_frame_marker);
3214  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3215 
3216  // Check the marker in the calling frame.
3217  __ bind(&check_frame_marker);
3218  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3219  Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3220  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3221  Split(equal, if_true, if_false, fall_through);
3222 
3223  context()->Plug(if_true, if_false);
3224 }
3225 
3226 
3227 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3228  ZoneList<Expression*>* args = expr->arguments();
3229  ASSERT(args->length() == 2);
3230 
3231  // Load the two objects into registers and perform the comparison.
3232  VisitForStackValue(args->at(0));
3233  VisitForAccumulatorValue(args->at(1));
3234 
3235  Label materialize_true, materialize_false;
3236  Label* if_true = NULL;
3237  Label* if_false = NULL;
3238  Label* fall_through = NULL;
3239  context()->PrepareTest(&materialize_true, &materialize_false,
3240  &if_true, &if_false, &fall_through);
3241 
3242  __ pop(ebx);
3243  __ cmp(eax, ebx);
3244  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3245  Split(equal, if_true, if_false, fall_through);
3246 
3247  context()->Plug(if_true, if_false);
3248 }
3249 
3250 
3251 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3252  ZoneList<Expression*>* args = expr->arguments();
3253  ASSERT(args->length() == 1);
3254 
3255  // ArgumentsAccessStub expects the key in edx and the formal
3256  // parameter count in eax.
3257  VisitForAccumulatorValue(args->at(0));
3258  __ mov(edx, eax);
3259  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3260  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3261  __ CallStub(&stub);
3262  context()->Plug(eax);
3263 }
3264 
3265 
3266 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3267  ASSERT(expr->arguments()->length() == 0);
3268 
3269  Label exit;
3270  // Get the number of formal parameters.
3271  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3272 
3273  // Check if the calling frame is an arguments adaptor frame.
3274  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3275  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3276  Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3277  __ j(not_equal, &exit);
3278 
3279  // Arguments adaptor case: Read the arguments length from the
3280  // adaptor frame.
3281  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3282 
3283  __ bind(&exit);
3284  __ AssertSmi(eax);
3285  context()->Plug(eax);
3286 }
3287 
3288 
3289 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3290  ZoneList<Expression*>* args = expr->arguments();
3291  ASSERT(args->length() == 1);
3292  Label done, null, function, non_function_constructor;
3293 
3294  VisitForAccumulatorValue(args->at(0));
3295 
3296  // If the object is a smi, we return null.
3297  __ JumpIfSmi(eax, &null);
3298 
3299  // Check that the object is a JS object but take special care of JS
3300  // functions to make sure they have 'Function' as their class.
3301  // Assume that there are only two callable types, and one of them is at
3302  // either end of the type range for JS object types. Saves extra comparisons.
3303  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3304  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3305  // Map is now in eax.
3306  __ j(below, &null);
3307  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3308  FIRST_SPEC_OBJECT_TYPE + 1);
3309  __ j(equal, &function);
3310 
3311  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3312  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3313  LAST_SPEC_OBJECT_TYPE - 1);
3314  __ j(equal, &function);
3315  // Assume that there is no larger type.
3316  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3317 
3318  // Check if the constructor in the map is a JS function.
3319  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
3320  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3321  __ j(not_equal, &non_function_constructor);
3322 
3323  // eax now contains the constructor function. Grab the
3324  // instance class name from there.
3325  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3326  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3327  __ jmp(&done);
3328 
3329  // Functions have class 'Function'.
3330  __ bind(&function);
3331  __ mov(eax, isolate()->factory()->function_class_string());
3332  __ jmp(&done);
3333 
3334  // Objects with a non-function constructor have class 'Object'.
3335  __ bind(&non_function_constructor);
3336  __ mov(eax, isolate()->factory()->Object_string());
3337  __ jmp(&done);
3338 
3339  // Non-JS objects have class null.
3340  __ bind(&null);
3341  __ mov(eax, isolate()->factory()->null_value());
3342 
3343  // All done.
3344  __ bind(&done);
3345 
3346  context()->Plug(eax);
3347 }
3348 
3349 
3350 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3351  // Conditionally generate a log call.
3352  // Args:
3353  // 0 (literal string): The type of logging (corresponds to the flags).
3354  // This is used to determine whether or not to generate the log call.
3355  // 1 (string): Format string. Access the string at argument index 2
3356  // with '%2s' (see Logger::LogRuntime for all the formats).
3357  // 2 (array): Arguments to the format string.
3358  ZoneList<Expression*>* args = expr->arguments();
3359  ASSERT_EQ(args->length(), 3);
3360  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3361  VisitForStackValue(args->at(1));
3362  VisitForStackValue(args->at(2));
3363  __ CallRuntime(Runtime::kHiddenLog, 2);
3364  }
3365  // Finally, we're expected to leave a value on the top of the stack.
3366  __ mov(eax, isolate()->factory()->undefined_value());
3367  context()->Plug(eax);
3368 }
3369 
3370 
3371 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3372  // Load the arguments on the stack and call the stub.
3373  SubStringStub stub;
3374  ZoneList<Expression*>* args = expr->arguments();
3375  ASSERT(args->length() == 3);
3376  VisitForStackValue(args->at(0));
3377  VisitForStackValue(args->at(1));
3378  VisitForStackValue(args->at(2));
3379  __ CallStub(&stub);
3380  context()->Plug(eax);
3381 }
3382 
3383 
3384 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3385  // Load the arguments on the stack and call the stub.
3386  RegExpExecStub stub;
3387  ZoneList<Expression*>* args = expr->arguments();
3388  ASSERT(args->length() == 4);
3389  VisitForStackValue(args->at(0));
3390  VisitForStackValue(args->at(1));
3391  VisitForStackValue(args->at(2));
3392  VisitForStackValue(args->at(3));
3393  __ CallStub(&stub);
3394  context()->Plug(eax);
3395 }
3396 
3397 
3398 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3399  ZoneList<Expression*>* args = expr->arguments();
3400  ASSERT(args->length() == 1);
3401 
3402  VisitForAccumulatorValue(args->at(0)); // Load the object.
3403 
3404  Label done;
3405  // If the object is a smi return the object.
3406  __ JumpIfSmi(eax, &done, Label::kNear);
3407  // If the object is not a value type, return the object.
3408  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3409  __ j(not_equal, &done, Label::kNear);
3410  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3411 
3412  __ bind(&done);
3413  context()->Plug(eax);
3414 }
3415 
3416 
3417 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3418  ZoneList<Expression*>* args = expr->arguments();
3419  ASSERT(args->length() == 2);
3420  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3421  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3422 
3423  VisitForAccumulatorValue(args->at(0)); // Load the object.
3424 
3425  Label runtime, done, not_date_object;
3426  Register object = eax;
3427  Register result = eax;
3428  Register scratch = ecx;
3429 
3430  __ JumpIfSmi(object, &not_date_object);
3431  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3432  __ j(not_equal, &not_date_object);
3433 
3434  if (index->value() == 0) {
3435  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3436  __ jmp(&done);
3437  } else {
3438  if (index->value() < JSDate::kFirstUncachedField) {
3439  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3440  __ mov(scratch, Operand::StaticVariable(stamp));
3441  __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3442  __ j(not_equal, &runtime, Label::kNear);
3443  __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3444  kPointerSize * index->value()));
3445  __ jmp(&done);
3446  }
3447  __ bind(&runtime);
3448  __ PrepareCallCFunction(2, scratch);
3449  __ mov(Operand(esp, 0), object);
3450  __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3451  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3452  __ jmp(&done);
3453  }
3454 
3455  __ bind(&not_date_object);
3456  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
3457  __ bind(&done);
3458  context()->Plug(result);
3459 }
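
// A minimal illustrative sketch (not from the V8 sources) of the cache-stamp
// protocol above. The isolate bumps a global stamp whenever timezone or DST
// rules may have changed; a JSDate whose stamp still matches may serve its
// cached fields directly, otherwise the C function recomputes them. Field
// names and the 2014.0 value are placeholders.

#include <cstdint>
#include <cstdio>

namespace date_field_sketch {

uint32_t date_cache_stamp = 1;  // bumped on timezone/DST rule changes

struct JSDate {
  double value;          // field 0: the primitive time value, never cached
  uint32_t cache_stamp;  // stamp current when fields[] was last filled
  double fields[8];      // cached year, month, day, weekday, ...
};

double RecomputeField(JSDate* d, int index) {
  d->fields[index] = 2014.0;          // stand-in for the calendar arithmetic
  d->cache_stamp = date_cache_stamp;  // revalidates the whole cache
  return d->fields[index];
}

double GetDateField(JSDate* d, int index) {
  if (index == 0) return d->value;
  if (d->cache_stamp == date_cache_stamp) return d->fields[index];  // fast
  return RecomputeField(d, index);                                  // slow
}

}  // namespace date_field_sketch

int main() {
  date_field_sketch::JSDate d = {0.0, 0, {0}};
  std::printf("%g\n", date_field_sketch::GetDateField(&d, 3));  // recomputes
  std::printf("%g\n", date_field_sketch::GetDateField(&d, 3));  // cached read
  return 0;
}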
3460 
3461 
3462 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3463  ZoneList<Expression*>* args = expr->arguments();
3464  ASSERT_EQ(3, args->length());
3465 
3466  Register string = eax;
3467  Register index = ebx;
3468  Register value = ecx;
3469 
3470  VisitForStackValue(args->at(1)); // index
3471  VisitForStackValue(args->at(2)); // value
3472  VisitForAccumulatorValue(args->at(0)); // string
3473 
3474  __ pop(value);
3475  __ pop(index);
3476 
3477  if (FLAG_debug_code) {
3478  __ test(value, Immediate(kSmiTagMask));
3479  __ Check(zero, kNonSmiValue);
3480  __ test(index, Immediate(kSmiTagMask));
3481  __ Check(zero, kNonSmiValue);
3482  }
3483 
3484  __ SmiUntag(value);
3485  __ SmiUntag(index);
3486 
3487  if (FLAG_debug_code) {
3488  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3489  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3490  }
3491 
3492  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3493  value);
3494  context()->Plug(string);
3495 }
3496 
3497 
3498 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3499  ZoneList<Expression*>* args = expr->arguments();
3500  ASSERT_EQ(3, args->length());
3501 
3502  Register string = eax;
3503  Register index = ebx;
3504  Register value = ecx;
3505 
3506  VisitForStackValue(args->at(1)); // index
3507  VisitForStackValue(args->at(2)); // value
3508  VisitForAccumulatorValue(args->at(0)); // string
3509  __ pop(value);
3510  __ pop(index);
3511 
3512  if (FLAG_debug_code) {
3513  __ test(value, Immediate(kSmiTagMask));
3514  __ Check(zero, kNonSmiValue);
3515  __ test(index, Immediate(kSmiTagMask));
3516  __ Check(zero, kNonSmiValue);
3517  __ SmiUntag(index);
3518  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3519  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3520  __ SmiTag(index);
3521  }
3522 
3523  __ SmiUntag(value);
3524  // No need to untag a smi for two-byte addressing.
3525  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3526  value);
3527  context()->Plug(string);
3528 }
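
// A minimal illustrative sketch (not from the V8 sources) of why the
// two-byte store above can leave the index smi-tagged while the one-byte
// store first untags it: on ia32 a smi is the integer shifted left by one,
// so a smi-tagged index is exactly index * 2 -- already the byte offset of
// a two-byte character.

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;  // 32-bit smi: value lives in the upper 31 bits
  int32_t index = 7;
  int32_t smi_index = index << kSmiTagSize;  // Smi::FromInt(7) == 14

  int32_t one_byte_offset = smi_index >> kSmiTagSize;  // SmiUntag, 1 byte/char
  int32_t two_byte_offset = smi_index;                 // no untag: 2 bytes/char

  assert(one_byte_offset == index);
  assert(two_byte_offset == 2 * index);
  return 0;
}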
3529 
3530 
3531 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3532  // Load the arguments on the stack and call the runtime function.
3533  ZoneList<Expression*>* args = expr->arguments();
3534  ASSERT(args->length() == 2);
3535  VisitForStackValue(args->at(0));
3536  VisitForStackValue(args->at(1));
3537 
3538  if (CpuFeatures::IsSupported(SSE2)) {
3539  MathPowStub stub(MathPowStub::ON_STACK);
3540  __ CallStub(&stub);
3541  } else {
3542  __ CallRuntime(Runtime::kMath_pow, 2);
3543  }
3544  context()->Plug(eax);
3545 }
3546 
3547 
3548 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3549  ZoneList<Expression*>* args = expr->arguments();
3550  ASSERT(args->length() == 2);
3551 
3552  VisitForStackValue(args->at(0)); // Load the object.
3553  VisitForAccumulatorValue(args->at(1)); // Load the value.
3554  __ pop(ebx); // eax = value. ebx = object.
3555 
3556  Label done;
3557  // If the object is a smi, return the value.
3558  __ JumpIfSmi(ebx, &done, Label::kNear);
3559 
3560  // If the object is not a value type, return the value.
3561  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3562  __ j(not_equal, &done, Label::kNear);
3563 
3564  // Store the value.
3565  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3566 
3567  // Update the write barrier. Save the value as it will be
3568  // overwritten by the write barrier code and is needed afterward.
3569  __ mov(edx, eax);
3570  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3571 
3572  __ bind(&done);
3573  context()->Plug(eax);
3574 }
3575 
3576 
3577 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3578  ZoneList<Expression*>* args = expr->arguments();
3579  ASSERT_EQ(args->length(), 1);
3580 
3581  // Load the argument into eax and call the stub.
3582  VisitForAccumulatorValue(args->at(0));
3583 
3584  NumberToStringStub stub;
3585  __ CallStub(&stub);
3586  context()->Plug(eax);
3587 }
3588 
3589 
3590 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3591  ZoneList<Expression*>* args = expr->arguments();
3592  ASSERT(args->length() == 1);
3593 
3594  VisitForAccumulatorValue(args->at(0));
3595 
3596  Label done;
3597  StringCharFromCodeGenerator generator(eax, ebx);
3598  generator.GenerateFast(masm_);
3599  __ jmp(&done);
3600 
3601  NopRuntimeCallHelper call_helper;
3602  generator.GenerateSlow(masm_, call_helper);
3603 
3604  __ bind(&done);
3605  context()->Plug(ebx);
3606 }
3607 
3608 
3609 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3610  ZoneList<Expression*>* args = expr->arguments();
3611  ASSERT(args->length() == 2);
3612 
3613  VisitForStackValue(args->at(0));
3614  VisitForAccumulatorValue(args->at(1));
3615 
3616  Register object = ebx;
3617  Register index = eax;
3618  Register result = edx;
3619 
3620  __ pop(object);
3621 
3622  Label need_conversion;
3623  Label index_out_of_range;
3624  Label done;
3625  StringCharCodeAtGenerator generator(object,
3626  index,
3627  result,
3628  &need_conversion,
3629  &need_conversion,
3630  &index_out_of_range,
3631  STRING_INDEX_IS_NUMBER);
3632  generator.GenerateFast(masm_);
3633  __ jmp(&done);
3634 
3635  __ bind(&index_out_of_range);
3636  // When the index is out of range, the spec requires us to return
3637  // NaN.
3638  __ Move(result, Immediate(isolate()->factory()->nan_value()));
3639  __ jmp(&done);
3640 
3641  __ bind(&need_conversion);
3642  // Move the undefined value into the result register, which will
3643  // trigger conversion.
3644  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3645  __ jmp(&done);
3646 
3647  NopRuntimeCallHelper call_helper;
3648  generator.GenerateSlow(masm_, call_helper);
3649 
3650  __ bind(&done);
3651  context()->Plug(result);
3652 }
3653 
3654 
3655 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3656  ZoneList<Expression*>* args = expr->arguments();
3657  ASSERT(args->length() == 2);
3658 
3659  VisitForStackValue(args->at(0));
3660  VisitForAccumulatorValue(args->at(1));
3661 
3662  Register object = ebx;
3663  Register index = eax;
3664  Register scratch = edx;
3665  Register result = eax;
3666 
3667  __ pop(object);
3668 
3669  Label need_conversion;
3670  Label index_out_of_range;
3671  Label done;
3672  StringCharAtGenerator generator(object,
3673  index,
3674  scratch,
3675  result,
3676  &need_conversion,
3677  &need_conversion,
3678  &index_out_of_range,
3679  STRING_INDEX_IS_NUMBER);
3680  generator.GenerateFast(masm_);
3681  __ jmp(&done);
3682 
3683  __ bind(&index_out_of_range);
3684  // When the index is out of range, the spec requires us to return
3685  // the empty string.
3686  __ Move(result, Immediate(isolate()->factory()->empty_string()));
3687  __ jmp(&done);
3688 
3689  __ bind(&need_conversion);
3690  // Move smi zero into the result register, which will trigger
3691  // conversion.
3692  __ Move(result, Immediate(Smi::FromInt(0)));
3693  __ jmp(&done);
3694 
3695  NopRuntimeCallHelper call_helper;
3696  generator.GenerateSlow(masm_, call_helper);
3697 
3698  __ bind(&done);
3699  context()->Plug(result);
3700 }
3701 
3702 
3703 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3704  ZoneList<Expression*>* args = expr->arguments();
3705  ASSERT_EQ(2, args->length());
3706  VisitForStackValue(args->at(0));
3707  VisitForAccumulatorValue(args->at(1));
3708 
3709  __ pop(edx);
3710  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3711  __ CallStub(&stub);
3712  context()->Plug(eax);
3713 }
3714 
3715 
3716 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3717  ZoneList<Expression*>* args = expr->arguments();
3718  ASSERT_EQ(2, args->length());
3719 
3720  VisitForStackValue(args->at(0));
3721  VisitForStackValue(args->at(1));
3722 
3723  StringCompareStub stub;
3724  __ CallStub(&stub);
3725  context()->Plug(eax);
3726 }
3727 
3728 
3729 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3730  // Load the argument on the stack and call the runtime function.
3731  ZoneList<Expression*>* args = expr->arguments();
3732  ASSERT(args->length() == 1);
3733  VisitForStackValue(args->at(0));
3734  __ CallRuntime(Runtime::kMath_log, 1);
3735  context()->Plug(eax);
3736 }
3737 
3738 
3739 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3740  // Load the argument on the stack and call the runtime function.
3741  ZoneList<Expression*>* args = expr->arguments();
3742  ASSERT(args->length() == 1);
3743  VisitForStackValue(args->at(0));
3744  __ CallRuntime(Runtime::kMath_sqrt, 1);
3745  context()->Plug(eax);
3746 }
3747 
3748 
3749 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3750  ZoneList<Expression*>* args = expr->arguments();
3751  ASSERT(args->length() >= 2);
3752 
3753  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3754  for (int i = 0; i < arg_count + 1; ++i) {
3755  VisitForStackValue(args->at(i));
3756  }
3757  VisitForAccumulatorValue(args->last()); // Function.
3758 
3759  Label runtime, done;
3760  // Check for non-function argument (including proxy).
3761  __ JumpIfSmi(eax, &runtime);
3762  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3763  __ j(not_equal, &runtime);
3764 
3765  // InvokeFunction requires the function in edi. Move it in there.
3766  __ mov(edi, result_register());
3767  ParameterCount count(arg_count);
3768  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
3769  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3770  __ jmp(&done);
3771 
3772  __ bind(&runtime);
3773  __ push(eax);
3774  __ CallRuntime(Runtime::kCall, args->length());
3775  __ bind(&done);
3776 
3777  context()->Plug(eax);
3778 }
3779 
3780 
3781 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3782  // Load the arguments on the stack and call the stub.
3783  RegExpConstructResultStub stub;
3784  ZoneList<Expression*>* args = expr->arguments();
3785  ASSERT(args->length() == 3);
3786  VisitForStackValue(args->at(0));
3787  VisitForStackValue(args->at(1));
3788  VisitForAccumulatorValue(args->at(2));
3789  __ pop(ebx);
3790  __ pop(ecx);
3791  __ CallStub(&stub);
3792  context()->Plug(eax);
3793 }
3794 
3795 
3796 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3797  ZoneList<Expression*>* args = expr->arguments();
3798  ASSERT_EQ(2, args->length());
3799 
3800  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3801  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3802 
3803  Handle<FixedArray> jsfunction_result_caches(
3804  isolate()->native_context()->jsfunction_result_caches());
3805  if (jsfunction_result_caches->length() <= cache_id) {
3806  __ Abort(kAttemptToUseUndefinedCache);
3807  __ mov(eax, isolate()->factory()->undefined_value());
3808  context()->Plug(eax);
3809  return;
3810  }
3811 
3812  VisitForAccumulatorValue(args->at(1));
3813 
3814  Register key = eax;
3815  Register cache = ebx;
3816  Register tmp = ecx;
3817  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
3818  __ mov(cache,
3819  FieldOperand(cache, GlobalObject::kNativeContextOffset));
3820  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3821  __ mov(cache,
3822  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3823 
3824  Label done, not_found;
3825  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3826  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3827  // tmp now holds finger offset as a smi.
3828  __ cmp(key, FixedArrayElementOperand(cache, tmp));
3829  __ j(not_equal, &not_found);
3830 
3831  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
3832  __ jmp(&done);
3833 
3834  __ bind(&not_found);
3835  // Call runtime to perform the lookup.
3836  __ push(cache);
3837  __ push(key);
3838  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
3839 
3840  __ bind(&done);
3841  context()->Plug(eax);
3842 }
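
// A minimal illustrative sketch (not from the V8 sources) of the finger
// cache probed above. A JSFunctionResultCache is a fixed array of key/value
// pairs plus a "finger" remembering the most recent hit; the generated fast
// path compares only the key under the finger and leaves searching (and
// finger updates) to the runtime. The 16-entry layout is hypothetical.

#include <cstdio>

namespace result_cache_sketch {

struct ResultCache {
  int finger;    // element index of the most recently hit key
  int data[16];  // key, value, key, value, ...

  // Fast path: a single compare, mirroring the cmp against
  // FixedArrayElementOperand(cache, tmp) above.
  bool Probe(int key, int* value) {
    if (data[finger] == key) {
      *value = data[finger + 1];
      return true;
    }
    return false;  // slow path: Runtime::kHiddenGetFromCache scans all pairs
  }
};

}  // namespace result_cache_sketch

int main() {
  result_cache_sketch::ResultCache c = {2, {0}};
  c.data[2] = 42;    // cached key
  c.data[3] = 1764;  // cached value
  int v;
  if (c.Probe(42, &v)) std::printf("hit: %d\n", v);
  return 0;
}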
3843 
3844 
3845 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3846  ZoneList<Expression*>* args = expr->arguments();
3847  ASSERT(args->length() == 1);
3848 
3849  VisitForAccumulatorValue(args->at(0));
3850 
3851  __ AssertString(eax);
3852 
3853  Label materialize_true, materialize_false;
3854  Label* if_true = NULL;
3855  Label* if_false = NULL;
3856  Label* fall_through = NULL;
3857  context()->PrepareTest(&materialize_true, &materialize_false,
3858  &if_true, &if_false, &fall_through);
3859 
3860  __ test(FieldOperand(eax, String::kHashFieldOffset),
3861  Immediate(String::kContainsCachedArrayIndexMask));
3862  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3863  Split(zero, if_true, if_false, fall_through);
3864 
3865  context()->Plug(if_true, if_false);
3866 }
3867 
3868 
3869 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3870  ZoneList<Expression*>* args = expr->arguments();
3871  ASSERT(args->length() == 1);
3872  VisitForAccumulatorValue(args->at(0));
3873 
3874  __ AssertString(eax);
3875 
3876  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3877  __ IndexFromHash(eax, eax);
3878 
3879  context()->Plug(eax);
3880 }
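
// A minimal illustrative sketch (not from the V8 sources) of the hash-field
// trick behind the two helpers above. When a string is a small integer
// index ("7", "1024", ...), V8 stores that number in the string's hash
// field instead of a hash, and a mask bit distinguishes the two cases. The
// exact bit positions here are simplified placeholders.

#include <cassert>
#include <cstdint>

namespace array_index_sketch {

const uint32_t kContainsCachedArrayIndexMask = 1u << 0;  // 0 => index cached
const uint32_t kIndexShift = 2;

bool HasCachedArrayIndex(uint32_t hash_field) {
  // Mirrors the test + Split(zero, ...) above: zero means "cached".
  return (hash_field & kContainsCachedArrayIndexMask) == 0;
}

uint32_t IndexFromHash(uint32_t hash_field) {
  return hash_field >> kIndexShift;  // what __ IndexFromHash extracts
}

}  // namespace array_index_sketch

int main() {
  uint32_t field = 42u << array_index_sketch::kIndexShift;  // "42", cached
  assert(array_index_sketch::HasCachedArrayIndex(field));
  assert(array_index_sketch::IndexFromHash(field) == 42u);
  return 0;
}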
3881 
3882 
3883 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3884  Label bailout, done, one_char_separator, long_separator,
3885  non_trivial_array, not_size_one_array, loop,
3886  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3887 
3888  ZoneList<Expression*>* args = expr->arguments();
3889  ASSERT(args->length() == 2);
3890  // We will leave the separator on the stack until the end of the function.
3891  VisitForStackValue(args->at(1));
3892  // Load this to eax (= array)
3893  VisitForAccumulatorValue(args->at(0));
3894  // All aliases of the same register have disjoint lifetimes.
3895  Register array = eax;
3896  Register elements = no_reg; // Will be eax.
3897 
3898  Register index = edx;
3899 
3900  Register string_length = ecx;
3901 
3902  Register string = esi;
3903 
3904  Register scratch = ebx;
3905 
3906  Register array_length = edi;
3907  Register result_pos = no_reg; // Will be edi.
3908 
3909  // Separator operand is already pushed.
3910  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3911  Operand result_operand = Operand(esp, 1 * kPointerSize);
3912  Operand array_length_operand = Operand(esp, 0);
3913  __ sub(esp, Immediate(2 * kPointerSize));
3914  __ cld();
3915  // Check that the array is a JSArray
3916  __ JumpIfSmi(array, &bailout);
3917  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3918  __ j(not_equal, &bailout);
3919 
3920  // Check that the array has fast elements.
3921  __ CheckFastElements(scratch, &bailout);
3922 
3923  // If the array has length zero, return the empty string.
3924  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3925  __ SmiUntag(array_length);
3926  __ j(not_zero, &non_trivial_array);
3927  __ mov(result_operand, isolate()->factory()->empty_string());
3928  __ jmp(&done);
3929 
3930  // Save the array length.
3931  __ bind(&non_trivial_array);
3932  __ mov(array_length_operand, array_length);
3933 
3934  // Save the FixedArray containing array's elements.
3935  // End of array's live range.
3936  elements = array;
3937  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3938  array = no_reg;
3939 
3940 
3941  // Check that all array elements are sequential ASCII strings, and
3942  // accumulate the sum of their lengths, as a smi-encoded value.
3943  __ Move(index, Immediate(0));
3944  __ Move(string_length, Immediate(0));
3945  // Loop condition: while (index < length).
3946  // Live loop registers: index, array_length, string,
3947  // scratch, string_length, elements.
3948  if (generate_debug_code_) {
3949  __ cmp(index, array_length);
3950  __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3951  }
3952  __ bind(&loop);
3953  __ mov(string, FieldOperand(elements,
3954  index,
3955  times_pointer_size,
3956  FixedArray::kHeaderSize));
3957  __ JumpIfSmi(string, &bailout);
3958  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3959  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3960  __ and_(scratch, Immediate(
3961  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3962  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3963  __ j(not_equal, &bailout);
3964  __ add(string_length,
3965  FieldOperand(string, SeqOneByteString::kLengthOffset));
3966  __ j(overflow, &bailout);
3967  __ add(index, Immediate(1));
3968  __ cmp(index, array_length);
3969  __ j(less, &loop);
3970 
3971  // If array_length is 1, return elements[0], a string.
3972  __ cmp(array_length, 1);
3973  __ j(not_equal, &not_size_one_array);
3974  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3975  __ mov(result_operand, scratch);
3976  __ jmp(&done);
3977 
3978  __ bind(&not_size_one_array);
3979 
3980  // End of array_length live range.
3981  result_pos = array_length;
3982  array_length = no_reg;
3983 
3984  // Live registers:
3985  // string_length: Sum of string lengths, as a smi.
3986  // elements: FixedArray of strings.
3987 
3988  // Check that the separator is a flat ASCII string.
3989  __ mov(string, separator_operand);
3990  __ JumpIfSmi(string, &bailout);
3991  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3992  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3993  __ and_(scratch, Immediate(
3994  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3995  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3996  __ j(not_equal, &bailout);
3997 
3998  // Add (separator length times array_length) - separator length
3999  // to string_length.
4000  __ mov(scratch, separator_operand);
4001  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4002  __ sub(string_length, scratch); // May be negative, temporarily.
4003  __ imul(scratch, array_length_operand);
4004  __ j(overflow, &bailout);
4005  __ add(string_length, scratch);
4006  __ j(overflow, &bailout);
4007 
4008  __ shr(string_length, 1);
4009  // Live registers and stack values:
4010  // string_length
4011  // elements
4012  __ AllocateAsciiString(result_pos, string_length, scratch,
4013  index, string, &bailout);
4014  __ mov(result_operand, result_pos);
4015  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4016 
4017 
4018  __ mov(string, separator_operand);
4019  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4020  Immediate(Smi::FromInt(1)));
4021  __ j(equal, &one_char_separator);
4022  __ j(greater, &long_separator);
4023 
4024 
4025  // Empty separator case
4026  __ mov(index, Immediate(0));
4027  __ jmp(&loop_1_condition);
4028  // Loop condition: while (index < length).
4029  __ bind(&loop_1);
4030  // Each iteration of the loop concatenates one string to the result.
4031  // Live values in registers:
4032  // index: which element of the elements array we are adding to the result.
4033  // result_pos: the position to which we are currently copying characters.
4034  // elements: the FixedArray of strings we are joining.
4035 
4036  // Get string = array[index].
4037  __ mov(string, FieldOperand(elements, index,
4038  times_pointer_size,
4039  FixedArray::kHeaderSize));
4040  __ mov(string_length,
4041  FieldOperand(string, String::kLengthOffset));
4042  __ shr(string_length, 1);
4043  __ lea(string,
4044  FieldOperand(string, SeqOneByteString::kHeaderSize));
4045  __ CopyBytes(string, result_pos, string_length, scratch);
4046  __ add(index, Immediate(1));
4047  __ bind(&loop_1_condition);
4048  __ cmp(index, array_length_operand);
4049  __ j(less, &loop_1); // End while (index < length).
4050  __ jmp(&done);
4051 
4052 
4053 
4054  // One-character separator case
4055  __ bind(&one_char_separator);
4056  // Replace separator with its ASCII character value.
4057  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4058  __ mov_b(separator_operand, scratch);
4059 
4060  __ Move(index, Immediate(0));
4061  // Jump into the loop after the code that copies the separator, so the first
4062  // element is not preceded by a separator.
4063  __ jmp(&loop_2_entry);
4064  // Loop condition: while (index < length).
4065  __ bind(&loop_2);
4066  // Each iteration of the loop concatenates one string to the result.
4067  // Live values in registers:
4068  // index: which element of the elements array we are adding to the result.
4069  // result_pos: the position to which we are currently copying characters.
4070 
4071  // Copy the separator character to the result.
4072  __ mov_b(scratch, separator_operand);
4073  __ mov_b(Operand(result_pos, 0), scratch);
4074  __ inc(result_pos);
4075 
4076  __ bind(&loop_2_entry);
4077  // Get string = array[index].
4078  __ mov(string, FieldOperand(elements, index,
4079  times_pointer_size,
4080  FixedArray::kHeaderSize));
4081  __ mov(string_length,
4082  FieldOperand(string, String::kLengthOffset));
4083  __ shr(string_length, 1);
4084  __ lea(string,
4085  FieldOperand(string, SeqOneByteString::kHeaderSize));
4086  __ CopyBytes(string, result_pos, string_length, scratch);
4087  __ add(index, Immediate(1));
4088 
4089  __ cmp(index, array_length_operand);
4090  __ j(less, &loop_2); // End while (index < length).
4091  __ jmp(&done);
4092 
4093 
4094  // Long separator case (separator is more than one character).
4095  __ bind(&long_separator);
4096 
4097  __ Move(index, Immediate(0));
4098  // Jump into the loop after the code that copies the separator, so the first
4099  // element is not preceded by a separator.
4100  __ jmp(&loop_3_entry);
4101  // Loop condition: while (index < length).
4102  __ bind(&loop_3);
4103  // Each iteration of the loop concatenates one string to the result.
4104  // Live values in registers:
4105  // index: which element of the elements array we are adding to the result.
4106  // result_pos: the position to which we are currently copying characters.
4107 
4108  // Copy the separator to the result.
4109  __ mov(string, separator_operand);
4110  __ mov(string_length,
4111  FieldOperand(string, String::kLengthOffset));
4112  __ shr(string_length, 1);
4113  __ lea(string,
4114  FieldOperand(string, SeqOneByteString::kHeaderSize));
4115  __ CopyBytes(string, result_pos, string_length, scratch);
4116 
4117  __ bind(&loop_3_entry);
4118  // Get string = array[index].
4119  __ mov(string, FieldOperand(elements, index,
4120  times_pointer_size,
4121  FixedArray::kHeaderSize));
4122  __ mov(string_length,
4123  FieldOperand(string, String::kLengthOffset));
4124  __ shr(string_length, 1);
4125  __ lea(string,
4126  FieldOperand(string, SeqOneByteString::kHeaderSize));
4127  __ CopyBytes(string, result_pos, string_length, scratch);
4128  __ add(index, Immediate(1));
4129 
4130  __ cmp(index, array_length_operand);
4131  __ j(less, &loop_3); // End while (index < length).
4132  __ jmp(&done);
4133 
4134 
4135  __ bind(&bailout);
4136  __ mov(result_operand, isolate()->factory()->undefined_value());
4137  __ bind(&done);
4138  __ mov(eax, result_operand);
4139  // Drop temp values from the stack, and restore context register.
4140  __ add(esp, Immediate(3 * kPointerSize));
4141 
4142  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4143  context()->Plug(eax);
4144 }
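
// A minimal illustrative sketch (not from the V8 sources) of the fast-path
// shape above: one validation pass that sums lengths (the generated code
// bails out, returning undefined so the JS builtin takes the generic path,
// on non-flat or non-one-byte elements and on smi overflow), one exact
// allocation, then one of three copy loops chosen by separator length.

#include <cstdio>
#include <string>
#include <vector>

namespace array_join_sketch {

std::string FastJoin(const std::vector<std::string>& parts,
                     const std::string& sep) {
  if (parts.empty()) return std::string();  // the trivial-array case
  if (parts.size() == 1) return parts[0];   // the not_size_one_array case

  size_t length = sep.size() * (parts.size() - 1);
  for (const std::string& s : parts) length += s.size();

  std::string result;
  result.reserve(length);  // one allocation, like AllocateAsciiString
  if (sep.empty()) {       // loop_1: nothing between elements
    for (const std::string& s : parts) result += s;
  } else {                 // loop_2 / loop_3: separator before each element
    result += parts[0];    // ... except the first (the loop-entry jump)
    for (size_t i = 1; i < parts.size(); ++i) {
      result += sep;
      result += parts[i];
    }
  }
  return result;
}

}  // namespace array_join_sketch

int main() {
  std::vector<std::string> parts = {"a", "b", "c"};
  std::printf("%s\n", array_join_sketch::FastJoin(parts, ", ").c_str());
  return 0;  // prints "a, b, c"
}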
4145 
4146 
4147 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4148  if (expr->function() != NULL &&
4149  expr->function()->intrinsic_type == Runtime::INLINE) {
4150  Comment cmnt(masm_, "[ InlineRuntimeCall");
4151  EmitInlineRuntimeCall(expr);
4152  return;
4153  }
4154 
4155  Comment cmnt(masm_, "[ CallRuntime");
4156  ZoneList<Expression*>* args = expr->arguments();
4157 
4158  if (expr->is_jsruntime()) {
4159  // Push the builtins object as receiver.
4160  __ mov(eax, GlobalObjectOperand());
4161  __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4162 
4163  // Load the function from the receiver.
4164  __ mov(edx, Operand(esp, 0));
4165  __ mov(ecx, Immediate(expr->name()));
4166  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4167 
4168  // Push the target function under the receiver.
4169  __ push(Operand(esp, 0));
4170  __ mov(Operand(esp, kPointerSize), eax);
4171 
4172  // Code common for calls using the IC.
4173  ZoneList<Expression*>* args = expr->arguments();
4174  int arg_count = args->length();
4175  for (int i = 0; i < arg_count; i++) {
4176  VisitForStackValue(args->at(i));
4177  }
4178 
4179  // Record source position of the IC call.
4180  SetSourcePosition(expr->position());
4181  CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
4182  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4183  __ CallStub(&stub);
4184  // Restore context register.
4185  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4186  context()->DropAndPlug(1, eax);
4187 
4188  } else {
4189  // Push the arguments ("left-to-right").
4190  int arg_count = args->length();
4191  for (int i = 0; i < arg_count; i++) {
4192  VisitForStackValue(args->at(i));
4193  }
4194 
4195  // Call the C runtime function.
4196  __ CallRuntime(expr->function(), arg_count);
4197 
4198  context()->Plug(eax);
4199  }
4200 }
4201 
4202 
4203 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4204  switch (expr->op()) {
4205  case Token::DELETE: {
4206  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4207  Property* property = expr->expression()->AsProperty();
4208  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4209 
4210  if (property != NULL) {
4211  VisitForStackValue(property->obj());
4212  VisitForStackValue(property->key());
4213  __ push(Immediate(Smi::FromInt(strict_mode())));
4214  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4215  context()->Plug(eax);
4216  } else if (proxy != NULL) {
4217  Variable* var = proxy->var();
4218  // Delete of an unqualified identifier is disallowed in strict mode
4219  // but "delete this" is allowed.
4220  ASSERT(strict_mode() == SLOPPY || var->is_this());
4221  if (var->IsUnallocated()) {
4222  __ push(GlobalObjectOperand());
4223  __ push(Immediate(var->name()));
4224  __ push(Immediate(Smi::FromInt(SLOPPY)));
4225  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4226  context()->Plug(eax);
4227  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4228  // Result of deleting non-global variables is false. 'this' is
4229  // not really a variable, though we implement it as one. The
4230  // subexpression does not have side effects.
4231  context()->Plug(var->is_this());
4232  } else {
4233  // Non-global variable. Call the runtime to try to delete from the
4234  // context where the variable was introduced.
4235  __ push(context_register());
4236  __ push(Immediate(var->name()));
4237  __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
4238  context()->Plug(eax);
4239  }
4240  } else {
4241  // Result of deleting non-property, non-variable reference is true.
4242  // The subexpression may have side effects.
4243  VisitForEffect(expr->expression());
4244  context()->Plug(true);
4245  }
4246  break;
4247  }
4248 
4249  case Token::VOID: {
4250  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4251  VisitForEffect(expr->expression());
4252  context()->Plug(isolate()->factory()->undefined_value());
4253  break;
4254  }
4255 
4256  case Token::NOT: {
4257  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4258  if (context()->IsEffect()) {
4259  // Unary NOT has no side effects so it's only necessary to visit the
4260  // subexpression. Match the optimizing compiler by not branching.
4261  VisitForEffect(expr->expression());
4262  } else if (context()->IsTest()) {
4263  const TestContext* test = TestContext::cast(context());
4264  // The labels are swapped for the recursive call.
4265  VisitForControl(expr->expression(),
4266  test->false_label(),
4267  test->true_label(),
4268  test->fall_through());
4269  context()->Plug(test->true_label(), test->false_label());
4270  } else {
4271  // We handle value contexts explicitly rather than simply visiting
4272  // for control and plugging the control flow into the context,
4273  // because we need to prepare a pair of extra administrative AST ids
4274  // for the optimizing compiler.
4275  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4276  Label materialize_true, materialize_false, done;
4277  VisitForControl(expr->expression(),
4278  &materialize_false,
4279  &materialize_true,
4280  &materialize_true);
4281  __ bind(&materialize_true);
4282  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4283  if (context()->IsAccumulatorValue()) {
4284  __ mov(eax, isolate()->factory()->true_value());
4285  } else {
4286  __ Push(isolate()->factory()->true_value());
4287  }
4288  __ jmp(&done, Label::kNear);
4289  __ bind(&materialize_false);
4290  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4291  if (context()->IsAccumulatorValue()) {
4292  __ mov(eax, isolate()->factory()->false_value());
4293  } else {
4294  __ Push(isolate()->factory()->false_value());
4295  }
4296  __ bind(&done);
4297  }
4298  break;
4299  }
4300 
4301  case Token::TYPEOF: {
4302  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4303  { StackValueContext context(this);
4304  VisitForTypeofValue(expr->expression());
4305  }
4306  __ CallRuntime(Runtime::kTypeof, 1);
4307  context()->Plug(eax);
4308  break;
4309  }
4310 
4311  default:
4312  UNREACHABLE();
4313  }
4314 }
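
// A minimal illustrative sketch (not from the V8 sources) of the label swap
// used for unary NOT in a test context above: no boolean is materialized;
// the negation is free because the sub-expression is compiled with its true
// and false targets exchanged. The mini "code generator" is hypothetical.

#include <cstdio>

namespace not_sketch {

struct Label { const char* name; };

// Stand-in for VisitForControl: branch on the sub-expression's value.
void VisitForControl(bool value, Label* if_true, Label* if_false) {
  std::printf("jump to %s\n", value ? if_true->name : if_false->name);
}

// !expr in a test context: same visit, targets swapped.
void VisitNotForControl(bool value, Label* if_true, Label* if_false) {
  VisitForControl(value, /* if_true= */ if_false, /* if_false= */ if_true);
}

}  // namespace not_sketch

int main() {
  not_sketch::Label t = {"then"}, f = {"else"};
  not_sketch::VisitNotForControl(true, &t, &f);  // prints "jump to else"
  return 0;
}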
4315 
4316 
4317 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4318  ASSERT(expr->expression()->IsValidLeftHandSide());
4319 
4320  Comment cmnt(masm_, "[ CountOperation");
4321  SetSourcePosition(expr->position());
4322 
4323  // Expression can only be a property, a global or a (parameter or local)
4324  // slot.
4325  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4326  LhsKind assign_type = VARIABLE;
4327  Property* prop = expr->expression()->AsProperty();
4328  // In case of a property we use the uninitialized expression context
4329  // of the key to detect a named property.
4330  if (prop != NULL) {
4331  assign_type =
4332  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4333  }
4334 
4335  // Evaluate expression and get value.
4336  if (assign_type == VARIABLE) {
4337  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4338  AccumulatorValueContext context(this);
4339  EmitVariableLoad(expr->expression()->AsVariableProxy());
4340  } else {
4341  // Reserve space for result of postfix operation.
4342  if (expr->is_postfix() && !context()->IsEffect()) {
4343  __ push(Immediate(Smi::FromInt(0)));
4344  }
4345  if (assign_type == NAMED_PROPERTY) {
4346  // Put the object both on the stack and in edx.
4347  VisitForAccumulatorValue(prop->obj());
4348  __ push(eax);
4349  __ mov(edx, eax);
4350  EmitNamedPropertyLoad(prop);
4351  } else {
4352  VisitForStackValue(prop->obj());
4353  VisitForStackValue(prop->key());
4354  __ mov(edx, Operand(esp, kPointerSize)); // Object.
4355  __ mov(ecx, Operand(esp, 0)); // Key.
4356  EmitKeyedPropertyLoad(prop);
4357  }
4358  }
4359 
4360  // We need a second deoptimization point after loading the value
4361  // in case evaluating the property load may have a side effect.
4362  if (assign_type == VARIABLE) {
4363  PrepareForBailout(expr->expression(), TOS_REG);
4364  } else {
4365  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4366  }
4367 
4368  // Inline smi case if we are in a loop.
4369  Label done, stub_call;
4370  JumpPatchSite patch_site(masm_);
4371  if (ShouldInlineSmiCase(expr->op())) {
4372  Label slow;
4373  patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4374 
4375  // Save result for postfix expressions.
4376  if (expr->is_postfix()) {
4377  if (!context()->IsEffect()) {
4378  // Save the result on the stack. If we have a named or keyed property
4379  // we store the result under the receiver that is currently on top
4380  // of the stack.
4381  switch (assign_type) {
4382  case VARIABLE:
4383  __ push(eax);
4384  break;
4385  case NAMED_PROPERTY:
4386  __ mov(Operand(esp, kPointerSize), eax);
4387  break;
4388  case KEYED_PROPERTY:
4389  __ mov(Operand(esp, 2 * kPointerSize), eax);
4390  break;
4391  }
4392  }
4393  }
4394 
4395  if (expr->op() == Token::INC) {
4396  __ add(eax, Immediate(Smi::FromInt(1)));
4397  } else {
4398  __ sub(eax, Immediate(Smi::FromInt(1)));
4399  }
4400  __ j(no_overflow, &done, Label::kNear);
4401  // Call stub. Undo operation first.
4402  if (expr->op() == Token::INC) {
4403  __ sub(eax, Immediate(Smi::FromInt(1)));
4404  } else {
4405  __ add(eax, Immediate(Smi::FromInt(1)));
4406  }
4407  __ jmp(&stub_call, Label::kNear);
4408  __ bind(&slow);
4409  }
4410  ToNumberStub convert_stub;
4411  __ CallStub(&convert_stub);
4412 
4413  // Save result for postfix expressions.
4414  if (expr->is_postfix()) {
4415  if (!context()->IsEffect()) {
4416  // Save the result on the stack. If we have a named or keyed property
4417  // we store the result under the receiver that is currently on top
4418  // of the stack.
4419  switch (assign_type) {
4420  case VARIABLE:
4421  __ push(eax);
4422  break;
4423  case NAMED_PROPERTY:
4424  __ mov(Operand(esp, kPointerSize), eax);
4425  break;
4426  case KEYED_PROPERTY:
4427  __ mov(Operand(esp, 2 * kPointerSize), eax);
4428  break;
4429  }
4430  }
4431  }
4432 
4433  // Record position before stub call.
4434  SetSourcePosition(expr->position());
4435 
4436  // Call stub for +1/-1.
4437  __ bind(&stub_call);
4438  __ mov(edx, eax);
4439  __ mov(eax, Immediate(Smi::FromInt(1)));
4440  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
4441  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4442  patch_site.EmitPatchInfo();
4443  __ bind(&done);
4444 
4445  // Store the value returned in eax.
4446  switch (assign_type) {
4447  case VARIABLE:
4448  if (expr->is_postfix()) {
4449  // Perform the assignment as if via '='.
4450  { EffectContext context(this);
4451  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4452  Token::ASSIGN);
4453  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4454  context.Plug(eax);
4455  }
4456  // For all contexts except EffectContext we have the result on
4457  // top of the stack.
4458  if (!context()->IsEffect()) {
4459  context()->PlugTOS();
4460  }
4461  } else {
4462  // Perform the assignment as if via '='.
4463  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4464  Token::ASSIGN);
4465  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4466  context()->Plug(eax);
4467  }
4468  break;
4469  case NAMED_PROPERTY: {
4470  __ mov(ecx, prop->key()->AsLiteral()->value());
4471  __ pop(edx);
4472  CallStoreIC(expr->CountStoreFeedbackId());
4473  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4474  if (expr->is_postfix()) {
4475  if (!context()->IsEffect()) {
4476  context()->PlugTOS();
4477  }
4478  } else {
4479  context()->Plug(eax);
4480  }
4481  break;
4482  }
4483  case KEYED_PROPERTY: {
4484  __ pop(ecx);
4485  __ pop(edx);
4486  Handle<Code> ic = strict_mode() == SLOPPY
4487  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4488  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4489  CallIC(ic, expr->CountStoreFeedbackId());
4490  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4491  if (expr->is_postfix()) {
4492  // Result is on the stack
4493  if (!context()->IsEffect()) {
4494  context()->PlugTOS();
4495  }
4496  } else {
4497  context()->Plug(eax);
4498  }
4499  break;
4500  }
4501  }
4502 }
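
// A minimal illustrative sketch (not from the V8 sources) of the inline smi
// fast path above. Smi::FromInt(1) is the integer 2 on a 32-bit smi system,
// so ++/-- is a single add whose overflow flag decides between staying
// inline and undoing the add before handing the original value to the
// BinaryOpIC stub. __builtin_add_overflow (GCC/Clang) stands in for the
// add + "j no_overflow" pair.

#include <cstdint>
#include <cstdio>

namespace count_op_sketch {

const int32_t kSmiOne = 1 << 1;  // Smi::FromInt(1)

// Returns true and the new smi on the fast path; false (operand unchanged,
// i.e. the add undone) when the stub must take over.
bool InlineSmiIncrement(int32_t smi, int32_t* out) {
  int32_t sum;
  if (__builtin_add_overflow(smi, kSmiOne, &sum)) return false;
  *out = sum;
  return true;
}

}  // namespace count_op_sketch

int main() {
  int32_t result;
  if (count_op_sketch::InlineSmiIncrement(5 << 1, &result))
    std::printf("%d\n", result >> 1);  // prints 6
  return 0;
}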
4503 
4504 
4505 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4506  VariableProxy* proxy = expr->AsVariableProxy();
4507  ASSERT(!context()->IsEffect());
4508  ASSERT(!context()->IsTest());
4509 
4510  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4511  Comment cmnt(masm_, "[ Global variable");
4512  __ mov(edx, GlobalObjectOperand());
4513  __ mov(ecx, Immediate(proxy->name()));
4514  // Use a regular load, not a contextual load, to avoid a reference
4515  // error.
4516  CallLoadIC(NOT_CONTEXTUAL);
4517  PrepareForBailout(expr, TOS_REG);
4518  context()->Plug(eax);
4519  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4520  Comment cmnt(masm_, "[ Lookup slot");
4521  Label done, slow;
4522 
4523  // Generate code for loading from variables potentially shadowed
4524  // by eval-introduced variables.
4525  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4526 
4527  __ bind(&slow);
4528  __ push(esi);
4529  __ push(Immediate(proxy->name()));
4530  __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
4531  PrepareForBailout(expr, TOS_REG);
4532  __ bind(&done);
4533 
4534  context()->Plug(eax);
4535  } else {
4536  // This expression cannot throw a reference error at the top level.
4537  VisitInDuplicateContext(expr);
4538  }
4539 }
4540 
4541 
4542 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4543  Expression* sub_expr,
4544  Handle<String> check) {
4545  Label materialize_true, materialize_false;
4546  Label* if_true = NULL;
4547  Label* if_false = NULL;
4548  Label* fall_through = NULL;
4549  context()->PrepareTest(&materialize_true, &materialize_false,
4550  &if_true, &if_false, &fall_through);
4551 
4552  { AccumulatorValueContext context(this);
4553  VisitForTypeofValue(sub_expr);
4554  }
4555  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4556 
4557  if (check->Equals(isolate()->heap()->number_string())) {
4558  __ JumpIfSmi(eax, if_true);
4559  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4560  isolate()->factory()->heap_number_map());
4561  Split(equal, if_true, if_false, fall_through);
4562  } else if (check->Equals(isolate()->heap()->string_string())) {
4563  __ JumpIfSmi(eax, if_false);
4564  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4565  __ j(above_equal, if_false);
4566  // Check for undetectable objects => false.
4567  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4568  1 << Map::kIsUndetectable);
4569  Split(zero, if_true, if_false, fall_through);
4570  } else if (check->Equals(isolate()->heap()->symbol_string())) {
4571  __ JumpIfSmi(eax, if_false);
4572  __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4573  Split(equal, if_true, if_false, fall_through);
4574  } else if (check->Equals(isolate()->heap()->boolean_string())) {
4575  __ cmp(eax, isolate()->factory()->true_value());
4576  __ j(equal, if_true);
4577  __ cmp(eax, isolate()->factory()->false_value());
4578  Split(equal, if_true, if_false, fall_through);
4579  } else if (FLAG_harmony_typeof &&
4580  check->Equals(isolate()->heap()->null_string())) {
4581  __ cmp(eax, isolate()->factory()->null_value());
4582  Split(equal, if_true, if_false, fall_through);
4583  } else if (check->Equals(isolate()->heap()->undefined_string())) {
4584  __ cmp(eax, isolate()->factory()->undefined_value());
4585  __ j(equal, if_true);
4586  __ JumpIfSmi(eax, if_false);
4587  // Check for undetectable objects => true.
4588  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4589  __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4590  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4591  Split(not_zero, if_true, if_false, fall_through);
4592  } else if (check->Equals(isolate()->heap()->function_string())) {
4593  __ JumpIfSmi(eax, if_false);
4594  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4595  __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4596  __ j(equal, if_true);
4597  __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4598  Split(equal, if_true, if_false, fall_through);
4599  } else if (check->Equals(isolate()->heap()->object_string())) {
4600  __ JumpIfSmi(eax, if_false);
4601  if (!FLAG_harmony_typeof) {
4602  __ cmp(eax, isolate()->factory()->null_value());
4603  __ j(equal, if_true);
4604  }
4605  __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4606  __ j(below, if_false);
4607  __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4608  __ j(above, if_false);
4609  // Check for undetectable objects => false.
4610  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4611  1 << Map::kIsUndetectable);
4612  Split(zero, if_true, if_false, fall_through);
4613  } else {
4614  if (if_false != fall_through) __ jmp(if_false);
4615  }
4616  context()->Plug(if_true, if_false);
4617 }
4618 
4619 
4620 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4621  Comment cmnt(masm_, "[ CompareOperation");
4622  SetSourcePosition(expr->position());
4623 
4624  // First we try a fast inlined version of the compare when one of
4625  // the operands is a literal.
4626  if (TryLiteralCompare(expr)) return;
4627 
4628  // Always perform the comparison for its control flow. Pack the result
4629  // into the expression's context after the comparison is performed.
4630  Label materialize_true, materialize_false;
4631  Label* if_true = NULL;
4632  Label* if_false = NULL;
4633  Label* fall_through = NULL;
4634  context()->PrepareTest(&materialize_true, &materialize_false,
4635  &if_true, &if_false, &fall_through);
4636 
4637  Token::Value op = expr->op();
4638  VisitForStackValue(expr->left());
4639  switch (op) {
4640  case Token::IN:
4641  VisitForStackValue(expr->right());
4642  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4643  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4644  __ cmp(eax, isolate()->factory()->true_value());
4645  Split(equal, if_true, if_false, fall_through);
4646  break;
4647 
4648  case Token::INSTANCEOF: {
4649  VisitForStackValue(expr->right());
4650  InstanceofStub stub(InstanceofStub::kNoFlags);
4651  __ CallStub(&stub);
4652  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4653  __ test(eax, eax);
4654  // The stub returns 0 for true.
4655  Split(zero, if_true, if_false, fall_through);
4656  break;
4657  }
4658 
4659  default: {
4660  VisitForAccumulatorValue(expr->right());
4661  Condition cc = CompareIC::ComputeCondition(op);
4662  __ pop(edx);
4663 
4664  bool inline_smi_code = ShouldInlineSmiCase(op);
4665  JumpPatchSite patch_site(masm_);
4666  if (inline_smi_code) {
4667  Label slow_case;
4668  __ mov(ecx, edx);
4669  __ or_(ecx, eax);
4670  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4671  __ cmp(edx, eax);
4672  Split(cc, if_true, if_false, NULL);
4673  __ bind(&slow_case);
4674  }
4675 
4676  // Record position and call the compare IC.
4677  SetSourcePosition(expr->position());
4678  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4679  CallIC(ic, expr->CompareOperationFeedbackId());
4680  patch_site.EmitPatchInfo();
4681 
4682  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4683  __ test(eax, eax);
4684  Split(cc, if_true, if_false, fall_through);
4685  }
4686  }
4687 
4688  // Convert the result of the comparison into one expected for this
4689  // expression's context.
4690  context()->Plug(if_true, if_false);
4691 }
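
// A minimal illustrative sketch (not from the V8 sources) of the or_/test
// idiom in the inline compare above: the smi tag is a clear low bit, so
// OR-ing both operands and testing bit 0 once checks both tags at the same
// time. Only when both are smis is the raw integer compare valid.

#include <cassert>
#include <cstdint>

namespace compare_sketch {

const int32_t kSmiTagMask = 1;

bool BothSmis(int32_t a, int32_t b) {
  return ((a | b) & kSmiTagMask) == 0;  // one test for two tag bits
}

}  // namespace compare_sketch

int main() {
  int32_t smi_3 = 3 << 1, smi_4 = 4 << 1;
  assert(compare_sketch::BothSmis(smi_3, smi_4));
  assert(!compare_sketch::BothSmis(smi_3, smi_4 | 1));  // tagged pointer
  return 0;
}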
4692 
4693 
4694 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4695  Expression* sub_expr,
4696  NilValue nil) {
4697  Label materialize_true, materialize_false;
4698  Label* if_true = NULL;
4699  Label* if_false = NULL;
4700  Label* fall_through = NULL;
4701  context()->PrepareTest(&materialize_true, &materialize_false,
4702  &if_true, &if_false, &fall_through);
4703 
4704  VisitForAccumulatorValue(sub_expr);
4705  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4706 
4707  Handle<Object> nil_value = nil == kNullValue
4708  ? isolate()->factory()->null_value()
4709  : isolate()->factory()->undefined_value();
4710  if (expr->op() == Token::EQ_STRICT) {
4711  __ cmp(eax, nil_value);
4712  Split(equal, if_true, if_false, fall_through);
4713  } else {
4714  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4715  CallIC(ic, expr->CompareOperationFeedbackId());
4716  __ test(eax, eax);
4717  Split(not_zero, if_true, if_false, fall_through);
4718  }
4719  context()->Plug(if_true, if_false);
4720 }
4721 
4722 
4723 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4724  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4725  context()->Plug(eax);
4726 }
4727 
4728 
4729 Register FullCodeGenerator::result_register() {
4730  return eax;
4731 }
4732 
4733 
4734 Register FullCodeGenerator::context_register() {
4735  return esi;
4736 }
4737 
4738 
4739 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4740  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4741  __ mov(Operand(ebp, frame_offset), value);
4742 }
4743 
4744 
4745 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4746  __ mov(dst, ContextOperand(esi, context_index));
4747 }
4748 
4749 
4750 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4751  Scope* declaration_scope = scope()->DeclarationScope();
4752  if (declaration_scope->is_global_scope() ||
4753  declaration_scope->is_module_scope()) {
4754  // Contexts nested in the native context have a canonical empty function
4755  // as their closure, not the anonymous closure containing the global
4756  // code. Pass a smi sentinel and let the runtime look up the empty
4757  // function.
4758  __ push(Immediate(Smi::FromInt(0)));
4759  } else if (declaration_scope->is_eval_scope()) {
4760  // Contexts nested inside eval code have the same closure as the context
4761  // calling eval, not the anonymous closure containing the eval code.
4762  // Fetch it from the context.
4763  __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
4764  } else {
4765  ASSERT(declaration_scope->is_function_scope());
4766  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4767  }
4768 }
4769 
4770 
4771 // ----------------------------------------------------------------------------
4772 // Non-local control flow support.
4773 
4774 void FullCodeGenerator::EnterFinallyBlock() {
4775  // Cook return address on top of stack (smi encoded Code* delta)
4776  ASSERT(!result_register().is(edx));
4777  __ pop(edx);
4778  __ sub(edx, Immediate(masm_->CodeObject()));
4779  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
4780  STATIC_ASSERT(kSmiTag == 0);
4781  __ SmiTag(edx);
4782  __ push(edx);
4783 
4784  // Store result register while executing finally block.
4785  __ push(result_register());
4786 
4787  // Store pending message while executing finally block.
4788  ExternalReference pending_message_obj =
4789  ExternalReference::address_of_pending_message_obj(isolate());
4790  __ mov(edx, Operand::StaticVariable(pending_message_obj));
4791  __ push(edx);
4792 
4793  ExternalReference has_pending_message =
4794  ExternalReference::address_of_has_pending_message(isolate());
4795  __ mov(edx, Operand::StaticVariable(has_pending_message));
4796  __ SmiTag(edx);
4797  __ push(edx);
4798 
4799  ExternalReference pending_message_script =
4800  ExternalReference::address_of_pending_message_script(isolate());
4801  __ mov(edx, Operand::StaticVariable(pending_message_script));
4802  __ push(edx);
4803 }
4804 
4805 
4806 void FullCodeGenerator::ExitFinallyBlock() {
4807  ASSERT(!result_register().is(edx));
4808  // Restore pending message from stack.
4809  __ pop(edx);
4810  ExternalReference pending_message_script =
4811  ExternalReference::address_of_pending_message_script(isolate());
4812  __ mov(Operand::StaticVariable(pending_message_script), edx);
4813 
4814  __ pop(edx);
4815  __ SmiUntag(edx);
4816  ExternalReference has_pending_message =
4817  ExternalReference::address_of_has_pending_message(isolate());
4818  __ mov(Operand::StaticVariable(has_pending_message), edx);
4819 
4820  __ pop(edx);
4821  ExternalReference pending_message_obj =
4822  ExternalReference::address_of_pending_message_obj(isolate());
4823  __ mov(Operand::StaticVariable(pending_message_obj), edx);
4824 
4825  // Restore result register from stack.
4826  __ pop(result_register());
4827 
4828  // Uncook return address.
4829  __ pop(edx);
4830  __ SmiUntag(edx);
4831  __ add(edx, Immediate(masm_->CodeObject()));
4832  __ jmp(edx);
4833 }
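
// A minimal illustrative sketch (not from the V8 sources) of the return-
// address "cooking" above. A raw return address into unoptimized code would
// be a stale pointer if the GC moved the code object, so the finally
// machinery keeps the smi-tagged delta from the code object's start and
// rebuilds the address on exit.

#include <cassert>
#include <cstdint>

namespace finally_sketch {

intptr_t Cook(intptr_t return_address, intptr_t code_start) {
  return (return_address - code_start) << 1;  // SmiTag: GC sees an integer
}

intptr_t Uncook(intptr_t cooked, intptr_t code_start) {
  return (cooked >> 1) + code_start;  // SmiUntag, rebase on the current start
}

}  // namespace finally_sketch

int main() {
  intptr_t code_start = 0x1000, return_address = 0x1234;
  intptr_t cooked = finally_sketch::Cook(return_address, code_start);
  // ... finally block runs; the code object may have moved meanwhile ...
  assert(finally_sketch::Uncook(cooked, code_start) == return_address);
  return 0;
}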
4834 
4835 
4836 #undef __
4837 
4838 #define __ ACCESS_MASM(masm())
4839 
4840 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4841  int* stack_depth,
4842  int* context_length) {
4843  // The macros used here must preserve the result register.
4844 
4845  // Because the handler block contains the context of the finally
4846  // code, we can restore it directly from there for the finally code
4847  // rather than iteratively unwinding contexts via their previous
4848  // links.
4849  __ Drop(*stack_depth); // Down to the handler block.
4850  if (*context_length > 0) {
4851  // Restore the context to its dedicated register and the stack.
4852  __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
4853  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
4854  }
4855  __ PopTryHandler();
4856  __ call(finally_entry_);
4857 
4858  *stack_depth = 0;
4859  *context_length = 0;
4860  return previous_;
4861 }
4862 
4863 #undef __
4864 
4865 
4866 static const byte kJnsInstruction = 0x79;
4867 static const byte kJnsOffset = 0x11;
4868 static const byte kNopByteOne = 0x66;
4869 static const byte kNopByteTwo = 0x90;
4870 #ifdef DEBUG
4871 static const byte kCallInstruction = 0xe8;
4872 #endif
4873 
4874 
4875 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4876  Address pc,
4877  BackEdgeState target_state,
4878  Code* replacement_code) {
4879  Address call_target_address = pc - kIntSize;
4880  Address jns_instr_address = call_target_address - 3;
4881  Address jns_offset_address = call_target_address - 2;
4882 
4883  switch (target_state) {
4884  case INTERRUPT:
4885  // sub <profiling_counter>, <delta> ;; Not changed
4886  // jns ok
4887  // call <interrupt stub>
4888  // ok:
4889  *jns_instr_address = kJnsInstruction;
4890  *jns_offset_address = kJnsOffset;
4891  break;
4892  case ON_STACK_REPLACEMENT:
4893  case OSR_AFTER_STACK_CHECK:
4894  // sub <profiling_counter>, <delta> ;; Not changed
4895  // nop
4896  // nop
4897  // call <on-stack replacement>
4898  // ok:
4899  *jns_instr_address = kNopByteOne;
4900  *jns_offset_address = kNopByteTwo;
4901  break;
4902  }
4903 
4904  Assembler::set_target_address_at(call_target_address,
4905  unoptimized_code,
4906  replacement_code->entry());
4907  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4908  unoptimized_code, call_target_address, replacement_code);
4909 }
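
// A minimal illustrative sketch (not from the V8 sources) of the two byte
// patterns PatchAt toggles. A back edge is compiled as
//   sub <counter>, <delta> ; jns ok ; call <stub> ; ok:
// and patching overwrites exactly the two jns bytes with the two-byte nop
// 0x66 0x90, making the call unconditional so on-stack replacement is
// entered. The buffer here stands in for the instruction stream.

#include <cassert>
#include <cstdint>

namespace back_edge_sketch {

const uint8_t kJnsInstruction = 0x79, kJnsOffset = 0x11;
const uint8_t kNopByteOne = 0x66, kNopByteTwo = 0x90;

// jns_instr points at the jns opcode (pc - kIntSize - 3 in the code above).
void Patch(uint8_t* jns_instr, bool force_osr_call) {
  jns_instr[0] = force_osr_call ? kNopByteOne : kJnsInstruction;
  jns_instr[1] = force_osr_call ? kNopByteTwo : kJnsOffset;
}

}  // namespace back_edge_sketch

int main() {
  uint8_t site[2] = {back_edge_sketch::kJnsInstruction,
                     back_edge_sketch::kJnsOffset};
  back_edge_sketch::Patch(site, true);   // INTERRUPT -> ON_STACK_REPLACEMENT
  assert(site[0] == 0x66 && site[1] == 0x90);
  back_edge_sketch::Patch(site, false);  // and back to the interrupt check
  assert(site[0] == 0x79 && site[1] == 0x11);
  return 0;
}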
4910 
4911 
4912 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4913  Isolate* isolate,
4914  Code* unoptimized_code,
4915  Address pc) {
4916  Address call_target_address = pc - kIntSize;
4917  Address jns_instr_address = call_target_address - 3;
4918  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4919 
4920  if (*jns_instr_address == kJnsInstruction) {
4921  ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4922  ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4923  Assembler::target_address_at(call_target_address,
4924  unoptimized_code));
4925  return INTERRUPT;
4926  }
4927 
4928  ASSERT_EQ(kNopByteOne, *jns_instr_address);
4929  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4930 
4931  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4932  isolate->builtins()->OnStackReplacement()->entry()) {
4933  return ON_STACK_REPLACEMENT;
4934  }
4935 
4936  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4937  Assembler::target_address_at(call_target_address,
4938  unoptimized_code));
4939  return OSR_AFTER_STACK_CHECK;
4940 }
4941 
4942 
4943 } } // namespace v8::internal
4944 
4945 #endif // V8_TARGET_ARCH_IA32
static const int kFunctionOffset
Definition: objects.h:7324
byte * Address
Definition: globals.h:186
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
static const int kHashFieldOffset
Definition: objects.h:8629
static const int kBitFieldOffset
Definition: objects.h:6461
Scope * DeclarationScope()
Definition: scopes.cc:743
Isolate * isolate() const
Definition: assembler.h:62
const intptr_t kSmiTagMask
Definition: v8.h:5480
static const int kForInFastCaseMarker
Definition: objects.h:8230
VariableDeclaration * function() const
Definition: scopes.h:326
static const int kCodeEntryOffset
Definition: objects.h:7518
static const int kValueOffset
Definition: objects.h:9547
static int SlotOffset(int index)
Definition: contexts.h:498
static const int kBuiltinsOffset
Definition: objects.h:7610
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2385
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
static const int kEnumCacheOffset
Definition: objects.h:3499
static String * cast(Object *obj)
const uint32_t kTwoByteStringTag
Definition: objects.h:610
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
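Smi::FromInt and the smi constants indexed here (kSmiTag, kSmiTagSize, kSmiTagMask) encode a small integer directly in a tagged word: on IA-32 the 31-bit payload sits above a zero tag bit. A minimal standalone sketch of that scheme; the constant values mirror v8.h, but the helper names are illustrative, not V8's API:

#include <cassert>
#include <cstdint>

// 32-bit smi model: 31-bit payload in the upper bits, tag bit 0 clear.
const intptr_t kTag = 0;       // mirrors kSmiTag
const intptr_t kTagSize = 1;   // mirrors kSmiTagSize
const intptr_t kTagMask = 1;   // mirrors kSmiTagMask

intptr_t SmiFromInt(int value) {              // cf. Smi::FromInt
  return (static_cast<intptr_t>(value) << kTagSize) | kTag;
}
bool IsSmi(intptr_t tagged) { return (tagged & kTagMask) == kTag; }
int SmiToInt(intptr_t smi) { return static_cast<int>(smi >> kTagSize); }
bool SmiIsValid(intptr_t v) {                 // cf. Smi::IsValid: 31-bit range
  return v >= -(intptr_t(1) << 30) && v < (intptr_t(1) << 30);
}

int main() {
  intptr_t s = SmiFromInt(42);
  assert(IsSmi(s) && SmiToInt(s) == 42 && SmiIsValid(42));
}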
static const int kResultValuePropertyOffset
Definition: objects.h:7342
bool IsFastObjectElementsKind(ElementsKind kind)
static TypeFeedbackId None()
Definition: utils.h:1149
static Handle< Code > GetUninitialized(Isolate *isolate, Token::Value op)
Definition: ic.cc:2489
const Register esp
static const int kGlobalReceiverOffset
Definition: objects.h:7613
T Max(T a, T b)
Definition: utils.h:227
Scope * outer_scope() const
Definition: scopes.h:350
static const int kGeneratorClosed
Definition: objects.h:7321
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8673
static bool IsSupported(CpuFeature f)
Definition: assembler-arm.h:68
static const int kForInSlowCaseMarker
Definition: objects.h:8231
static bool enabled()
Definition: serialize.h:485
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static const int kSize
Definition: objects.h:7922
static const int kResultDonePropertyOffset
Definition: objects.h:7343
#define ASSERT(condition)
Definition: checks.h:329
static const int kContextOffset
Definition: frames.h:185
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:121
static const int kInObjectFieldCount
Definition: objects.h:7976
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3090
#define POINTER_SIZE_ALIGN(value)
Definition: v8globals.h:390
const uint32_t kStringRepresentationMask
Definition: objects.h:615
static const int kReceiverOffset
Definition: objects.h:7326
MemOperand GlobalObjectOperand()
static const int kCallerFPOffset
Definition: frames.h:188
static const int kInstanceClassNameOffset
Definition: objects.h:7107
Factory * factory()
Definition: isolate.h:995
bool IsOptimizable() const
Definition: compiler.h:232
Variable * parameter(int index) const
Definition: scopes.h:333
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
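ContextOperand builds the memory operand for a context slot; its displacement comes from Context::SlotOffset (see SlotOffset above), which folds the heap-object tag adjustment into the slot offset. A hedged model of that arithmetic, assuming a two-pointer FixedArray-style header (map + length) on IA-32; the function name is illustrative:

#include <cassert>

const int kPointerSize = 4;                   // IA-32
const int kHeapObjectTag = 1;                 // heap pointers carry a low tag bit
const int kContextHeader = 2 * kPointerSize;  // assumed: map + length fields

// Mirrors Context::SlotOffset: displacement of slot `index` relative to a
// tagged context pointer, with the untagging folded into the offset.
int ContextSlotOffset(int index) {
  return kContextHeader + index * kPointerSize - kHeapObjectTag;
}

int main() { assert(ContextSlotOffset(0) == 2 * kPointerSize - 1); }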
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
const int kIntSize
Definition: globals.h:263
static Smi * cast(Object *object)
const Register edi
int ContextChainLength(Scope *scope)
Definition: scopes.cc:721
uint8_t byte
Definition: globals.h:185
#define IN
static const int kLiteralsOffset
Definition: objects.h:7524
const Register ebp
#define UNREACHABLE()
Definition: checks.h:52
static Condition ComputeCondition(Token::Value op)
static const int kLengthOffset
Definition: objects.h:8905
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const Register eax
Variable * arguments() const
Definition: scopes.h:341
static const int kFirstOffset
Definition: objects.h:3500
NilValue
Definition: v8.h:133
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1278
static BailoutId Declarations()
Definition: utils.h:1166
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
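GetBackEdgeState (above) and PatchAt cooperate on back-edge patching: every loop back edge in unoptimized code calls an interrupt-check stub, and the runtime can later retarget that call site at an on-stack-replacement entry, then restore it. The sketch below is a deliberately toy model of just the state flip; the real code rewrites the call target in the instruction stream, and all names and types here are illustrative:

#include <cassert>

// Toy stand-in for a patchable call site at a loop back edge.
enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };

struct BackEdgeCallSite { BackEdgeState target = INTERRUPT; };

void PatchAt(BackEdgeCallSite* site, BackEdgeState next) { site->target = next; }
BackEdgeState GetBackEdgeState(const BackEdgeCallSite* site) {
  return site->target;
}

int main() {
  BackEdgeCallSite site;
  PatchAt(&site, ON_STACK_REPLACEMENT);       // arm OSR at this back edge
  assert(GetBackEdgeState(&site) == ON_STACK_REPLACEMENT);
}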
const int kPointerSize
Definition: globals.h:268
void check(i::Vector< const uint8_t > string)
static const int kJSReturnSequenceLength
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6478
static void MaybeCallEntryHook(MacroAssembler *masm)
Operand FieldOperand(Register object, int offset)
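FieldOperand is the workhorse for object field access: because heap pointers are tagged (low bit set), the field displacement is offset - kHeapObjectTag, so the untagging rides along in the addressing mode for free. A standalone model of the address it computes, with illustrative names:

#include <cassert>
#include <cstdint>

const int kHeapObjectTag = 1;  // heap pointers have the low bit set

// Mirrors FieldOperand(object, offset), i.e. Operand(object, offset - kHeapObjectTag).
uintptr_t FieldAddress(uintptr_t tagged_object, int offset) {
  return tagged_object + offset - kHeapObjectTag;
}

int main() {
  uintptr_t raw = 0x1000;                     // untagged object base
  assert(FieldAddress(raw + kHeapObjectTag, 8) == raw + 8);
}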
const Register ecx
#define __
static const int kCallerSPOffset
Definition: frames.h:190
static const int kCacheStampOffset
Definition: objects.h:7787
const Register pc
static const int kDescriptorSize
Definition: objects.h:3509
static const int kPropertiesOffset
Definition: objects.h:2755
int num_parameters() const
Definition: scopes.h:338
static const int kMarkerOffset
Definition: frames.h:184
static const int kExpressionsOffset
Definition: frames.h:183
static const int kHeaderSize
Definition: objects.h:9042
static const int kElementsOffset
Definition: objects.h:2756
static BailoutId FunctionEntry()
Definition: utils.h:1165
Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
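FixedArrayElementOperand exploits the smi encoding: since an IA-32 smi already holds index << 1, scaling the smi register by 2 (times_half_pointer_size) yields index * kPointerSize with no untagging step. A hedged sketch of the resulting displacement arithmetic, with the usual IA-32 constants and an assumed two-field header:

#include <cassert>

const int kPointerSize = 4, kHeapObjectTag = 1;
const int kFixedArrayHeader = 2 * kPointerSize;  // assumed: map + length

// Displacement produced by scaling the *smi* index by kPointerSize / 2:
// smi == index << 1, so smi * 2 == index * 4.
int ElementOffset(int index_as_smi, int additional_offset = 0) {
  return kFixedArrayHeader + additional_offset * kPointerSize
         + index_as_smi * (kPointerSize / 2) - kHeapObjectTag;
}

int main() {
  int smi_3 = 3 << 1;                         // smi encoding of index 3
  assert(ElementOffset(smi_3) == 8 + 12 - 1);
}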
const uint32_t kStringTag
Definition: objects.h:598
#define BASE_EMBEDDED
Definition: allocation.h:68
OverwriteMode
Definition: ic.h:690
bool IsDeclaredVariableMode(VariableMode mode)
Definition: v8globals.h:503
Vector< const char > CStrVector(const char *data)
Definition: utils.h:574
static int OffsetOfElementAt(int index)
Definition: objects.h:3070
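OffsetOfElementAt gives the untagged byte offset of a FixedArray element; unlike the operand helpers above, no tag adjustment is involved. A small model under the same assumed two-field (map + length) header:

#include <cassert>

const int kPointerSize = 4;
const int kHeaderSize = 2 * kPointerSize;  // assumed: map + length

// Mirrors FixedArray::OffsetOfElementAt for pointer-sized elements.
int OffsetOfElementAt(int index) { return kHeaderSize + index * kPointerSize; }

int main() { assert(OffsetOfElementAt(3) == 20); }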
static const int kLengthOffset
Definition: objects.h:10076
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:3016
Scope * GlobalScope()
Definition: scopes.cc:734
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1406
static const int kContextOffset
Definition: objects.h:7325
static const int kMapOffset
Definition: objects.h:1890
static const int kValueOffset
Definition: objects.h:7779
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3503
const uint32_t kIsNotStringMask
Definition: objects.h:597
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:545
static const int kLengthOffset
Definition: objects.h:3015
const Register ebx
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:6675
static const int kContextOffset
Definition: frames.h:97
static const int kFormalParameterCountOffset
Definition: objects.h:7156
const int kSmiShiftSize
Definition: v8.h:5539
const int kSmiTagSize
Definition: v8.h:5479
void CopyBytes(uint8_t *target, uint8_t *source)
Definition: runtime.cc:1309
static const int kGeneratorExecuting
Definition: objects.h:7320
Condition NegateCondition(Condition cond)
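NegateCondition relies on x86 condition-code pairing: the codes come in even/odd pairs (carry/not_carry, zero/not_zero, ...) whose sense differs only in bit 0, so negation is a single XOR. A minimal sketch with a few of the standard encodings:

#include <cassert>

// Standard x86 condition-code encodings; negation flips bit 0.
enum Condition { carry = 2, not_carry = 3, zero = 4, not_zero = 5 };

Condition Negate(Condition cc) {   // cf. NegateCondition
  return static_cast<Condition>(cc ^ 1);
}

int main() {
  assert(Negate(carry) == not_carry);
  assert(Negate(zero) == not_zero);
}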
static bool ShouldGenerateLog(Isolate *isolate, Expression *type)
Definition: codegen.cc:191
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
const Register esi
static const int kContinuationOffset
Definition: objects.h:7327
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:996
static const int kConstructorOffset
Definition: objects.h:6428
const uint32_t kOneByteStringTag
Definition: objects.h:611
const int kSmiTag
Definition: v8.h:5478
#define ASSERT_NE(v1, v2)
Definition: checks.h:331
static const int kIsUndetectable
Definition: objects.h:6472
static const int kPrototypeOffset
Definition: objects.h:6427
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target)
const Register no_reg
static const int kValueOffset
Definition: objects.h:7701
bool IsImmutableVariableMode(VariableMode mode)
Definition: v8globals.h:513
static const int kNativeContextOffset
Definition: objects.h:7611
void AddNoFrameRange(int from, int to)
Definition: compiler.h:296
const Register edx
T Min(T a, T b)
Definition: utils.h:234
static const int kSharedFunctionInfoOffset
Definition: objects.h:7521
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:2121
static const int kBitField2Offset
Definition: objects.h:6462
#define VOID
static const int kExponentOffset
Definition: objects.h:1977
const uint32_t kStringEncodingMask
Definition: objects.h:609
static const int kInstanceTypeOffset
Definition: objects.h:6459
static const int kOperandStackOffset
Definition: objects.h:7328
static const int kMantissaOffset
Definition: objects.h:1976
TypeofState
Definition: codegen.h:69
Scope * scope() const
Definition: compiler.h:78